Mirror of https://github.com/golang/go.git (synced 2025-12-08 06:10:04 +00:00)

commit 5c2e14872c

[dev.typeparams] merge dev.regabi 41f3af9d04 into dev.typeparams

This brings in the new ir.Node interface, replacing *gc.Node.

Change-Id: I82c623655eee08d77d623babf22ec4d91f9aa3cd

151 changed files with 15197 additions and 15289 deletions
@@ -455,7 +455,7 @@ environmental variable is set accordingly.</p>
 each collection, summarizing the amount of memory collected
 and the length of the pause.</li>
 <li>GODEBUG=inittrace=1 prints a summary of execution time and memory allocation
-information for completed package initilization work.</li>
+information for completed package initialization work.</li>
 <li>GODEBUG=schedtrace=X prints scheduling events every X milliseconds.</li>
 </ul>

@@ -501,6 +501,10 @@ Do not send CLs removing the interior tags from such phrases.
 <p><!-- CL 261917 -->
 <a href="/pkg/syscall/#SysProcAttr"><code>SysProcAttr</code></a> on Windows has a new NoInheritHandles field that disables inheriting handles when creating a new process.
 </p>

+<p><!-- CL 269761, golang.org/issue/42584 -->
+<a href="/pkg/syscall/#DLLError"><code>DLLError</code></a> on Windows now has an Unwrap function for unwrapping its underlying error.
+</p>
+
 </dd>
 </dl><!-- syscall -->
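A note on the DLLError change above: adding an Unwrap method is what lets the standard errors helpers see through the wrapper. The following is an illustrative sketch (not the syscall implementation itself) of how an error type with an Unwrap method interacts with errors.Is:

package main

import (
	"errors"
	"fmt"
	"io/fs"
)

// wrapErr stands in for a wrapper like syscall.DLLError: it carries a
// message and the underlying error, and exposes the latter via Unwrap.
type wrapErr struct {
	Msg string
	Err error
}

func (e *wrapErr) Error() string { return e.Msg }
func (e *wrapErr) Unwrap() error { return e.Err }

func main() {
	err := &wrapErr{Msg: "loading DLL failed", Err: fs.ErrNotExist}
	// Because wrapErr has Unwrap, errors.Is can reach the underlying error.
	fmt.Println(errors.Is(err, fs.ErrNotExist)) // true
}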
@@ -186,7 +186,7 @@ func (p *Package) writeDefs() {
 panic(fmt.Errorf("invalid var kind %q", n.Kind))
 }
 if *gccgo {
-fmt.Fprintf(fc, `extern void *%s __asm__("%s.%s");`, n.Mangle, gccgoSymbolPrefix, n.Mangle)
+fmt.Fprintf(fc, `extern void *%s __asm__("%s.%s");`, n.Mangle, gccgoSymbolPrefix, gccgoToSymbol(n.Mangle))
 fmt.Fprintf(&gccgoInit, "\t%s = &%s;\n", n.Mangle, n.C)
 fmt.Fprintf(fc, "\n")
 }
@@ -1148,7 +1148,7 @@ func (p *Package) writeGccgoExports(fgo2, fm, fgcc, fgcch io.Writer) {
 // will not be able to link against it from the C
 // code.
 goName := "Cgoexp_" + exp.ExpName
-fmt.Fprintf(fgcc, `extern %s %s %s __asm__("%s.%s");`, cRet, goName, cParams, gccgoSymbolPrefix, goName)
+fmt.Fprintf(fgcc, `extern %s %s %s __asm__("%s.%s");`, cRet, goName, cParams, gccgoSymbolPrefix, gccgoToSymbol(goName))
 fmt.Fprint(fgcc, "\n")

 fmt.Fprint(fgcc, "\nCGO_NO_SANITIZE_THREAD\n")
@@ -1182,7 +1182,7 @@ func (p *Package) writeGccgoExports(fgo2, fm, fgcc, fgcch io.Writer) {
 fmt.Fprint(fgcc, "}\n")

 // Dummy declaration for _cgo_main.c
-fmt.Fprintf(fm, `char %s[1] __asm__("%s.%s");`, goName, gccgoSymbolPrefix, goName)
+fmt.Fprintf(fm, `char %s[1] __asm__("%s.%s");`, goName, gccgoSymbolPrefix, gccgoToSymbol(goName))
 fmt.Fprint(fm, "\n")

 // For gccgo we use a wrapper function in Go, in order
@@ -1266,9 +1266,8 @@ func (p *Package) writeExportHeader(fgcch io.Writer) {
 fmt.Fprintf(fgcch, "%s\n", p.gccExportHeaderProlog())
 }

-// gccgoPkgpathToSymbol converts a package path to a mangled packagepath
-// symbol.
-func gccgoPkgpathToSymbol(ppath string) string {
+// gccgoToSymbol converts a name to a mangled symbol for gccgo.
+func gccgoToSymbol(ppath string) string {
 if gccgoMangler == nil {
 var err error
 cmd := os.Getenv("GCCGO")
@@ -1293,12 +1292,12 @@ func (p *Package) gccgoSymbolPrefix() string {
 }

 if *gccgopkgpath != "" {
-return gccgoPkgpathToSymbol(*gccgopkgpath)
+return gccgoToSymbol(*gccgopkgpath)
 }
 if *gccgoprefix == "" && p.PackageName == "main" {
 return "main"
 }
-prefix := gccgoPkgpathToSymbol(*gccgoprefix)
+prefix := gccgoToSymbol(*gccgoprefix)
 if prefix == "" {
 prefix = "go"
 }
@@ -1687,8 +1686,12 @@ void _cgoPREFIX_Cfunc__Cmalloc(void *v) {
 `

 func (p *Package) cPrologGccgo() string {
-return strings.Replace(strings.Replace(cPrologGccgo, "PREFIX", cPrefix, -1),
-"GCCGOSYMBOLPREF", p.gccgoSymbolPrefix(), -1)
+r := strings.NewReplacer(
+"PREFIX", cPrefix,
+"GCCGOSYMBOLPREF", p.gccgoSymbolPrefix(),
+"_cgoCheckPointer", gccgoToSymbol("_cgoCheckPointer"),
+"_cgoCheckResult", gccgoToSymbol("_cgoCheckResult"))
+return r.Replace(cPrologGccgo)
 }

 const cPrologGccgo = `
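The cPrologGccgo change above swaps two nested strings.Replace calls for a single strings.Replacer, which applies every substitution in one pass and scales better as more placeholders are added. A minimal standalone sketch of the same pattern (the placeholder names here are illustrative, not the real cgo ones):

package main

import (
	"fmt"
	"strings"
)

func main() {
	const prolog = "void PREFIX_init(void) { SYMPREF_register(); }"

	// Old style: one pass over the string per placeholder.
	old := strings.Replace(strings.Replace(prolog, "PREFIX", "cgo1", -1), "SYMPREF", "go_example", -1)

	// New style: a single Replacer handles every pair in one pass.
	r := strings.NewReplacer(
		"PREFIX", "cgo1",
		"SYMPREF", "go_example",
	)
	fmt.Println(old == r.Replace(prolog)) // true
}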
@@ -22,16 +22,7 @@ package main_test
 var knownFormats = map[string]string{
 "*bytes.Buffer %s": "",
 "*cmd/compile/internal/gc.EscLocation %v": "",
-"*cmd/compile/internal/gc.Mpflt %v": "",
-"*cmd/compile/internal/gc.Mpint %v": "",
-"*cmd/compile/internal/gc.Node %#v": "",
-"*cmd/compile/internal/gc.Node %+S": "",
-"*cmd/compile/internal/gc.Node %+v": "",
-"*cmd/compile/internal/gc.Node %L": "",
-"*cmd/compile/internal/gc.Node %S": "",
-"*cmd/compile/internal/gc.Node %j": "",
-"*cmd/compile/internal/gc.Node %p": "",
-"*cmd/compile/internal/gc.Node %v": "",
+"*cmd/compile/internal/ir.node %v": "",
 "*cmd/compile/internal/ssa.Block %s": "",
 "*cmd/compile/internal/ssa.Block %v": "",
 "*cmd/compile/internal/ssa.Func %s": "",
@@ -54,7 +45,6 @@ var knownFormats = map[string]string{
 "*cmd/compile/internal/types.Sym %v": "",
 "*cmd/compile/internal/types.Type %#L": "",
 "*cmd/compile/internal/types.Type %#v": "",
-"*cmd/compile/internal/types.Type %+v": "",
 "*cmd/compile/internal/types.Type %-S": "",
 "*cmd/compile/internal/types.Type %0S": "",
 "*cmd/compile/internal/types.Type %L": "",
@@ -84,9 +74,7 @@ var knownFormats = map[string]string{
 "*cmd/internal/obj.Addr %v": "",
 "*cmd/internal/obj.LSym %v": "",
 "*math/big.Float %f": "",
-"*math/big.Int %#x": "",
 "*math/big.Int %s": "",
-"*math/big.Int %v": "",
 "[16]byte %x": "",
 "[]*cmd/compile/internal/ssa.Block %v": "",
 "[]*cmd/compile/internal/ssa.Value %v": "",
@@ -110,27 +98,28 @@ var knownFormats = map[string]string{
 "byte %q": "",
 "byte %v": "",
 "cmd/compile/internal/arm.shift %d": "",
-"cmd/compile/internal/gc.Class %d": "",
-"cmd/compile/internal/gc.Class %s": "",
-"cmd/compile/internal/gc.Class %v": "",
-"cmd/compile/internal/gc.Ctype %d": "",
-"cmd/compile/internal/gc.Ctype %v": "",
-"cmd/compile/internal/gc.Nodes %#v": "",
-"cmd/compile/internal/gc.Nodes %+v": "",
-"cmd/compile/internal/gc.Nodes %.v": "",
-"cmd/compile/internal/gc.Nodes %v": "",
-"cmd/compile/internal/gc.Op %#v": "",
-"cmd/compile/internal/gc.Op %v": "",
-"cmd/compile/internal/gc.Val %#v": "",
-"cmd/compile/internal/gc.Val %T": "",
-"cmd/compile/internal/gc.Val %v": "",
-"cmd/compile/internal/gc.fmtMode %d": "",
 "cmd/compile/internal/gc.initKind %d": "",
 "cmd/compile/internal/gc.itag %v": "",
 "cmd/compile/internal/importer.itag %v": "",
+"cmd/compile/internal/ir.Class %d": "",
+"cmd/compile/internal/ir.Class %v": "",
+"cmd/compile/internal/ir.FmtMode %d": "",
+"cmd/compile/internal/ir.Node %#v": "",
+"cmd/compile/internal/ir.Node %+S": "",
+"cmd/compile/internal/ir.Node %+v": "",
+"cmd/compile/internal/ir.Node %L": "",
+"cmd/compile/internal/ir.Node %S": "",
+"cmd/compile/internal/ir.Node %j": "",
+"cmd/compile/internal/ir.Node %p": "",
+"cmd/compile/internal/ir.Node %v": "",
+"cmd/compile/internal/ir.Nodes %#v": "",
+"cmd/compile/internal/ir.Nodes %+v": "",
+"cmd/compile/internal/ir.Nodes %.v": "",
+"cmd/compile/internal/ir.Nodes %v": "",
+"cmd/compile/internal/ir.Op %#v": "",
+"cmd/compile/internal/ir.Op %v": "",
 "cmd/compile/internal/ssa.BranchPrediction %d": "",
 "cmd/compile/internal/ssa.Edge %v": "",
-"cmd/compile/internal/ssa.GCNode %v": "",
 "cmd/compile/internal/ssa.ID %d": "",
 "cmd/compile/internal/ssa.ID %v": "",
 "cmd/compile/internal/ssa.LocalSlot %s": "",
@@ -179,9 +168,11 @@ var knownFormats = map[string]string{
 "error %v": "",
 "float64 %.2f": "",
 "float64 %.3f": "",
-"float64 %.6g": "",
 "float64 %g": "",
+"go/constant.Kind %v": "",
+"go/constant.Value %#v": "",
 "go/constant.Value %s": "",
+"go/constant.Value %v": "",
 "int %#x": "",
 "int %-12d": "",
 "int %-6d": "",
@@ -199,7 +190,6 @@ var knownFormats = map[string]string{
 "int32 %v": "",
 "int32 %x": "",
 "int64 %#x": "",
-"int64 %+d": "",
 "int64 %-10d": "",
 "int64 %.5d": "",
 "int64 %d": "",
@@ -214,13 +204,14 @@ var knownFormats = map[string]string{
 "interface{} %q": "",
 "interface{} %s": "",
 "interface{} %v": "",
-"map[*cmd/compile/internal/gc.Node]*cmd/compile/internal/ssa.Value %v": "",
-"map[*cmd/compile/internal/gc.Node][]*cmd/compile/internal/gc.Node %v": "",
 "map[*cmd/compile/internal/types2.TypeParam]cmd/compile/internal/types2.Type %s": "",
+"map[cmd/compile/internal/ir.Node]*cmd/compile/internal/ssa.Value %v": "",
+"map[cmd/compile/internal/ir.Node][]cmd/compile/internal/ir.Node %v": "",
 "map[cmd/compile/internal/ssa.ID]uint32 %v": "",
 "map[int64]uint32 %v": "",
 "math/big.Accuracy %s": "",
 "reflect.Type %s": "",
+"reflect.Type %v": "",
 "rune %#U": "",
 "rune %c": "",
 "rune %q": "",
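The knownFormats edits above track the gc.Node to ir.Node move: each key is a "type format-verb" pair that the compiler's format test accepts. As an illustration of how such a vetting map can be consulted (a simplified sketch, not the real test):

package main

import "fmt"

// knownFormats mirrors the idea above: a set of "type verb" pairs that
// are considered acceptable; the empty string value means "no remapping".
var knownFormats = map[string]string{
	"*big.Int %v": "",
	"int64 %d":    "",
}

// checkFormat reports whether formatting arg with verb is on the allow list.
func checkFormat(arg interface{}, verb string) bool {
	key := fmt.Sprintf("%T %s", arg, verb)
	_, ok := knownFormats[key]
	return ok
}

func main() {
	fmt.Println(checkFormat(int64(7), "%d")) // true
	fmt.Println(checkFormat("hi", "%d"))     // false: "string %d" is not listed
}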
@@ -5,6 +5,7 @@
 package amd64

 import (
+"cmd/compile/internal/base"
 "cmd/compile/internal/gc"
 "cmd/internal/obj"
 "cmd/internal/obj/x86"

@@ -64,7 +65,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
 if cnt%int64(gc.Widthreg) != 0 {
 // should only happen with nacl
 if cnt%int64(gc.Widthptr) != 0 {
-gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
+base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
 }
 if *state&ax == 0 {
 p = pp.Appendpp(p, x86.AMOVQ, obj.TYPE_CONST, 0, 0, obj.TYPE_REG, x86.REG_AX, 0)
@@ -8,6 +8,7 @@ import (
 "fmt"
 "math"

+"cmd/compile/internal/base"
 "cmd/compile/internal/gc"
 "cmd/compile/internal/logopt"
 "cmd/compile/internal/ssa"

@@ -975,7 +976,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 r := v.Reg()
 // See the comments in cmd/internal/obj/x86/obj6.go
 // near CanUse1InsnTLS for a detailed explanation of these instructions.
-if x86.CanUse1InsnTLS(gc.Ctxt) {
+if x86.CanUse1InsnTLS(base.Ctxt) {
 // MOVQ (TLS), r
 p := s.Prog(x86.AMOVQ)
 p.From.Type = obj.TYPE_MEM

@@ -1017,7 +1018,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 }
 p := s.Prog(mov)
 p.From.Type = obj.TYPE_ADDR
-p.From.Offset = -gc.Ctxt.FixedFrameSize() // 0 on amd64, just to be consistent with other architectures
+p.From.Offset = -base.Ctxt.FixedFrameSize() // 0 on amd64, just to be consistent with other architectures
 p.From.Name = obj.NAME_PARAM
 p.To.Type = obj.TYPE_REG
 p.To.Reg = v.Reg()

@@ -1164,8 +1165,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 if logopt.Enabled() {
 logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 }
-if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
-gc.Warnl(v.Pos, "generated nil check")
+if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+base.WarnfAt(v.Pos, "generated nil check")
 }
 case ssa.OpAMD64MOVBatomicload, ssa.OpAMD64MOVLatomicload, ssa.OpAMD64MOVQatomicload:
 p := s.Prog(v.Op.Asm())
@@ -9,7 +9,9 @@ import (
 "math"
 "math/bits"

+"cmd/compile/internal/base"
 "cmd/compile/internal/gc"
+"cmd/compile/internal/ir"
 "cmd/compile/internal/logopt"
 "cmd/compile/internal/ssa"
 "cmd/compile/internal/types"

@@ -544,7 +546,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 case *obj.LSym:
 wantreg = "SB"
 gc.AddAux(&p.From, v)
-case *gc.Node:
+case ir.Node:
 wantreg = "SP"
 gc.AddAux(&p.From, v)
 case nil:

@@ -741,8 +743,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 if logopt.Enabled() {
 logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 }
-if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
-gc.Warnl(v.Pos, "generated nil check")
+if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+base.WarnfAt(v.Pos, "generated nil check")
 }
 case ssa.OpARMLoweredZero:
 // MOVW.P Rarg2, 4(R1)

@@ -849,7 +851,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 // caller's SP is FixedFrameSize below the address of the first arg
 p := s.Prog(arm.AMOVW)
 p.From.Type = obj.TYPE_ADDR
-p.From.Offset = -gc.Ctxt.FixedFrameSize()
+p.From.Offset = -base.Ctxt.FixedFrameSize()
 p.From.Name = obj.NAME_PARAM
 p.To.Type = obj.TYPE_REG
 p.To.Reg = v.Reg()
@@ -7,7 +7,9 @@ package arm64
 import (
 "math"

+"cmd/compile/internal/base"
 "cmd/compile/internal/gc"
+"cmd/compile/internal/ir"
 "cmd/compile/internal/logopt"
 "cmd/compile/internal/ssa"
 "cmd/compile/internal/types"

@@ -394,7 +396,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 case *obj.LSym:
 wantreg = "SB"
 gc.AddAux(&p.From, v)
-case *gc.Node:
+case ir.Node:
 wantreg = "SP"
 gc.AddAux(&p.From, v)
 case nil:

@@ -1038,8 +1040,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 if logopt.Enabled() {
 logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 }
-if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Line==1 in generated wrappers
-gc.Warnl(v.Pos, "generated nil check")
+if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Line==1 in generated wrappers
+base.WarnfAt(v.Pos, "generated nil check")
 }
 case ssa.OpARM64Equal,
 ssa.OpARM64NotEqual,

@@ -1068,7 +1070,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 // caller's SP is FixedFrameSize below the address of the first arg
 p := s.Prog(arm64.AMOVD)
 p.From.Type = obj.TYPE_ADDR
-p.From.Offset = -gc.Ctxt.FixedFrameSize()
+p.From.Offset = -base.Ctxt.FixedFrameSize()
 p.From.Name = obj.NAME_PARAM
 p.To.Type = obj.TYPE_REG
 p.To.Reg = v.Reg()
src/cmd/compile/internal/base/base.go (new file, 28 lines)

@@ -0,0 +1,28 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package base
+
+import (
+	"os"
+
+	"cmd/internal/obj"
+)
+
+var Ctxt *obj.Link
+
+var atExitFuncs []func()
+
+func AtExit(f func()) {
+	atExitFuncs = append(atExitFuncs, f)
+}
+
+func Exit(code int) {
+	for i := len(atExitFuncs) - 1; i >= 0; i-- {
+		f := atExitFuncs[i]
+		atExitFuncs = atExitFuncs[:i]
+		f()
+	}
+	os.Exit(code)
+}
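The new base.AtExit and base.Exit above run registered hooks in last-in, first-out order before exiting. A usage sketch (base is an internal compiler package, so this reimplements the same pattern purely to show the ordering):

// Illustrative only: user code cannot import cmd/compile/internal/base.
package main

import (
	"fmt"
	"os"
)

var atExitFuncs []func()

func AtExit(f func()) { atExitFuncs = append(atExitFuncs, f) }

func Exit(code int) {
	for i := len(atExitFuncs) - 1; i >= 0; i-- {
		f := atExitFuncs[i]
		atExitFuncs = atExitFuncs[:i]
		f()
	}
	os.Exit(code)
}

func main() {
	AtExit(func() { fmt.Println("registered first, runs last") })
	AtExit(func() { fmt.Println("registered second, runs first") })
	Exit(0)
}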
src/cmd/compile/internal/base/debug.go (new file, 194 lines)

@@ -0,0 +1,194 @@
|
// Copyright 2009 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Debug arguments, set by -d flag.
|
||||||
|
|
||||||
|
package base
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"reflect"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"cmd/internal/objabi"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Debug holds the parsed debugging configuration values.
|
||||||
|
var Debug = DebugFlags{
|
||||||
|
Fieldtrack: &objabi.Fieldtrack_enabled,
|
||||||
|
}
|
||||||
|
|
||||||
|
// DebugFlags defines the debugging configuration values (see var Debug).
|
||||||
|
// Each struct field is a different value, named for the lower-case of the field name.
|
||||||
|
// Each field must be an int or string and must have a `help` struct tag.
|
||||||
|
//
|
||||||
|
// The -d option takes a comma-separated list of settings.
|
||||||
|
// Each setting is name=value; for ints, name is short for name=1.
|
||||||
|
type DebugFlags struct {
|
||||||
|
Append int `help:"print information about append compilation"`
|
||||||
|
Checkptr int `help:"instrument unsafe pointer conversions"`
|
||||||
|
Closure int `help:"print information about closure compilation"`
|
||||||
|
CompileLater int `help:"compile functions as late as possible"`
|
||||||
|
DclStack int `help:"run internal dclstack check"`
|
||||||
|
Defer int `help:"print information about defer compilation"`
|
||||||
|
DisableNil int `help:"disable nil checks"`
|
||||||
|
DumpPtrs int `help:"show Node pointers values in dump output"`
|
||||||
|
DwarfInl int `help:"print information about DWARF inlined function creation"`
|
||||||
|
Export int `help:"print export data"`
|
||||||
|
Fieldtrack *int `help:"enable field tracking"`
|
||||||
|
GCProg int `help:"print dump of GC programs"`
|
||||||
|
Libfuzzer int `help:"enable coverage instrumentation for libfuzzer"`
|
||||||
|
LocationLists int `help:"print information about DWARF location list creation"`
|
||||||
|
Nil int `help:"print information about nil checks"`
|
||||||
|
PCTab string `help:"print named pc-value table"`
|
||||||
|
Panic int `help:"show all compiler panics"`
|
||||||
|
Slice int `help:"print information about slice compilation"`
|
||||||
|
SoftFloat int `help:"force compiler to emit soft-float code"`
|
||||||
|
TypeAssert int `help:"print information about type assertion inlining"`
|
||||||
|
TypecheckInl int `help:"eager typechecking of inline function bodies"`
|
||||||
|
WB int `help:"print information about write barriers"`
|
||||||
|
|
||||||
|
any bool // set when any of the values have been set
|
||||||
|
}
|
||||||
|
|
||||||
|
// Any reports whether any of the debug flags have been set.
|
||||||
|
func (d *DebugFlags) Any() bool { return d.any }
|
||||||
|
|
||||||
|
type debugField struct {
|
||||||
|
name string
|
||||||
|
help string
|
||||||
|
val interface{} // *int or *string
|
||||||
|
}
|
||||||
|
|
||||||
|
var debugTab []debugField
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
v := reflect.ValueOf(&Debug).Elem()
|
||||||
|
t := v.Type()
|
||||||
|
for i := 0; i < t.NumField(); i++ {
|
||||||
|
f := t.Field(i)
|
||||||
|
if f.Name == "any" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
name := strings.ToLower(f.Name)
|
||||||
|
help := f.Tag.Get("help")
|
||||||
|
if help == "" {
|
||||||
|
panic(fmt.Sprintf("base.Debug.%s is missing help text", f.Name))
|
||||||
|
}
|
||||||
|
ptr := v.Field(i).Addr().Interface()
|
||||||
|
switch ptr.(type) {
|
||||||
|
default:
|
||||||
|
panic(fmt.Sprintf("base.Debug.%s has invalid type %v (must be int or string)", f.Name, f.Type))
|
||||||
|
case *int, *string:
|
||||||
|
// ok
|
||||||
|
case **int:
|
||||||
|
ptr = *ptr.(**int) // record the *int itself
|
||||||
|
}
|
||||||
|
debugTab = append(debugTab, debugField{name, help, ptr})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DebugSSA is called to set a -d ssa/... option.
|
||||||
|
// If nil, those options are reported as invalid options.
|
||||||
|
// If DebugSSA returns a non-empty string, that text is reported as a compiler error.
|
||||||
|
var DebugSSA func(phase, flag string, val int, valString string) string
|
||||||
|
|
||||||
|
// parseDebug parses the -d debug string argument.
|
||||||
|
func parseDebug(debugstr string) {
|
||||||
|
// parse -d argument
|
||||||
|
if debugstr == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
Debug.any = true
|
||||||
|
Split:
|
||||||
|
for _, name := range strings.Split(debugstr, ",") {
|
||||||
|
if name == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// display help about the -d option itself and quit
|
||||||
|
if name == "help" {
|
||||||
|
fmt.Print(debugHelpHeader)
|
||||||
|
maxLen := len("ssa/help")
|
||||||
|
for _, t := range debugTab {
|
||||||
|
if len(t.name) > maxLen {
|
||||||
|
maxLen = len(t.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, t := range debugTab {
|
||||||
|
fmt.Printf("\t%-*s\t%s\n", maxLen, t.name, t.help)
|
||||||
|
}
|
||||||
|
// ssa options have their own help
|
||||||
|
fmt.Printf("\t%-*s\t%s\n", maxLen, "ssa/help", "print help about SSA debugging")
|
||||||
|
fmt.Print(debugHelpFooter)
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
val, valstring, haveInt := 1, "", true
|
||||||
|
if i := strings.IndexAny(name, "=:"); i >= 0 {
|
||||||
|
var err error
|
||||||
|
name, valstring = name[:i], name[i+1:]
|
||||||
|
val, err = strconv.Atoi(valstring)
|
||||||
|
if err != nil {
|
||||||
|
val, haveInt = 1, false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, t := range debugTab {
|
||||||
|
if t.name != name {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
switch vp := t.val.(type) {
|
||||||
|
case nil:
|
||||||
|
// Ignore
|
||||||
|
case *string:
|
||||||
|
*vp = valstring
|
||||||
|
case *int:
|
||||||
|
if !haveInt {
|
||||||
|
log.Fatalf("invalid debug value %v", name)
|
||||||
|
}
|
||||||
|
*vp = val
|
||||||
|
default:
|
||||||
|
panic("bad debugtab type")
|
||||||
|
}
|
||||||
|
continue Split
|
||||||
|
}
|
||||||
|
// special case for ssa for now
|
||||||
|
if DebugSSA != nil && strings.HasPrefix(name, "ssa/") {
|
||||||
|
// expect form ssa/phase/flag
|
||||||
|
// e.g. -d=ssa/generic_cse/time
|
||||||
|
// _ in phase name also matches space
|
||||||
|
phase := name[4:]
|
||||||
|
flag := "debug" // default flag is debug
|
||||||
|
if i := strings.Index(phase, "/"); i >= 0 {
|
||||||
|
flag = phase[i+1:]
|
||||||
|
phase = phase[:i]
|
||||||
|
}
|
||||||
|
err := DebugSSA(phase, flag, val, valstring)
|
||||||
|
if err != "" {
|
||||||
|
log.Fatalf(err)
|
||||||
|
}
|
||||||
|
continue Split
|
||||||
|
}
|
||||||
|
log.Fatalf("unknown debug key -d %s\n", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const debugHelpHeader = `usage: -d arg[,arg]* and arg is <key>[=<value>]
|
||||||
|
|
||||||
|
<key> is one of:
|
||||||
|
|
||||||
|
`
|
||||||
|
|
||||||
|
const debugHelpFooter = `
|
||||||
|
<value> is key-specific.
|
||||||
|
|
||||||
|
Key "checkptr" supports values:
|
||||||
|
"0": instrumentation disabled
|
||||||
|
"1": conversions involving unsafe.Pointer are instrumented
|
||||||
|
"2": conversions to unsafe.Pointer force heap allocation
|
||||||
|
|
||||||
|
Key "pctab" supports values:
|
||||||
|
"pctospadj", "pctofile", "pctoline", "pctoinline", "pctopcdata"
|
||||||
|
`
|
||||||
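debug.go above parses the -d option as a comma-separated list of name or name=value settings, where a bare name is shorthand for name=1. A cut-down sketch of that parsing step (the flag names and the output map are illustrative; the real parser stores into the DebugFlags struct and reports errors via log.Fatalf):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseDebug mirrors the shape of the -d parser above.
func parseDebug(debugstr string, out map[string]int) {
	for _, name := range strings.Split(debugstr, ",") {
		if name == "" {
			continue
		}
		val := 1
		if i := strings.IndexAny(name, "=:"); i >= 0 {
			var err error
			val, err = strconv.Atoi(name[i+1:])
			if err != nil {
				continue // the real parser reports an error here
			}
			name = name[:i]
		}
		out[name] = val
	}
}

func main() {
	settings := map[string]int{}
	parseDebug("nil,checkptr=2", settings)
	fmt.Println(settings) // map[checkptr:2 nil:1]
}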
src/cmd/compile/internal/base/flag.go (new file, 454 lines)

@@ -0,0 +1,454 @@
|
// Copyright 2009 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package base
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"reflect"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"cmd/internal/objabi"
|
||||||
|
"cmd/internal/sys"
|
||||||
|
)
|
||||||
|
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "usage: compile [options] file.go...\n")
|
||||||
|
objabi.Flagprint(os.Stderr)
|
||||||
|
Exit(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Flag holds the parsed command-line flags.
|
||||||
|
// See ParseFlag for non-zero defaults.
|
||||||
|
var Flag CmdFlags
|
||||||
|
|
||||||
|
// A CountFlag is a counting integer flag.
|
||||||
|
// It accepts -name=value to set the value directly,
|
||||||
|
// but it also accepts -name with no =value to increment the count.
|
||||||
|
type CountFlag int
|
||||||
|
|
||||||
|
// CmdFlags defines the command-line flags (see var Flag).
|
||||||
|
// Each struct field is a different flag, by default named for the lower-case of the field name.
|
||||||
|
// If the flag name is a single letter, the default flag name is left upper-case.
|
||||||
|
// If the flag name is "Lower" followed by a single letter, the default flag name is the lower-case of the last letter.
|
||||||
|
//
|
||||||
|
// If this default flag name can't be made right, the `flag` struct tag can be used to replace it,
|
||||||
|
// but this should be done only in exceptional circumstances: it helps everyone if the flag name
|
||||||
|
// is obvious from the field name when the flag is used elsewhere in the compiler sources.
|
||||||
|
// The `flag:"-"` struct tag makes a field invisible to the flag logic and should also be used sparingly.
|
||||||
|
//
|
||||||
|
// Each field must have a `help` struct tag giving the flag help message.
|
||||||
|
//
|
||||||
|
// The allowed field types are bool, int, string, pointers to those (for values stored elsewhere),
|
||||||
|
// CountFlag (for a counting flag), and func(string) (for a flag that uses special code for parsing).
|
||||||
|
type CmdFlags struct {
|
||||||
|
// Single letters
|
||||||
|
B CountFlag "help:\"disable bounds checking\""
|
||||||
|
C CountFlag "help:\"disable printing of columns in error messages\""
|
||||||
|
D string "help:\"set relative `path` for local imports\""
|
||||||
|
E CountFlag "help:\"debug symbol export\""
|
||||||
|
G CountFlag "help:\"accept generic code\""
|
||||||
|
I func(string) "help:\"add `directory` to import search path\""
|
||||||
|
K CountFlag "help:\"debug missing line numbers\""
|
||||||
|
L CountFlag "help:\"show full file names in error messages\""
|
||||||
|
N CountFlag "help:\"disable optimizations\""
|
||||||
|
S CountFlag "help:\"print assembly listing\""
|
||||||
|
// V is added by objabi.AddVersionFlag
|
||||||
|
W CountFlag "help:\"debug parse tree after type checking\""
|
||||||
|
|
||||||
|
LowerC int "help:\"concurrency during compilation (1 means no concurrency)\""
|
||||||
|
LowerD func(string) "help:\"enable debugging settings; try -d help\""
|
||||||
|
LowerE CountFlag "help:\"no limit on number of errors reported\""
|
||||||
|
LowerH CountFlag "help:\"halt on error\""
|
||||||
|
LowerJ CountFlag "help:\"debug runtime-initialized variables\""
|
||||||
|
LowerL CountFlag "help:\"disable inlining\""
|
||||||
|
LowerM CountFlag "help:\"print optimization decisions\""
|
||||||
|
LowerO string "help:\"write output to `file`\""
|
||||||
|
LowerP *string "help:\"set expected package import `path`\"" // &Ctxt.Pkgpath, set below
|
||||||
|
LowerR CountFlag "help:\"debug generated wrappers\""
|
||||||
|
LowerT bool "help:\"enable tracing for debugging the compiler\""
|
||||||
|
LowerW CountFlag "help:\"debug type checking\""
|
||||||
|
LowerV *bool "help:\"increase debug verbosity\""
|
||||||
|
|
||||||
|
// Special characters
|
||||||
|
Percent int "flag:\"%\" help:\"debug non-static initializers\""
|
||||||
|
CompilingRuntime bool "flag:\"+\" help:\"compiling runtime\""
|
||||||
|
|
||||||
|
// Longer names
|
||||||
|
AsmHdr string "help:\"write assembly header to `file`\""
|
||||||
|
Bench string "help:\"append benchmark times to `file`\""
|
||||||
|
BlockProfile string "help:\"write block profile to `file`\""
|
||||||
|
BuildID string "help:\"record `id` as the build id in the export metadata\""
|
||||||
|
CPUProfile string "help:\"write cpu profile to `file`\""
|
||||||
|
Complete bool "help:\"compiling complete package (no C or assembly)\""
|
||||||
|
Dwarf bool "help:\"generate DWARF symbols\""
|
||||||
|
DwarfBASEntries *bool "help:\"use base address selection entries in DWARF\"" // &Ctxt.UseBASEntries, set below
|
||||||
|
DwarfLocationLists *bool "help:\"add location lists to DWARF in optimized mode\"" // &Ctxt.Flag_locationlists, set below
|
||||||
|
Dynlink *bool "help:\"support references to Go symbols defined in other shared libraries\"" // &Ctxt.Flag_dynlink, set below
|
||||||
|
EmbedCfg func(string) "help:\"read go:embed configuration from `file`\""
|
||||||
|
GenDwarfInl int "help:\"generate DWARF inline info records\"" // 0=disabled, 1=funcs, 2=funcs+formals/locals
|
||||||
|
GoVersion string "help:\"required version of the runtime\""
|
||||||
|
ImportCfg func(string) "help:\"read import configuration from `file`\""
|
||||||
|
ImportMap func(string) "help:\"add `definition` of the form source=actual to import map\""
|
||||||
|
InstallSuffix string "help:\"set pkg directory `suffix`\""
|
||||||
|
JSON string "help:\"version,file for JSON compiler/optimizer detail output\""
|
||||||
|
Lang string "help:\"Go language version source code expects\""
|
||||||
|
LinkObj string "help:\"write linker-specific object to `file`\""
|
||||||
|
LinkShared *bool "help:\"generate code that will be linked against Go shared libraries\"" // &Ctxt.Flag_linkshared, set below
|
||||||
|
Live CountFlag "help:\"debug liveness analysis\""
|
||||||
|
MSan bool "help:\"build code compatible with C/C++ memory sanitizer\""
|
||||||
|
MemProfile string "help:\"write memory profile to `file`\""
|
||||||
|
MemProfileRate int64 "help:\"set runtime.MemProfileRate to `rate`\""
|
||||||
|
MutexProfile string "help:\"write mutex profile to `file`\""
|
||||||
|
NoLocalImports bool "help:\"reject local (relative) imports\""
|
||||||
|
Pack bool "help:\"write to file.a instead of file.o\""
|
||||||
|
Race bool "help:\"enable race detector\""
|
||||||
|
Shared *bool "help:\"generate code that can be linked into a shared library\"" // &Ctxt.Flag_shared, set below
|
||||||
|
SmallFrames bool "help:\"reduce the size limit for stack allocated objects\"" // small stacks, to diagnose GC latency; see golang.org/issue/27732
|
||||||
|
Spectre string "help:\"enable spectre mitigations in `list` (all, index, ret)\""
|
||||||
|
Std bool "help:\"compiling standard library\""
|
||||||
|
SymABIs string "help:\"read symbol ABIs from `file`\""
|
||||||
|
TraceProfile string "help:\"write an execution trace to `file`\""
|
||||||
|
TrimPath string "help:\"remove `prefix` from recorded source file paths\""
|
||||||
|
WB bool "help:\"enable write barrier\"" // TODO: remove
|
||||||
|
|
||||||
|
// Configuration derived from flags; not a flag itself.
|
||||||
|
Cfg struct {
|
||||||
|
Embed struct { // set by -embedcfg
|
||||||
|
Patterns map[string][]string
|
||||||
|
Files map[string]string
|
||||||
|
}
|
||||||
|
ImportDirs []string // appended to by -I
|
||||||
|
ImportMap map[string]string // set by -importmap OR -importcfg
|
||||||
|
PackageFile map[string]string // set by -importcfg; nil means not in use
|
||||||
|
SpectreIndex bool // set by -spectre=index or -spectre=all
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseFlags parses the command-line flags into Flag.
|
||||||
|
func ParseFlags() {
|
||||||
|
Flag.I = addImportDir
|
||||||
|
|
||||||
|
Flag.LowerC = 1
|
||||||
|
Flag.LowerD = parseDebug
|
||||||
|
Flag.LowerP = &Ctxt.Pkgpath
|
||||||
|
Flag.LowerV = &Ctxt.Debugvlog
|
||||||
|
|
||||||
|
Flag.Dwarf = objabi.GOARCH != "wasm"
|
||||||
|
Flag.DwarfBASEntries = &Ctxt.UseBASEntries
|
||||||
|
Flag.DwarfLocationLists = &Ctxt.Flag_locationlists
|
||||||
|
*Flag.DwarfLocationLists = true
|
||||||
|
Flag.Dynlink = &Ctxt.Flag_dynlink
|
||||||
|
Flag.EmbedCfg = readEmbedCfg
|
||||||
|
Flag.GenDwarfInl = 2
|
||||||
|
Flag.ImportCfg = readImportCfg
|
||||||
|
Flag.ImportMap = addImportMap
|
||||||
|
Flag.LinkShared = &Ctxt.Flag_linkshared
|
||||||
|
Flag.Shared = &Ctxt.Flag_shared
|
||||||
|
Flag.WB = true
|
||||||
|
|
||||||
|
Flag.Cfg.ImportMap = make(map[string]string)
|
||||||
|
|
||||||
|
objabi.AddVersionFlag() // -V
|
||||||
|
registerFlags()
|
||||||
|
objabi.Flagparse(usage)
|
||||||
|
|
||||||
|
if Flag.MSan && !sys.MSanSupported(objabi.GOOS, objabi.GOARCH) {
|
||||||
|
log.Fatalf("%s/%s does not support -msan", objabi.GOOS, objabi.GOARCH)
|
||||||
|
}
|
||||||
|
if Flag.Race && !sys.RaceDetectorSupported(objabi.GOOS, objabi.GOARCH) {
|
||||||
|
log.Fatalf("%s/%s does not support -race", objabi.GOOS, objabi.GOARCH)
|
||||||
|
}
|
||||||
|
if (*Flag.Shared || *Flag.Dynlink || *Flag.LinkShared) && !Ctxt.Arch.InFamily(sys.AMD64, sys.ARM, sys.ARM64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X) {
|
||||||
|
log.Fatalf("%s/%s does not support -shared", objabi.GOOS, objabi.GOARCH)
|
||||||
|
}
|
||||||
|
parseSpectre(Flag.Spectre) // left as string for recordFlags
|
||||||
|
|
||||||
|
Ctxt.Flag_shared = Ctxt.Flag_dynlink || Ctxt.Flag_shared
|
||||||
|
Ctxt.Flag_optimize = Flag.N == 0
|
||||||
|
Ctxt.Debugasm = int(Flag.S)
|
||||||
|
|
||||||
|
if flag.NArg() < 1 {
|
||||||
|
usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
if Flag.GoVersion != "" && Flag.GoVersion != runtime.Version() {
|
||||||
|
fmt.Printf("compile: version %q does not match go tool version %q\n", runtime.Version(), Flag.GoVersion)
|
||||||
|
Exit(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
if Flag.LowerO == "" {
|
||||||
|
p := flag.Arg(0)
|
||||||
|
if i := strings.LastIndex(p, "/"); i >= 0 {
|
||||||
|
p = p[i+1:]
|
||||||
|
}
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
if i := strings.LastIndex(p, `\`); i >= 0 {
|
||||||
|
p = p[i+1:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if i := strings.LastIndex(p, "."); i >= 0 {
|
||||||
|
p = p[:i]
|
||||||
|
}
|
||||||
|
suffix := ".o"
|
||||||
|
if Flag.Pack {
|
||||||
|
suffix = ".a"
|
||||||
|
}
|
||||||
|
Flag.LowerO = p + suffix
|
||||||
|
}
|
||||||
|
|
||||||
|
if Flag.Race && Flag.MSan {
|
||||||
|
log.Fatal("cannot use both -race and -msan")
|
||||||
|
}
|
||||||
|
if Flag.Race || Flag.MSan {
|
||||||
|
// -race and -msan imply -d=checkptr for now.
|
||||||
|
Debug.Checkptr = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if Flag.CompilingRuntime && Flag.N != 0 {
|
||||||
|
log.Fatal("cannot disable optimizations while compiling runtime")
|
||||||
|
}
|
||||||
|
if Flag.LowerC < 1 {
|
||||||
|
log.Fatalf("-c must be at least 1, got %d", Flag.LowerC)
|
||||||
|
}
|
||||||
|
if Flag.LowerC > 1 && !concurrentBackendAllowed() {
|
||||||
|
log.Fatalf("cannot use concurrent backend compilation with provided flags; invoked as %v", os.Args)
|
||||||
|
}
|
||||||
|
|
||||||
|
if Flag.CompilingRuntime {
|
||||||
|
// Runtime can't use -d=checkptr, at least not yet.
|
||||||
|
Debug.Checkptr = 0
|
||||||
|
|
||||||
|
// Fuzzing the runtime isn't interesting either.
|
||||||
|
Debug.Libfuzzer = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// set via a -d flag
|
||||||
|
Ctxt.Debugpcln = Debug.PCTab
|
||||||
|
}
|
||||||
|
|
||||||
|
// registerFlags adds flag registrations for all the fields in Flag.
|
||||||
|
// See the comment on type CmdFlags for the rules.
|
||||||
|
func registerFlags() {
|
||||||
|
var (
|
||||||
|
boolType = reflect.TypeOf(bool(false))
|
||||||
|
intType = reflect.TypeOf(int(0))
|
||||||
|
stringType = reflect.TypeOf(string(""))
|
||||||
|
ptrBoolType = reflect.TypeOf(new(bool))
|
||||||
|
ptrIntType = reflect.TypeOf(new(int))
|
||||||
|
ptrStringType = reflect.TypeOf(new(string))
|
||||||
|
countType = reflect.TypeOf(CountFlag(0))
|
||||||
|
funcType = reflect.TypeOf((func(string))(nil))
|
||||||
|
)
|
||||||
|
|
||||||
|
v := reflect.ValueOf(&Flag).Elem()
|
||||||
|
t := v.Type()
|
||||||
|
for i := 0; i < t.NumField(); i++ {
|
||||||
|
f := t.Field(i)
|
||||||
|
if f.Name == "Cfg" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var name string
|
||||||
|
if len(f.Name) == 1 {
|
||||||
|
name = f.Name
|
||||||
|
} else if len(f.Name) == 6 && f.Name[:5] == "Lower" && 'A' <= f.Name[5] && f.Name[5] <= 'Z' {
|
||||||
|
name = string(rune(f.Name[5] + 'a' - 'A'))
|
||||||
|
} else {
|
||||||
|
name = strings.ToLower(f.Name)
|
||||||
|
}
|
||||||
|
if tag := f.Tag.Get("flag"); tag != "" {
|
||||||
|
name = tag
|
||||||
|
}
|
||||||
|
|
||||||
|
help := f.Tag.Get("help")
|
||||||
|
if help == "" {
|
||||||
|
panic(fmt.Sprintf("base.Flag.%s is missing help text", f.Name))
|
||||||
|
}
|
||||||
|
|
||||||
|
if k := f.Type.Kind(); (k == reflect.Ptr || k == reflect.Func) && v.Field(i).IsNil() {
|
||||||
|
panic(fmt.Sprintf("base.Flag.%s is uninitialized %v", f.Name, f.Type))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch f.Type {
|
||||||
|
case boolType:
|
||||||
|
p := v.Field(i).Addr().Interface().(*bool)
|
||||||
|
flag.BoolVar(p, name, *p, help)
|
||||||
|
case intType:
|
||||||
|
p := v.Field(i).Addr().Interface().(*int)
|
||||||
|
flag.IntVar(p, name, *p, help)
|
||||||
|
case stringType:
|
||||||
|
p := v.Field(i).Addr().Interface().(*string)
|
||||||
|
flag.StringVar(p, name, *p, help)
|
||||||
|
case ptrBoolType:
|
||||||
|
p := v.Field(i).Interface().(*bool)
|
||||||
|
flag.BoolVar(p, name, *p, help)
|
||||||
|
case ptrIntType:
|
||||||
|
p := v.Field(i).Interface().(*int)
|
||||||
|
flag.IntVar(p, name, *p, help)
|
||||||
|
case ptrStringType:
|
||||||
|
p := v.Field(i).Interface().(*string)
|
||||||
|
flag.StringVar(p, name, *p, help)
|
||||||
|
case countType:
|
||||||
|
p := (*int)(v.Field(i).Addr().Interface().(*CountFlag))
|
||||||
|
objabi.Flagcount(name, help, p)
|
||||||
|
case funcType:
|
||||||
|
f := v.Field(i).Interface().(func(string))
|
||||||
|
objabi.Flagfn1(name, help, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// concurrentFlagOk reports whether the current compiler flags
|
||||||
|
// are compatible with concurrent compilation.
|
||||||
|
func concurrentFlagOk() bool {
|
||||||
|
// TODO(rsc): Many of these are fine. Remove them.
|
||||||
|
return Flag.Percent == 0 &&
|
||||||
|
Flag.E == 0 &&
|
||||||
|
Flag.K == 0 &&
|
||||||
|
Flag.L == 0 &&
|
||||||
|
Flag.LowerH == 0 &&
|
||||||
|
Flag.LowerJ == 0 &&
|
||||||
|
Flag.LowerM == 0 &&
|
||||||
|
Flag.LowerR == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func concurrentBackendAllowed() bool {
|
||||||
|
if !concurrentFlagOk() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debug.S by itself is ok, because all printing occurs
|
||||||
|
// while writing the object file, and that is non-concurrent.
|
||||||
|
// Adding Debug_vlog, however, causes Debug.S to also print
|
||||||
|
// while flushing the plist, which happens concurrently.
|
||||||
|
if Ctxt.Debugvlog || Debug.Any() || Flag.Live > 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// TODO: Test and delete this condition.
|
||||||
|
if objabi.Fieldtrack_enabled != 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// TODO: fix races and enable the following flags
|
||||||
|
if Ctxt.Flag_shared || Ctxt.Flag_dynlink || Flag.Race {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func addImportDir(dir string) {
|
||||||
|
if dir != "" {
|
||||||
|
Flag.Cfg.ImportDirs = append(Flag.Cfg.ImportDirs, dir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func addImportMap(s string) {
|
||||||
|
if Flag.Cfg.ImportMap == nil {
|
||||||
|
Flag.Cfg.ImportMap = make(map[string]string)
|
||||||
|
}
|
||||||
|
if strings.Count(s, "=") != 1 {
|
||||||
|
log.Fatal("-importmap argument must be of the form source=actual")
|
||||||
|
}
|
||||||
|
i := strings.Index(s, "=")
|
||||||
|
source, actual := s[:i], s[i+1:]
|
||||||
|
if source == "" || actual == "" {
|
||||||
|
log.Fatal("-importmap argument must be of the form source=actual; source and actual must be non-empty")
|
||||||
|
}
|
||||||
|
Flag.Cfg.ImportMap[source] = actual
|
||||||
|
}
|
||||||
|
|
||||||
|
func readImportCfg(file string) {
|
||||||
|
if Flag.Cfg.ImportMap == nil {
|
||||||
|
Flag.Cfg.ImportMap = make(map[string]string)
|
||||||
|
}
|
||||||
|
Flag.Cfg.PackageFile = map[string]string{}
|
||||||
|
data, err := ioutil.ReadFile(file)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("-importcfg: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for lineNum, line := range strings.Split(string(data), "\n") {
|
||||||
|
lineNum++ // 1-based
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if line == "" || strings.HasPrefix(line, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var verb, args string
|
||||||
|
if i := strings.Index(line, " "); i < 0 {
|
||||||
|
verb = line
|
||||||
|
} else {
|
||||||
|
verb, args = line[:i], strings.TrimSpace(line[i+1:])
|
||||||
|
}
|
||||||
|
var before, after string
|
||||||
|
if i := strings.Index(args, "="); i >= 0 {
|
||||||
|
before, after = args[:i], args[i+1:]
|
||||||
|
}
|
||||||
|
switch verb {
|
||||||
|
default:
|
||||||
|
log.Fatalf("%s:%d: unknown directive %q", file, lineNum, verb)
|
||||||
|
case "importmap":
|
||||||
|
if before == "" || after == "" {
|
||||||
|
log.Fatalf(`%s:%d: invalid importmap: syntax is "importmap old=new"`, file, lineNum)
|
||||||
|
}
|
||||||
|
Flag.Cfg.ImportMap[before] = after
|
||||||
|
case "packagefile":
|
||||||
|
if before == "" || after == "" {
|
||||||
|
log.Fatalf(`%s:%d: invalid packagefile: syntax is "packagefile path=filename"`, file, lineNum)
|
||||||
|
}
|
||||||
|
Flag.Cfg.PackageFile[before] = after
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func readEmbedCfg(file string) {
|
||||||
|
data, err := ioutil.ReadFile(file)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("-embedcfg: %v", err)
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal(data, &Flag.Cfg.Embed); err != nil {
|
||||||
|
log.Fatalf("%s: %v", file, err)
|
||||||
|
}
|
||||||
|
if Flag.Cfg.Embed.Patterns == nil {
|
||||||
|
log.Fatalf("%s: invalid embedcfg: missing Patterns", file)
|
||||||
|
}
|
||||||
|
if Flag.Cfg.Embed.Files == nil {
|
||||||
|
log.Fatalf("%s: invalid embedcfg: missing Files", file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseSpectre parses the spectre configuration from the string s.
|
||||||
|
func parseSpectre(s string) {
|
||||||
|
for _, f := range strings.Split(s, ",") {
|
||||||
|
f = strings.TrimSpace(f)
|
||||||
|
switch f {
|
||||||
|
default:
|
||||||
|
log.Fatalf("unknown setting -spectre=%s", f)
|
||||||
|
case "":
|
||||||
|
// nothing
|
||||||
|
case "all":
|
||||||
|
Flag.Cfg.SpectreIndex = true
|
||||||
|
Ctxt.Retpoline = true
|
||||||
|
case "index":
|
||||||
|
Flag.Cfg.SpectreIndex = true
|
||||||
|
case "ret":
|
||||||
|
Ctxt.Retpoline = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if Flag.Cfg.SpectreIndex {
|
||||||
|
switch objabi.GOARCH {
|
||||||
|
case "amd64":
|
||||||
|
// ok
|
||||||
|
default:
|
||||||
|
log.Fatalf("GOARCH=%s does not support -spectre=index", objabi.GOARCH)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
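flag.go above registers every CmdFlags field with the standard flag package, picking flag.BoolVar, IntVar, or StringVar from the field's reflected type and reading the help text from its struct tag. A simplified sketch of that dispatch (only a few field kinds, and the field names here are illustrative):

package main

import (
	"flag"
	"fmt"
	"reflect"
	"strings"
)

type CmdFlags struct {
	Std    bool   `help:"compiling standard library"`
	Lang   string `help:"Go language version source code expects"`
	LowerC int    `help:"concurrency during compilation"`
}

func registerFlags(cfg *CmdFlags) {
	v := reflect.ValueOf(cfg).Elem()
	t := v.Type()
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		name := strings.ToLower(f.Name)
		// Mirror the "LowerX" convention: LowerC registers as -c.
		if strings.HasPrefix(f.Name, "Lower") && len(f.Name) == 6 {
			name = strings.ToLower(f.Name[5:])
		}
		help := f.Tag.Get("help")
		switch p := v.Field(i).Addr().Interface().(type) {
		case *bool:
			flag.BoolVar(p, name, *p, help)
		case *int:
			flag.IntVar(p, name, *p, help)
		case *string:
			flag.StringVar(p, name, *p, help)
		}
	}
}

func main() {
	cfg := CmdFlags{LowerC: 1}
	registerFlags(&cfg)
	flag.Parse()
	fmt.Printf("%+v\n", cfg)
}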
src/cmd/compile/internal/base/print.go (new file, 260 lines)

@@ -0,0 +1,260 @@
|
// Copyright 2020 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package base
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"runtime/debug"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"cmd/internal/objabi"
|
||||||
|
"cmd/internal/src"
|
||||||
|
)
|
||||||
|
|
||||||
|
// An errorMsg is a queued error message, waiting to be printed.
|
||||||
|
type errorMsg struct {
|
||||||
|
pos src.XPos
|
||||||
|
msg string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pos is the current source position being processed,
|
||||||
|
// printed by Errorf, ErrorfLang, Fatalf, and Warnf.
|
||||||
|
var Pos src.XPos
|
||||||
|
|
||||||
|
var (
|
||||||
|
errorMsgs []errorMsg
|
||||||
|
numErrors int // number of entries in errorMsgs that are errors (as opposed to warnings)
|
||||||
|
numSyntaxErrors int
|
||||||
|
)
|
||||||
|
|
||||||
|
// Errors returns the number of errors reported.
|
||||||
|
func Errors() int {
|
||||||
|
return numErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
// SyntaxErrors returns the number of syntax errors reported
|
||||||
|
func SyntaxErrors() int {
|
||||||
|
return numSyntaxErrors
|
||||||
|
}
|
||||||
|
|
||||||
|
// addErrorMsg adds a new errorMsg (which may be a warning) to errorMsgs.
|
||||||
|
func addErrorMsg(pos src.XPos, format string, args ...interface{}) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
// Only add the position if know the position.
|
||||||
|
// See issue golang.org/issue/11361.
|
||||||
|
if pos.IsKnown() {
|
||||||
|
msg = fmt.Sprintf("%v: %s", FmtPos(pos), msg)
|
||||||
|
}
|
||||||
|
errorMsgs = append(errorMsgs, errorMsg{
|
||||||
|
pos: pos,
|
||||||
|
msg: msg + "\n",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// FmtPos formats pos as a file:line string.
|
||||||
|
func FmtPos(pos src.XPos) string {
|
||||||
|
if Ctxt == nil {
|
||||||
|
return "???"
|
||||||
|
}
|
||||||
|
return Ctxt.OutermostPos(pos).Format(Flag.C == 0, Flag.L == 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// byPos sorts errors by source position.
|
||||||
|
type byPos []errorMsg
|
||||||
|
|
||||||
|
func (x byPos) Len() int { return len(x) }
|
||||||
|
func (x byPos) Less(i, j int) bool { return x[i].pos.Before(x[j].pos) }
|
||||||
|
func (x byPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||||
|
|
||||||
|
// FlushErrors sorts errors seen so far by line number, prints them to stdout,
|
||||||
|
// and empties the errors array.
|
||||||
|
func FlushErrors() {
|
||||||
|
Ctxt.Bso.Flush()
|
||||||
|
if len(errorMsgs) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
sort.Stable(byPos(errorMsgs))
|
||||||
|
for i, err := range errorMsgs {
|
||||||
|
if i == 0 || err.msg != errorMsgs[i-1].msg {
|
||||||
|
fmt.Printf("%s", err.msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
errorMsgs = errorMsgs[:0]
|
||||||
|
}
|
||||||
|
|
||||||
|
// lasterror keeps track of the most recently issued error,
|
||||||
|
// to avoid printing multiple error messages on the same line.
|
||||||
|
var lasterror struct {
|
||||||
|
syntax src.XPos // source position of last syntax error
|
||||||
|
other src.XPos // source position of last non-syntax error
|
||||||
|
msg string // error message of last non-syntax error
|
||||||
|
}
|
||||||
|
|
||||||
|
// sameline reports whether two positions a, b are on the same line.
|
||||||
|
func sameline(a, b src.XPos) bool {
|
||||||
|
p := Ctxt.PosTable.Pos(a)
|
||||||
|
q := Ctxt.PosTable.Pos(b)
|
||||||
|
return p.Base() == q.Base() && p.Line() == q.Line()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Errorf reports a formatted error at the current line.
|
||||||
|
func Errorf(format string, args ...interface{}) {
|
	ErrorfAt(Pos, format, args...)
}

// ErrorfAt reports a formatted error message at pos.
func ErrorfAt(pos src.XPos, format string, args ...interface{}) {
	msg := fmt.Sprintf(format, args...)

	if strings.HasPrefix(msg, "syntax error") {
		numSyntaxErrors++
		// only one syntax error per line, no matter what error
		if sameline(lasterror.syntax, pos) {
			return
		}
		lasterror.syntax = pos
	} else {
		// only one of multiple equal non-syntax errors per line
		// (flusherrors shows only one of them, so we filter them
		// here as best as we can (they may not appear in order)
		// so that we don't count them here and exit early, and
		// then have nothing to show for.)
		if sameline(lasterror.other, pos) && lasterror.msg == msg {
			return
		}
		lasterror.other = pos
		lasterror.msg = msg
	}

	addErrorMsg(pos, "%s", msg)
	numErrors++

	hcrash()
	if numErrors >= 10 && Flag.LowerE == 0 {
		FlushErrors()
		fmt.Printf("%v: too many errors\n", FmtPos(pos))
		ErrorExit()
	}
}

// ErrorfVers reports that a language feature (format, args) requires a later version of Go.
func ErrorfVers(lang string, format string, args ...interface{}) {
	Errorf("%s requires %s or later (-lang was set to %s; check go.mod)", fmt.Sprintf(format, args...), lang, Flag.Lang)
}

// UpdateErrorDot is a clumsy hack that rewrites the last error,
// if it was "LINE: undefined: NAME", to be "LINE: undefined: NAME in EXPR".
// It is used to give better error messages for dot (selector) expressions.
func UpdateErrorDot(line string, name, expr string) {
	if len(errorMsgs) == 0 {
		return
	}
	e := &errorMsgs[len(errorMsgs)-1]
	if strings.HasPrefix(e.msg, line) && e.msg == fmt.Sprintf("%v: undefined: %v\n", line, name) {
		e.msg = fmt.Sprintf("%v: undefined: %v in %v\n", line, name, expr)
	}
}

// Warnf reports a formatted warning at the current line.
// In general the Go compiler does NOT generate warnings,
// so this should be used only when the user has opted in
// to additional output by setting a particular flag.
func Warn(format string, args ...interface{}) {
	WarnfAt(Pos, format, args...)
}

// WarnfAt reports a formatted warning at pos.
// In general the Go compiler does NOT generate warnings,
// so this should be used only when the user has opted in
// to additional output by setting a particular flag.
func WarnfAt(pos src.XPos, format string, args ...interface{}) {
	addErrorMsg(pos, format, args...)
	if Flag.LowerM != 0 {
		FlushErrors()
	}
}

// Fatalf reports a fatal error - an internal problem - at the current line and exits.
// If other errors have already been printed, then Fatalf just quietly exits.
// (The internal problem may have been caused by incomplete information
// after the already-reported errors, so best to let users fix those and
// try again without being bothered about a spurious internal error.)
//
// But if no errors have been printed, or if -d panic has been specified,
// Fatalf prints the error as an "internal compiler error". In a released build,
// it prints an error asking to file a bug report. In development builds, it
// prints a stack trace.
//
// If -h has been specified, Fatalf panics to force the usual runtime info dump.
func Fatalf(format string, args ...interface{}) {
	FatalfAt(Pos, format, args...)
}

// FatalfAt reports a fatal error - an internal problem - at pos and exits.
// If other errors have already been printed, then FatalfAt just quietly exits.
// (The internal problem may have been caused by incomplete information
// after the already-reported errors, so best to let users fix those and
// try again without being bothered about a spurious internal error.)
//
// But if no errors have been printed, or if -d panic has been specified,
// FatalfAt prints the error as an "internal compiler error". In a released build,
// it prints an error asking to file a bug report. In development builds, it
// prints a stack trace.
//
// If -h has been specified, FatalfAt panics to force the usual runtime info dump.
func FatalfAt(pos src.XPos, format string, args ...interface{}) {
	FlushErrors()

	if Debug.Panic != 0 || numErrors == 0 {
		fmt.Printf("%v: internal compiler error: ", FmtPos(pos))
		fmt.Printf(format, args...)
		fmt.Printf("\n")

		// If this is a released compiler version, ask for a bug report.
		if strings.HasPrefix(objabi.Version, "go") {
			fmt.Printf("\n")
			fmt.Printf("Please file a bug report including a short program that triggers the error.\n")
			fmt.Printf("https://golang.org/issue/new\n")
		} else {
			// Not a release; dump a stack trace, too.
			fmt.Println()
			os.Stdout.Write(debug.Stack())
			fmt.Println()
		}
	}

	hcrash()
	ErrorExit()
}

// hcrash crashes the compiler when -h is set, to find out where a message is generated.
func hcrash() {
	if Flag.LowerH != 0 {
		FlushErrors()
		if Flag.LowerO != "" {
			os.Remove(Flag.LowerO)
		}
		panic("-h")
	}
}

// ErrorExit handles an error-status exit.
// It flushes any pending errors, removes the output file, and exits.
func ErrorExit() {
	FlushErrors()
	if Flag.LowerO != "" {
		os.Remove(Flag.LowerO)
	}
	os.Exit(2)
}

// ExitIfErrors calls ErrorExit if any errors have been reported.
func ExitIfErrors() {
	if Errors() > 0 {
		ErrorExit()
	}
}
@@ -5,6 +5,8 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 	"fmt"
@@ -69,11 +71,11 @@ func EqCanPanic(t *types.Type) bool {
 	switch t.Etype {
 	default:
 		return false
-	case TINTER:
+	case types.TINTER:
 		return true
-	case TARRAY:
+	case types.TARRAY:
 		return EqCanPanic(t.Elem())
-	case TSTRUCT:
+	case types.TSTRUCT:
 		for _, f := range t.FieldSlice() {
 			if !f.Sym.IsBlank() && EqCanPanic(f.Type) {
 				return true
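The TINTER, TARRAY, and TSTRUCT cases above correspond to the Go rule that comparing interface values (or arrays/structs containing them) can panic at run time when the dynamic type is not comparable. A minimal standalone illustration in ordinary Go (not part of this commit):

package main

import "fmt"

func main() {
	// Two interface values whose dynamic type ([]int) is not comparable:
	// the == below compiles, but panics at run time, which is exactly the
	// situation EqCanPanic detects for the generated equality functions.
	var x, y interface{} = []int{1}, []int{1}
	defer func() { fmt.Println("recovered:", recover()) }()
	fmt.Println(x == y)
}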
@@ -119,45 +121,45 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) {
 	}
 
 	switch t.Etype {
-	case TANY, TFORW:
+	case types.TANY, types.TFORW:
 		// will be defined later.
 		return ANOEQ, t
 
-	case TINT8, TUINT8, TINT16, TUINT16,
-		TINT32, TUINT32, TINT64, TUINT64,
-		TINT, TUINT, TUINTPTR,
-		TBOOL, TPTR,
-		TCHAN, TUNSAFEPTR:
+	case types.TINT8, types.TUINT8, types.TINT16, types.TUINT16,
+		types.TINT32, types.TUINT32, types.TINT64, types.TUINT64,
+		types.TINT, types.TUINT, types.TUINTPTR,
+		types.TBOOL, types.TPTR,
+		types.TCHAN, types.TUNSAFEPTR:
 		return AMEM, nil
 
-	case TFUNC, TMAP:
+	case types.TFUNC, types.TMAP:
 		return ANOEQ, t
 
-	case TFLOAT32:
+	case types.TFLOAT32:
 		return AFLOAT32, nil
 
-	case TFLOAT64:
+	case types.TFLOAT64:
 		return AFLOAT64, nil
 
-	case TCOMPLEX64:
+	case types.TCOMPLEX64:
 		return ACPLX64, nil
 
-	case TCOMPLEX128:
+	case types.TCOMPLEX128:
 		return ACPLX128, nil
 
-	case TSTRING:
+	case types.TSTRING:
 		return ASTRING, nil
 
-	case TINTER:
+	case types.TINTER:
 		if t.IsEmptyInterface() {
 			return ANILINTER, nil
 		}
 		return AINTER, nil
 
-	case TSLICE:
+	case types.TSLICE:
 		return ANOEQ, t
 
-	case TARRAY:
+	case types.TARRAY:
 		a, bad := algtype1(t.Elem())
 		switch a {
 		case AMEM:
@@ -177,7 +179,7 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) {
 
 		return ASPECIAL, nil
 
-	case TSTRUCT:
+	case types.TSTRUCT:
 		fields := t.FieldSlice()
 
 		// One-field struct is same as that one field alone.
@@ -203,7 +205,7 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) {
 		return ret, nil
 	}
 
-	Fatalf("algtype1: unexpected type %v", t)
+	base.Fatalf("algtype1: unexpected type %v", t)
 	return 0, nil
 }
 
@@ -214,7 +216,7 @@ func genhash(t *types.Type) *obj.LSym {
 	switch algtype(t) {
 	default:
 		// genhash is only called for types that have equality
-		Fatalf("genhash %v", t)
+		base.Fatalf("genhash %v", t)
 	case AMEM0:
 		return sysClosure("memhash0")
 	case AMEM8:
@ -282,24 +284,24 @@ func genhash(t *types.Type) *obj.LSym {
|
||||||
}
|
}
|
||||||
|
|
||||||
sym := typesymprefix(".hash", t)
|
sym := typesymprefix(".hash", t)
|
||||||
if Debug.r != 0 {
|
if base.Flag.LowerR != 0 {
|
||||||
fmt.Printf("genhash %v %v %v\n", closure, sym, t)
|
fmt.Printf("genhash %v %v %v\n", closure, sym, t)
|
||||||
}
|
}
|
||||||
|
|
||||||
lineno = autogeneratedPos // less confusing than end of input
|
base.Pos = autogeneratedPos // less confusing than end of input
|
||||||
dclcontext = PEXTERN
|
dclcontext = ir.PEXTERN
|
||||||
|
|
||||||
// func sym(p *T, h uintptr) uintptr
|
// func sym(p *T, h uintptr) uintptr
|
||||||
tfn := nod(OTFUNC, nil, nil)
|
tfn := ir.Nod(ir.OTFUNC, nil, nil)
|
||||||
tfn.List.Set2(
|
tfn.PtrList().Set2(
|
||||||
namedfield("p", types.NewPtr(t)),
|
namedfield("p", types.NewPtr(t)),
|
||||||
namedfield("h", types.Types[TUINTPTR]),
|
namedfield("h", types.Types[types.TUINTPTR]),
|
||||||
)
|
)
|
||||||
tfn.Rlist.Set1(anonfield(types.Types[TUINTPTR]))
|
tfn.PtrRlist().Set1(anonfield(types.Types[types.TUINTPTR]))
|
||||||
|
|
||||||
fn := dclfunc(sym, tfn)
|
fn := dclfunc(sym, tfn)
|
||||||
np := asNode(tfn.Type.Params().Field(0).Nname)
|
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
|
||||||
nh := asNode(tfn.Type.Params().Field(1).Nname)
|
nh := ir.AsNode(tfn.Type().Params().Field(1).Nname)
|
||||||
|
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
case types.TARRAY:
|
case types.TARRAY:
|
||||||
|
|
@ -308,25 +310,25 @@ func genhash(t *types.Type) *obj.LSym {
|
||||||
// pure memory.
|
// pure memory.
|
||||||
hashel := hashfor(t.Elem())
|
hashel := hashfor(t.Elem())
|
||||||
|
|
||||||
n := nod(ORANGE, nil, nod(ODEREF, np, nil))
|
n := ir.Nod(ir.ORANGE, nil, ir.Nod(ir.ODEREF, np, nil))
|
||||||
ni := newname(lookup("i"))
|
ni := NewName(lookup("i"))
|
||||||
ni.Type = types.Types[TINT]
|
ni.SetType(types.Types[types.TINT])
|
||||||
n.List.Set1(ni)
|
n.PtrList().Set1(ni)
|
||||||
n.SetColas(true)
|
n.SetColas(true)
|
||||||
colasdefn(n.List.Slice(), n)
|
colasdefn(n.List().Slice(), n)
|
||||||
ni = n.List.First()
|
ni = n.List().First()
|
||||||
|
|
||||||
// h = hashel(&p[i], h)
|
// h = hashel(&p[i], h)
|
||||||
call := nod(OCALL, hashel, nil)
|
call := ir.Nod(ir.OCALL, hashel, nil)
|
||||||
|
|
||||||
nx := nod(OINDEX, np, ni)
|
nx := ir.Nod(ir.OINDEX, np, ni)
|
||||||
nx.SetBounded(true)
|
nx.SetBounded(true)
|
||||||
na := nod(OADDR, nx, nil)
|
na := ir.Nod(ir.OADDR, nx, nil)
|
||||||
call.List.Append(na)
|
call.PtrList().Append(na)
|
||||||
call.List.Append(nh)
|
call.PtrList().Append(nh)
|
||||||
n.Nbody.Append(nod(OAS, nh, call))
|
n.PtrBody().Append(ir.Nod(ir.OAS, nh, call))
|
||||||
|
|
||||||
fn.Nbody.Append(n)
|
fn.PtrBody().Append(n)
|
||||||
|
|
||||||
case types.TSTRUCT:
|
case types.TSTRUCT:
|
||||||
// Walk the struct using memhash for runs of AMEM
|
// Walk the struct using memhash for runs of AMEM
|
||||||
|
|
@ -343,12 +345,12 @@ func genhash(t *types.Type) *obj.LSym {
|
||||||
// Hash non-memory fields with appropriate hash function.
|
// Hash non-memory fields with appropriate hash function.
|
||||||
if !IsRegularMemory(f.Type) {
|
if !IsRegularMemory(f.Type) {
|
||||||
hashel := hashfor(f.Type)
|
hashel := hashfor(f.Type)
|
||||||
call := nod(OCALL, hashel, nil)
|
call := ir.Nod(ir.OCALL, hashel, nil)
|
||||||
nx := nodSym(OXDOT, np, f.Sym) // TODO: fields from other packages?
|
nx := nodSym(ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
|
||||||
na := nod(OADDR, nx, nil)
|
na := ir.Nod(ir.OADDR, nx, nil)
|
||||||
call.List.Append(na)
|
call.PtrList().Append(na)
|
||||||
call.List.Append(nh)
|
call.PtrList().Append(nh)
|
||||||
fn.Nbody.Append(nod(OAS, nh, call))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nh, call))
|
||||||
i++
|
i++
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
@ -358,40 +360,40 @@ func genhash(t *types.Type) *obj.LSym {
|
||||||
|
|
||||||
// h = hashel(&p.first, size, h)
|
// h = hashel(&p.first, size, h)
|
||||||
hashel := hashmem(f.Type)
|
hashel := hashmem(f.Type)
|
||||||
call := nod(OCALL, hashel, nil)
|
call := ir.Nod(ir.OCALL, hashel, nil)
|
||||||
nx := nodSym(OXDOT, np, f.Sym) // TODO: fields from other packages?
|
nx := nodSym(ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
|
||||||
na := nod(OADDR, nx, nil)
|
na := ir.Nod(ir.OADDR, nx, nil)
|
||||||
call.List.Append(na)
|
call.PtrList().Append(na)
|
||||||
call.List.Append(nh)
|
call.PtrList().Append(nh)
|
||||||
call.List.Append(nodintconst(size))
|
call.PtrList().Append(nodintconst(size))
|
||||||
fn.Nbody.Append(nod(OAS, nh, call))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nh, call))
|
||||||
|
|
||||||
i = next
|
i = next
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
r := nod(ORETURN, nil, nil)
|
r := ir.Nod(ir.ORETURN, nil, nil)
|
||||||
r.List.Append(nh)
|
r.PtrList().Append(nh)
|
||||||
fn.Nbody.Append(r)
|
fn.PtrBody().Append(r)
|
||||||
|
|
||||||
if Debug.r != 0 {
|
if base.Flag.LowerR != 0 {
|
||||||
dumplist("genhash body", fn.Nbody)
|
ir.DumpList("genhash body", fn.Body())
|
||||||
}
|
}
|
||||||
|
|
||||||
funcbody()
|
funcbody()
|
||||||
|
|
||||||
fn.Func.SetDupok(true)
|
fn.Func().SetDupok(true)
|
||||||
fn = typecheck(fn, ctxStmt)
|
fn = typecheck(fn, ctxStmt)
|
||||||
|
|
||||||
Curfn = fn
|
Curfn = fn
|
||||||
typecheckslice(fn.Nbody.Slice(), ctxStmt)
|
typecheckslice(fn.Body().Slice(), ctxStmt)
|
||||||
Curfn = nil
|
Curfn = nil
|
||||||
|
|
||||||
if debug_dclstack != 0 {
|
if base.Debug.DclStack != 0 {
|
||||||
testdclstack()
|
testdclstack()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn.Func.SetNilCheckDisabled(true)
|
fn.Func().SetNilCheckDisabled(true)
|
||||||
xtop = append(xtop, fn)
|
xtop = append(xtop, fn)
|
||||||
|
|
||||||
// Build closure. It doesn't close over any variables, so
|
// Build closure. It doesn't close over any variables, so
|
||||||
|
|
@ -402,12 +404,12 @@ func genhash(t *types.Type) *obj.LSym {
|
||||||
return closure
|
return closure
|
||||||
}
|
}
|
||||||
|
|
||||||
func hashfor(t *types.Type) *Node {
|
func hashfor(t *types.Type) ir.Node {
|
||||||
var sym *types.Sym
|
var sym *types.Sym
|
||||||
|
|
||||||
switch a, _ := algtype1(t); a {
|
switch a, _ := algtype1(t); a {
|
||||||
case AMEM:
|
case AMEM:
|
||||||
Fatalf("hashfor with AMEM type")
|
base.Fatalf("hashfor with AMEM type")
|
||||||
case AINTER:
|
case AINTER:
|
||||||
sym = Runtimepkg.Lookup("interhash")
|
sym = Runtimepkg.Lookup("interhash")
|
||||||
case ANILINTER:
|
case ANILINTER:
|
||||||
|
|
@@ -428,14 +430,14 @@ func hashfor(t *types.Type) *Node {
 		sym = typesymprefix(".hash", t)
 	}
 
-	n := newname(sym)
+	n := NewName(sym)
 	setNodeNameFunc(n)
-	n.Type = functype(nil, []*Node{
+	n.SetType(functype(nil, []ir.Node{
 		anonfield(types.NewPtr(t)),
-		anonfield(types.Types[TUINTPTR]),
-	}, []*Node{
-		anonfield(types.Types[TUINTPTR]),
-	})
+		anonfield(types.Types[types.TUINTPTR]),
+	}, []ir.Node{
+		anonfield(types.Types[types.TUINTPTR]),
+	}))
 	return n
 }
 
||||||
|
|
@ -509,27 +511,27 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
return closure
|
return closure
|
||||||
}
|
}
|
||||||
sym := typesymprefix(".eq", t)
|
sym := typesymprefix(".eq", t)
|
||||||
if Debug.r != 0 {
|
if base.Flag.LowerR != 0 {
|
||||||
fmt.Printf("geneq %v\n", t)
|
fmt.Printf("geneq %v\n", t)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Autogenerate code for equality of structs and arrays.
|
// Autogenerate code for equality of structs and arrays.
|
||||||
|
|
||||||
lineno = autogeneratedPos // less confusing than end of input
|
base.Pos = autogeneratedPos // less confusing than end of input
|
||||||
dclcontext = PEXTERN
|
dclcontext = ir.PEXTERN
|
||||||
|
|
||||||
// func sym(p, q *T) bool
|
// func sym(p, q *T) bool
|
||||||
tfn := nod(OTFUNC, nil, nil)
|
tfn := ir.Nod(ir.OTFUNC, nil, nil)
|
||||||
tfn.List.Set2(
|
tfn.PtrList().Set2(
|
||||||
namedfield("p", types.NewPtr(t)),
|
namedfield("p", types.NewPtr(t)),
|
||||||
namedfield("q", types.NewPtr(t)),
|
namedfield("q", types.NewPtr(t)),
|
||||||
)
|
)
|
||||||
tfn.Rlist.Set1(namedfield("r", types.Types[TBOOL]))
|
tfn.PtrRlist().Set1(namedfield("r", types.Types[types.TBOOL]))
|
||||||
|
|
||||||
fn := dclfunc(sym, tfn)
|
fn := dclfunc(sym, tfn)
|
||||||
np := asNode(tfn.Type.Params().Field(0).Nname)
|
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
|
||||||
nq := asNode(tfn.Type.Params().Field(1).Nname)
|
nq := ir.AsNode(tfn.Type().Params().Field(1).Nname)
|
||||||
nr := asNode(tfn.Type.Results().Field(0).Nname)
|
nr := ir.AsNode(tfn.Type().Results().Field(0).Nname)
|
||||||
|
|
||||||
// Label to jump to if an equality test fails.
|
// Label to jump to if an equality test fails.
|
||||||
neq := autolabel(".neq")
|
neq := autolabel(".neq")
|
||||||
|
|
@ -539,9 +541,9 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
// so t must be either an array or a struct.
|
// so t must be either an array or a struct.
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
default:
|
default:
|
||||||
Fatalf("geneq %v", t)
|
base.Fatalf("geneq %v", t)
|
||||||
|
|
||||||
case TARRAY:
|
case types.TARRAY:
|
||||||
nelem := t.NumElem()
|
nelem := t.NumElem()
|
||||||
|
|
||||||
// checkAll generates code to check the equality of all array elements.
|
// checkAll generates code to check the equality of all array elements.
|
||||||
|
|
@ -565,17 +567,17 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
//
|
//
|
||||||
// TODO(josharian): consider doing some loop unrolling
|
// TODO(josharian): consider doing some loop unrolling
|
||||||
// for larger nelem as well, processing a few elements at a time in a loop.
|
// for larger nelem as well, processing a few elements at a time in a loop.
|
||||||
checkAll := func(unroll int64, last bool, eq func(pi, qi *Node) *Node) {
|
checkAll := func(unroll int64, last bool, eq func(pi, qi ir.Node) ir.Node) {
|
||||||
// checkIdx generates a node to check for equality at index i.
|
// checkIdx generates a node to check for equality at index i.
|
||||||
checkIdx := func(i *Node) *Node {
|
checkIdx := func(i ir.Node) ir.Node {
|
||||||
// pi := p[i]
|
// pi := p[i]
|
||||||
pi := nod(OINDEX, np, i)
|
pi := ir.Nod(ir.OINDEX, np, i)
|
||||||
pi.SetBounded(true)
|
pi.SetBounded(true)
|
||||||
pi.Type = t.Elem()
|
pi.SetType(t.Elem())
|
||||||
// qi := q[i]
|
// qi := q[i]
|
||||||
qi := nod(OINDEX, nq, i)
|
qi := ir.Nod(ir.OINDEX, nq, i)
|
||||||
qi.SetBounded(true)
|
qi.SetBounded(true)
|
||||||
qi.Type = t.Elem()
|
qi.SetType(t.Elem())
|
||||||
return eq(pi, qi)
|
return eq(pi, qi)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -587,68 +589,68 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
// Generate a series of checks.
|
// Generate a series of checks.
|
||||||
for i := int64(0); i < nelem; i++ {
|
for i := int64(0); i < nelem; i++ {
|
||||||
// if check {} else { goto neq }
|
// if check {} else { goto neq }
|
||||||
nif := nod(OIF, checkIdx(nodintconst(i)), nil)
|
nif := ir.Nod(ir.OIF, checkIdx(nodintconst(i)), nil)
|
||||||
nif.Rlist.Append(nodSym(OGOTO, nil, neq))
|
nif.PtrRlist().Append(nodSym(ir.OGOTO, nil, neq))
|
||||||
fn.Nbody.Append(nif)
|
fn.PtrBody().Append(nif)
|
||||||
}
|
}
|
||||||
if last {
|
if last {
|
||||||
fn.Nbody.Append(nod(OAS, nr, checkIdx(nodintconst(nelem))))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, checkIdx(nodintconst(nelem))))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Generate a for loop.
|
// Generate a for loop.
|
||||||
// for i := 0; i < nelem; i++
|
// for i := 0; i < nelem; i++
|
||||||
i := temp(types.Types[TINT])
|
i := temp(types.Types[types.TINT])
|
||||||
init := nod(OAS, i, nodintconst(0))
|
init := ir.Nod(ir.OAS, i, nodintconst(0))
|
||||||
cond := nod(OLT, i, nodintconst(nelem))
|
cond := ir.Nod(ir.OLT, i, nodintconst(nelem))
|
||||||
post := nod(OAS, i, nod(OADD, i, nodintconst(1)))
|
post := ir.Nod(ir.OAS, i, ir.Nod(ir.OADD, i, nodintconst(1)))
|
||||||
loop := nod(OFOR, cond, post)
|
loop := ir.Nod(ir.OFOR, cond, post)
|
||||||
loop.Ninit.Append(init)
|
loop.PtrInit().Append(init)
|
||||||
// if eq(pi, qi) {} else { goto neq }
|
// if eq(pi, qi) {} else { goto neq }
|
||||||
nif := nod(OIF, checkIdx(i), nil)
|
nif := ir.Nod(ir.OIF, checkIdx(i), nil)
|
||||||
nif.Rlist.Append(nodSym(OGOTO, nil, neq))
|
nif.PtrRlist().Append(nodSym(ir.OGOTO, nil, neq))
|
||||||
loop.Nbody.Append(nif)
|
loop.PtrBody().Append(nif)
|
||||||
fn.Nbody.Append(loop)
|
fn.PtrBody().Append(loop)
|
||||||
if last {
|
if last {
|
||||||
fn.Nbody.Append(nod(OAS, nr, nodbool(true)))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, nodbool(true)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
switch t.Elem().Etype {
|
switch t.Elem().Etype {
|
||||||
case TSTRING:
|
case types.TSTRING:
|
||||||
// Do two loops. First, check that all the lengths match (cheap).
|
// Do two loops. First, check that all the lengths match (cheap).
|
||||||
// Second, check that all the contents match (expensive).
|
// Second, check that all the contents match (expensive).
|
||||||
// TODO: when the array size is small, unroll the length match checks.
|
// TODO: when the array size is small, unroll the length match checks.
|
||||||
checkAll(3, false, func(pi, qi *Node) *Node {
|
checkAll(3, false, func(pi, qi ir.Node) ir.Node {
|
||||||
// Compare lengths.
|
// Compare lengths.
|
||||||
eqlen, _ := eqstring(pi, qi)
|
eqlen, _ := eqstring(pi, qi)
|
||||||
return eqlen
|
return eqlen
|
||||||
})
|
})
|
||||||
checkAll(1, true, func(pi, qi *Node) *Node {
|
checkAll(1, true, func(pi, qi ir.Node) ir.Node {
|
||||||
// Compare contents.
|
// Compare contents.
|
||||||
_, eqmem := eqstring(pi, qi)
|
_, eqmem := eqstring(pi, qi)
|
||||||
return eqmem
|
return eqmem
|
||||||
})
|
})
|
||||||
case TFLOAT32, TFLOAT64:
|
case types.TFLOAT32, types.TFLOAT64:
|
||||||
checkAll(2, true, func(pi, qi *Node) *Node {
|
checkAll(2, true, func(pi, qi ir.Node) ir.Node {
|
||||||
// p[i] == q[i]
|
// p[i] == q[i]
|
||||||
return nod(OEQ, pi, qi)
|
return ir.Nod(ir.OEQ, pi, qi)
|
||||||
})
|
})
|
||||||
// TODO: pick apart structs, do them piecemeal too
|
// TODO: pick apart structs, do them piecemeal too
|
||||||
default:
|
default:
|
||||||
checkAll(1, true, func(pi, qi *Node) *Node {
|
checkAll(1, true, func(pi, qi ir.Node) ir.Node {
|
||||||
// p[i] == q[i]
|
// p[i] == q[i]
|
||||||
return nod(OEQ, pi, qi)
|
return ir.Nod(ir.OEQ, pi, qi)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
case TSTRUCT:
|
case types.TSTRUCT:
|
||||||
// Build a list of conditions to satisfy.
|
// Build a list of conditions to satisfy.
|
||||||
// The conditions are a list-of-lists. Conditions are reorderable
|
// The conditions are a list-of-lists. Conditions are reorderable
|
||||||
// within each inner list. The outer lists must be evaluated in order.
|
// within each inner list. The outer lists must be evaluated in order.
|
||||||
var conds [][]*Node
|
var conds [][]ir.Node
|
||||||
conds = append(conds, []*Node{})
|
conds = append(conds, []ir.Node{})
|
||||||
and := func(n *Node) {
|
and := func(n ir.Node) {
|
||||||
i := len(conds) - 1
|
i := len(conds) - 1
|
||||||
conds[i] = append(conds[i], n)
|
conds[i] = append(conds[i], n)
|
||||||
}
|
}
|
||||||
|
|
@ -668,21 +670,21 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
if !IsRegularMemory(f.Type) {
|
if !IsRegularMemory(f.Type) {
|
||||||
if EqCanPanic(f.Type) {
|
if EqCanPanic(f.Type) {
|
||||||
// Enforce ordering by starting a new set of reorderable conditions.
|
// Enforce ordering by starting a new set of reorderable conditions.
|
||||||
conds = append(conds, []*Node{})
|
conds = append(conds, []ir.Node{})
|
||||||
}
|
}
|
||||||
p := nodSym(OXDOT, np, f.Sym)
|
p := nodSym(ir.OXDOT, np, f.Sym)
|
||||||
q := nodSym(OXDOT, nq, f.Sym)
|
q := nodSym(ir.OXDOT, nq, f.Sym)
|
||||||
switch {
|
switch {
|
||||||
case f.Type.IsString():
|
case f.Type.IsString():
|
||||||
eqlen, eqmem := eqstring(p, q)
|
eqlen, eqmem := eqstring(p, q)
|
||||||
and(eqlen)
|
and(eqlen)
|
||||||
and(eqmem)
|
and(eqmem)
|
||||||
default:
|
default:
|
||||||
and(nod(OEQ, p, q))
|
and(ir.Nod(ir.OEQ, p, q))
|
||||||
}
|
}
|
||||||
if EqCanPanic(f.Type) {
|
if EqCanPanic(f.Type) {
|
||||||
// Also enforce ordering after something that can panic.
|
// Also enforce ordering after something that can panic.
|
||||||
conds = append(conds, []*Node{})
|
conds = append(conds, []ir.Node{})
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
continue
|
continue
|
||||||
|
|
@ -707,10 +709,10 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
|
|
||||||
// Sort conditions to put runtime calls last.
|
// Sort conditions to put runtime calls last.
|
||||||
// Preserve the rest of the ordering.
|
// Preserve the rest of the ordering.
|
||||||
var flatConds []*Node
|
var flatConds []ir.Node
|
||||||
for _, c := range conds {
|
for _, c := range conds {
|
||||||
isCall := func(n *Node) bool {
|
isCall := func(n ir.Node) bool {
|
||||||
return n.Op == OCALL || n.Op == OCALLFUNC
|
return n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC
|
||||||
}
|
}
|
||||||
sort.SliceStable(c, func(i, j int) bool {
|
sort.SliceStable(c, func(i, j int) bool {
|
||||||
return !isCall(c[i]) && isCall(c[j])
|
return !isCall(c[i]) && isCall(c[j])
|
||||||
|
|
@ -719,54 +721,54 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(flatConds) == 0 {
|
if len(flatConds) == 0 {
|
||||||
fn.Nbody.Append(nod(OAS, nr, nodbool(true)))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, nodbool(true)))
|
||||||
} else {
|
} else {
|
||||||
for _, c := range flatConds[:len(flatConds)-1] {
|
for _, c := range flatConds[:len(flatConds)-1] {
|
||||||
// if cond {} else { goto neq }
|
// if cond {} else { goto neq }
|
||||||
n := nod(OIF, c, nil)
|
n := ir.Nod(ir.OIF, c, nil)
|
||||||
n.Rlist.Append(nodSym(OGOTO, nil, neq))
|
n.PtrRlist().Append(nodSym(ir.OGOTO, nil, neq))
|
||||||
fn.Nbody.Append(n)
|
fn.PtrBody().Append(n)
|
||||||
}
|
}
|
||||||
fn.Nbody.Append(nod(OAS, nr, flatConds[len(flatConds)-1]))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, flatConds[len(flatConds)-1]))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ret:
|
// ret:
|
||||||
// return
|
// return
|
||||||
ret := autolabel(".ret")
|
ret := autolabel(".ret")
|
||||||
fn.Nbody.Append(nodSym(OLABEL, nil, ret))
|
fn.PtrBody().Append(nodSym(ir.OLABEL, nil, ret))
|
||||||
fn.Nbody.Append(nod(ORETURN, nil, nil))
|
fn.PtrBody().Append(ir.Nod(ir.ORETURN, nil, nil))
|
||||||
|
|
||||||
// neq:
|
// neq:
|
||||||
// r = false
|
// r = false
|
||||||
// return (or goto ret)
|
// return (or goto ret)
|
||||||
fn.Nbody.Append(nodSym(OLABEL, nil, neq))
|
fn.PtrBody().Append(nodSym(ir.OLABEL, nil, neq))
|
||||||
fn.Nbody.Append(nod(OAS, nr, nodbool(false)))
|
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, nodbool(false)))
|
||||||
if EqCanPanic(t) || hasCall(fn) {
|
if EqCanPanic(t) || hasCall(fn) {
|
||||||
// Epilogue is large, so share it with the equal case.
|
// Epilogue is large, so share it with the equal case.
|
||||||
fn.Nbody.Append(nodSym(OGOTO, nil, ret))
|
fn.PtrBody().Append(nodSym(ir.OGOTO, nil, ret))
|
||||||
} else {
|
} else {
|
||||||
// Epilogue is small, so don't bother sharing.
|
// Epilogue is small, so don't bother sharing.
|
||||||
fn.Nbody.Append(nod(ORETURN, nil, nil))
|
fn.PtrBody().Append(ir.Nod(ir.ORETURN, nil, nil))
|
||||||
}
|
}
|
||||||
// TODO(khr): the epilogue size detection condition above isn't perfect.
|
// TODO(khr): the epilogue size detection condition above isn't perfect.
|
||||||
// We should really do a generic CL that shares epilogues across
|
// We should really do a generic CL that shares epilogues across
|
||||||
// the board. See #24936.
|
// the board. See #24936.
|
||||||
|
|
||||||
if Debug.r != 0 {
|
if base.Flag.LowerR != 0 {
|
||||||
dumplist("geneq body", fn.Nbody)
|
ir.DumpList("geneq body", fn.Body())
|
||||||
}
|
}
|
||||||
|
|
||||||
funcbody()
|
funcbody()
|
||||||
|
|
||||||
fn.Func.SetDupok(true)
|
fn.Func().SetDupok(true)
|
||||||
fn = typecheck(fn, ctxStmt)
|
fn = typecheck(fn, ctxStmt)
|
||||||
|
|
||||||
Curfn = fn
|
Curfn = fn
|
||||||
typecheckslice(fn.Nbody.Slice(), ctxStmt)
|
typecheckslice(fn.Body().Slice(), ctxStmt)
|
||||||
Curfn = nil
|
Curfn = nil
|
||||||
|
|
||||||
if debug_dclstack != 0 {
|
if base.Debug.DclStack != 0 {
|
||||||
testdclstack()
|
testdclstack()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -774,7 +776,7 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
// We are comparing a struct or an array,
|
// We are comparing a struct or an array,
|
||||||
// neither of which can be nil, and our comparisons
|
// neither of which can be nil, and our comparisons
|
||||||
// are shallow.
|
// are shallow.
|
||||||
fn.Func.SetNilCheckDisabled(true)
|
fn.Func().SetNilCheckDisabled(true)
|
||||||
xtop = append(xtop, fn)
|
xtop = append(xtop, fn)
|
||||||
|
|
||||||
// Generate a closure which points at the function we just generated.
|
// Generate a closure which points at the function we just generated.
|
||||||
|
|
@ -783,32 +785,32 @@ func geneq(t *types.Type) *obj.LSym {
|
||||||
return closure
|
return closure
|
||||||
}
|
}
|
||||||
|
|
||||||
func hasCall(n *Node) bool {
|
func hasCall(n ir.Node) bool {
|
||||||
if n.Op == OCALL || n.Op == OCALLFUNC {
|
if n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if n.Left != nil && hasCall(n.Left) {
|
if n.Left() != nil && hasCall(n.Left()) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if n.Right != nil && hasCall(n.Right) {
|
if n.Right() != nil && hasCall(n.Right()) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
for _, x := range n.Ninit.Slice() {
|
for _, x := range n.Init().Slice() {
|
||||||
if hasCall(x) {
|
if hasCall(x) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for _, x := range n.Nbody.Slice() {
|
for _, x := range n.Body().Slice() {
|
||||||
if hasCall(x) {
|
if hasCall(x) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for _, x := range n.List.Slice() {
|
for _, x := range n.List().Slice() {
|
||||||
if hasCall(x) {
|
if hasCall(x) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for _, x := range n.Rlist.Slice() {
|
for _, x := range n.Rlist().Slice() {
|
||||||
if hasCall(x) {
|
if hasCall(x) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
@@ -818,10 +820,10 @@ func hasCall(n *Node) bool {
 
 // eqfield returns the node
 // p.field == q.field
-func eqfield(p *Node, q *Node, field *types.Sym) *Node {
-	nx := nodSym(OXDOT, p, field)
-	ny := nodSym(OXDOT, q, field)
-	ne := nod(OEQ, nx, ny)
+func eqfield(p ir.Node, q ir.Node, field *types.Sym) ir.Node {
+	nx := nodSym(ir.OXDOT, p, field)
+	ny := nodSym(ir.OXDOT, q, field)
+	ne := ir.Nod(ir.OEQ, nx, ny)
 	return ne
 }
 
||||||
|
|
@ -831,23 +833,23 @@ func eqfield(p *Node, q *Node, field *types.Sym) *Node {
|
||||||
// memequal(s.ptr, t.ptr, len(s))
|
// memequal(s.ptr, t.ptr, len(s))
|
||||||
// which can be used to construct string equality comparison.
|
// which can be used to construct string equality comparison.
|
||||||
// eqlen must be evaluated before eqmem, and shortcircuiting is required.
|
// eqlen must be evaluated before eqmem, and shortcircuiting is required.
|
||||||
func eqstring(s, t *Node) (eqlen, eqmem *Node) {
|
func eqstring(s, t ir.Node) (eqlen, eqmem ir.Node) {
|
||||||
s = conv(s, types.Types[TSTRING])
|
s = conv(s, types.Types[types.TSTRING])
|
||||||
t = conv(t, types.Types[TSTRING])
|
t = conv(t, types.Types[types.TSTRING])
|
||||||
sptr := nod(OSPTR, s, nil)
|
sptr := ir.Nod(ir.OSPTR, s, nil)
|
||||||
tptr := nod(OSPTR, t, nil)
|
tptr := ir.Nod(ir.OSPTR, t, nil)
|
||||||
slen := conv(nod(OLEN, s, nil), types.Types[TUINTPTR])
|
slen := conv(ir.Nod(ir.OLEN, s, nil), types.Types[types.TUINTPTR])
|
||||||
tlen := conv(nod(OLEN, t, nil), types.Types[TUINTPTR])
|
tlen := conv(ir.Nod(ir.OLEN, t, nil), types.Types[types.TUINTPTR])
|
||||||
|
|
||||||
fn := syslook("memequal")
|
fn := syslook("memequal")
|
||||||
fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
|
fn = substArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
|
||||||
call := nod(OCALL, fn, nil)
|
call := ir.Nod(ir.OCALL, fn, nil)
|
||||||
call.List.Append(sptr, tptr, slen.copy())
|
call.PtrList().Append(sptr, tptr, ir.Copy(slen))
|
||||||
call = typecheck(call, ctxExpr|ctxMultiOK)
|
call = typecheck(call, ctxExpr|ctxMultiOK)
|
||||||
|
|
||||||
cmp := nod(OEQ, slen, tlen)
|
cmp := ir.Nod(ir.OEQ, slen, tlen)
|
||||||
cmp = typecheck(cmp, ctxExpr)
|
cmp = typecheck(cmp, ctxExpr)
|
||||||
cmp.Type = types.Types[TBOOL]
|
cmp.SetType(types.Types[types.TBOOL])
|
||||||
return cmp, call
|
return cmp, call
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
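The eqlen/eqmem split documented for eqstring above (compare lengths first, then memory, with short-circuiting) mirrors what ordinary Go code would do by hand. A rough standalone analogue using only the standard library (the generated code instead calls runtime.memequal on the string's pointer and length, which is not reachable from user code):

package main

import (
	"bytes"
	"fmt"
)

// stringsEqual follows the same two-step shape as the generated comparison:
// a cheap length check that short-circuits, then a byte-wise memory compare.
func stringsEqual(s, t string) bool {
	return len(s) == len(t) && bytes.Equal([]byte(s), []byte(t))
}

func main() {
	fmt.Println(stringsEqual("go", "go"), stringsEqual("go", "gc")) // true false
}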
@ -857,58 +859,58 @@ func eqstring(s, t *Node) (eqlen, eqmem *Node) {
|
||||||
// ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate)
|
// ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate)
|
||||||
// which can be used to construct interface equality comparison.
|
// which can be used to construct interface equality comparison.
|
||||||
// eqtab must be evaluated before eqdata, and shortcircuiting is required.
|
// eqtab must be evaluated before eqdata, and shortcircuiting is required.
|
||||||
func eqinterface(s, t *Node) (eqtab, eqdata *Node) {
|
func eqinterface(s, t ir.Node) (eqtab, eqdata ir.Node) {
|
||||||
if !types.Identical(s.Type, t.Type) {
|
if !types.Identical(s.Type(), t.Type()) {
|
||||||
Fatalf("eqinterface %v %v", s.Type, t.Type)
|
base.Fatalf("eqinterface %v %v", s.Type(), t.Type())
|
||||||
}
|
}
|
||||||
// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
|
// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
|
||||||
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
|
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
|
||||||
var fn *Node
|
var fn ir.Node
|
||||||
if s.Type.IsEmptyInterface() {
|
if s.Type().IsEmptyInterface() {
|
||||||
fn = syslook("efaceeq")
|
fn = syslook("efaceeq")
|
||||||
} else {
|
} else {
|
||||||
fn = syslook("ifaceeq")
|
fn = syslook("ifaceeq")
|
||||||
}
|
}
|
||||||
|
|
||||||
stab := nod(OITAB, s, nil)
|
stab := ir.Nod(ir.OITAB, s, nil)
|
||||||
ttab := nod(OITAB, t, nil)
|
ttab := ir.Nod(ir.OITAB, t, nil)
|
||||||
sdata := nod(OIDATA, s, nil)
|
sdata := ir.Nod(ir.OIDATA, s, nil)
|
||||||
tdata := nod(OIDATA, t, nil)
|
tdata := ir.Nod(ir.OIDATA, t, nil)
|
||||||
sdata.Type = types.Types[TUNSAFEPTR]
|
sdata.SetType(types.Types[types.TUNSAFEPTR])
|
||||||
tdata.Type = types.Types[TUNSAFEPTR]
|
tdata.SetType(types.Types[types.TUNSAFEPTR])
|
||||||
sdata.SetTypecheck(1)
|
sdata.SetTypecheck(1)
|
||||||
tdata.SetTypecheck(1)
|
tdata.SetTypecheck(1)
|
||||||
|
|
||||||
call := nod(OCALL, fn, nil)
|
call := ir.Nod(ir.OCALL, fn, nil)
|
||||||
call.List.Append(stab, sdata, tdata)
|
call.PtrList().Append(stab, sdata, tdata)
|
||||||
call = typecheck(call, ctxExpr|ctxMultiOK)
|
call = typecheck(call, ctxExpr|ctxMultiOK)
|
||||||
|
|
||||||
cmp := nod(OEQ, stab, ttab)
|
cmp := ir.Nod(ir.OEQ, stab, ttab)
|
||||||
cmp = typecheck(cmp, ctxExpr)
|
cmp = typecheck(cmp, ctxExpr)
|
||||||
cmp.Type = types.Types[TBOOL]
|
cmp.SetType(types.Types[types.TBOOL])
|
||||||
return cmp, call
|
return cmp, call
|
||||||
}
|
}
|
||||||
|
|
||||||
// eqmem returns the node
|
// eqmem returns the node
|
||||||
// memequal(&p.field, &q.field [, size])
|
// memequal(&p.field, &q.field [, size])
|
||||||
func eqmem(p *Node, q *Node, field *types.Sym, size int64) *Node {
|
func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
|
||||||
nx := nod(OADDR, nodSym(OXDOT, p, field), nil)
|
nx := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, p, field), nil)
|
||||||
ny := nod(OADDR, nodSym(OXDOT, q, field), nil)
|
ny := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, q, field), nil)
|
||||||
nx = typecheck(nx, ctxExpr)
|
nx = typecheck(nx, ctxExpr)
|
||||||
ny = typecheck(ny, ctxExpr)
|
ny = typecheck(ny, ctxExpr)
|
||||||
|
|
||||||
fn, needsize := eqmemfunc(size, nx.Type.Elem())
|
fn, needsize := eqmemfunc(size, nx.Type().Elem())
|
||||||
call := nod(OCALL, fn, nil)
|
call := ir.Nod(ir.OCALL, fn, nil)
|
||||||
call.List.Append(nx)
|
call.PtrList().Append(nx)
|
||||||
call.List.Append(ny)
|
call.PtrList().Append(ny)
|
||||||
if needsize {
|
if needsize {
|
||||||
call.List.Append(nodintconst(size))
|
call.PtrList().Append(nodintconst(size))
|
||||||
}
|
}
|
||||||
|
|
||||||
return call
|
return call
|
||||||
}
|
}
|
||||||
|
|
||||||
func eqmemfunc(size int64, t *types.Type) (fn *Node, needsize bool) {
|
func eqmemfunc(size int64, t *types.Type) (fn ir.Node, needsize bool) {
|
||||||
switch size {
|
switch size {
|
||||||
default:
|
default:
|
||||||
fn = syslook("memequal")
|
fn = syslook("memequal")
|
||||||
|
|
@@ -949,7 +951,7 @@ func memrun(t *types.Type, start int) (size int64, next int) {
 // by padding.
 func ispaddedfield(t *types.Type, i int) bool {
 	if !t.IsStruct() {
-		Fatalf("ispaddedfield called non-struct %v", t)
+		base.Fatalf("ispaddedfield called non-struct %v", t)
 	}
 	end := t.Width
 	if i+1 < t.NumFields() {
||||||
|
|
|
||||||
|
|
@@ -6,6 +6,8 @@ package gc
 
 import (
 	"bytes"
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"fmt"
 	"sort"
@@ -21,7 +23,7 @@ var defercalc int
 
 func Rnd(o int64, r int64) int64 {
 	if r < 1 || r > 8 || r&(r-1) != 0 {
-		Fatalf("rnd %d", r)
+		base.Fatalf("rnd %d", r)
 	}
 	return (o + r - 1) &^ (r - 1)
 }
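For reference, the Rnd helper above rounds o up to the next multiple of r, where r is expected to be a power of two no larger than 8. A tiny standalone check of the bit trick (ordinary Go, not from this commit):

package main

import "fmt"

// rnd rounds o up to the nearest multiple of r, assuming r is a power of two,
// using the same add-then-mask trick as Rnd: (o + r - 1) &^ (r - 1).
func rnd(o, r int64) int64 {
	return (o + r - 1) &^ (r - 1)
}

func main() {
	fmt.Println(rnd(13, 8), rnd(16, 8), rnd(1, 4)) // 16 16 4
}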
@ -39,7 +41,7 @@ func expandiface(t *types.Type) {
|
||||||
case langSupported(1, 14, t.Pkg()) && !explicit && types.Identical(m.Type, prev.Type):
|
case langSupported(1, 14, t.Pkg()) && !explicit && types.Identical(m.Type, prev.Type):
|
||||||
return
|
return
|
||||||
default:
|
default:
|
||||||
yyerrorl(m.Pos, "duplicate method %s", m.Sym.Name)
|
base.ErrorfAt(m.Pos, "duplicate method %s", m.Sym.Name)
|
||||||
}
|
}
|
||||||
methods = append(methods, m)
|
methods = append(methods, m)
|
||||||
}
|
}
|
||||||
|
|
@ -59,7 +61,7 @@ func expandiface(t *types.Type) {
|
||||||
}
|
}
|
||||||
|
|
||||||
if !m.Type.IsInterface() {
|
if !m.Type.IsInterface() {
|
||||||
yyerrorl(m.Pos, "interface contains embedded non-interface %v", m.Type)
|
base.ErrorfAt(m.Pos, "interface contains embedded non-interface %v", m.Type)
|
||||||
m.SetBroke(true)
|
m.SetBroke(true)
|
||||||
t.SetBroke(true)
|
t.SetBroke(true)
|
||||||
// Add to fields so that error messages
|
// Add to fields so that error messages
|
||||||
|
|
@ -74,11 +76,8 @@ func expandiface(t *types.Type) {
|
||||||
// (including broken ones, if any) and add to t's
|
// (including broken ones, if any) and add to t's
|
||||||
// method set.
|
// method set.
|
||||||
for _, t1 := range m.Type.Fields().Slice() {
|
for _, t1 := range m.Type.Fields().Slice() {
|
||||||
f := types.NewField()
|
// Use m.Pos rather than t1.Pos to preserve embedding position.
|
||||||
f.Pos = m.Pos // preserve embedding position
|
f := types.NewField(m.Pos, t1.Sym, t1.Type)
|
||||||
f.Sym = t1.Sym
|
|
||||||
f.Type = t1.Type
|
|
||||||
f.SetBroke(t1.Broke())
|
|
||||||
addMethod(f, false)
|
addMethod(f, false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -86,7 +85,7 @@ func expandiface(t *types.Type) {
|
||||||
sort.Sort(methcmp(methods))
|
sort.Sort(methcmp(methods))
|
||||||
|
|
||||||
if int64(len(methods)) >= thearch.MAXWIDTH/int64(Widthptr) {
|
if int64(len(methods)) >= thearch.MAXWIDTH/int64(Widthptr) {
|
||||||
yyerrorl(typePos(t), "interface too large")
|
base.ErrorfAt(typePos(t), "interface too large")
|
||||||
}
|
}
|
||||||
for i, m := range methods {
|
for i, m := range methods {
|
||||||
m.Offset = int64(i) * int64(Widthptr)
|
m.Offset = int64(i) * int64(Widthptr)
|
||||||
|
|
@ -119,7 +118,7 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
|
||||||
o = Rnd(o, int64(f.Type.Align))
|
o = Rnd(o, int64(f.Type.Align))
|
||||||
}
|
}
|
||||||
f.Offset = o
|
f.Offset = o
|
||||||
if n := asNode(f.Nname); n != nil {
|
if n := ir.AsNode(f.Nname); n != nil {
|
||||||
// addrescapes has similar code to update these offsets.
|
// addrescapes has similar code to update these offsets.
|
||||||
// Usually addrescapes runs after widstruct,
|
// Usually addrescapes runs after widstruct,
|
||||||
// in which case we could drop this,
|
// in which case we could drop this,
|
||||||
|
|
@ -127,17 +126,17 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
|
||||||
// NOTE(rsc): This comment may be stale.
|
// NOTE(rsc): This comment may be stale.
|
||||||
// It's possible the ordering has changed and this is
|
// It's possible the ordering has changed and this is
|
||||||
// now the common case. I'm not sure.
|
// now the common case. I'm not sure.
|
||||||
if n.Name.Param.Stackcopy != nil {
|
if n.Name().Param.Stackcopy != nil {
|
||||||
n.Name.Param.Stackcopy.Xoffset = o
|
n.Name().Param.Stackcopy.SetOffset(o)
|
||||||
n.Xoffset = 0
|
n.SetOffset(0)
|
||||||
} else {
|
} else {
|
||||||
n.Xoffset = o
|
n.SetOffset(o)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
w := f.Type.Width
|
w := f.Type.Width
|
||||||
if w < 0 {
|
if w < 0 {
|
||||||
Fatalf("invalid width %d", f.Type.Width)
|
base.Fatalf("invalid width %d", f.Type.Width)
|
||||||
}
|
}
|
||||||
if w == 0 {
|
if w == 0 {
|
||||||
lastzero = o
|
lastzero = o
|
||||||
|
|
@ -150,7 +149,7 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
|
||||||
maxwidth = 1<<31 - 1
|
maxwidth = 1<<31 - 1
|
||||||
}
|
}
|
||||||
if o >= maxwidth {
|
if o >= maxwidth {
|
||||||
yyerrorl(typePos(errtype), "type %L too large", errtype)
|
base.ErrorfAt(typePos(errtype), "type %L too large", errtype)
|
||||||
o = 8 // small but nonzero
|
o = 8 // small but nonzero
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -199,7 +198,7 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
*path = append(*path, t)
|
*path = append(*path, t)
|
||||||
if p := asNode(t.Nod).Name.Param; p != nil && findTypeLoop(p.Ntype.Type, path) {
|
if p := ir.AsNode(t.Nod).Name().Param; p != nil && findTypeLoop(p.Ntype.Type(), path) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
*path = (*path)[:len(*path)-1]
|
*path = (*path)[:len(*path)-1]
|
||||||
|
|
@ -207,17 +206,17 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
|
||||||
// Anonymous type. Recurse on contained types.
|
// Anonymous type. Recurse on contained types.
|
||||||
|
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
case TARRAY:
|
case types.TARRAY:
|
||||||
if findTypeLoop(t.Elem(), path) {
|
if findTypeLoop(t.Elem(), path) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
case TSTRUCT:
|
case types.TSTRUCT:
|
||||||
for _, f := range t.Fields().Slice() {
|
for _, f := range t.Fields().Slice() {
|
||||||
if findTypeLoop(f.Type, path) {
|
if findTypeLoop(f.Type, path) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case TINTER:
|
case types.TINTER:
|
||||||
for _, m := range t.Methods().Slice() {
|
for _, m := range t.Methods().Slice() {
|
||||||
if m.Type.IsInterface() { // embedded interface
|
if m.Type.IsInterface() { // embedded interface
|
||||||
if findTypeLoop(m.Type, path) {
|
if findTypeLoop(m.Type, path) {
|
||||||
|
|
@ -238,7 +237,7 @@ func reportTypeLoop(t *types.Type) {
|
||||||
|
|
||||||
var l []*types.Type
|
var l []*types.Type
|
||||||
if !findTypeLoop(t, &l) {
|
if !findTypeLoop(t, &l) {
|
||||||
Fatalf("failed to find type loop for: %v", t)
|
base.Fatalf("failed to find type loop for: %v", t)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rotate loop so that the earliest type declaration is first.
|
// Rotate loop so that the earliest type declaration is first.
|
||||||
|
|
@ -253,11 +252,11 @@ func reportTypeLoop(t *types.Type) {
|
||||||
var msg bytes.Buffer
|
var msg bytes.Buffer
|
||||||
fmt.Fprintf(&msg, "invalid recursive type %v\n", l[0])
|
fmt.Fprintf(&msg, "invalid recursive type %v\n", l[0])
|
||||||
for _, t := range l {
|
for _, t := range l {
|
||||||
fmt.Fprintf(&msg, "\t%v: %v refers to\n", linestr(typePos(t)), t)
|
fmt.Fprintf(&msg, "\t%v: %v refers to\n", base.FmtPos(typePos(t)), t)
|
||||||
t.SetBroke(true)
|
t.SetBroke(true)
|
||||||
}
|
}
|
||||||
fmt.Fprintf(&msg, "\t%v: %v", linestr(typePos(l[0])), l[0])
|
fmt.Fprintf(&msg, "\t%v: %v", base.FmtPos(typePos(l[0])), l[0])
|
||||||
yyerrorl(typePos(l[0]), msg.String())
|
base.ErrorfAt(typePos(l[0]), msg.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
// dowidth calculates and stores the size and alignment for t.
|
// dowidth calculates and stores the size and alignment for t.
|
||||||
|
|
@ -271,7 +270,7 @@ func dowidth(t *types.Type) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if Widthptr == 0 {
|
if Widthptr == 0 {
|
||||||
Fatalf("dowidth without betypeinit")
|
base.Fatalf("dowidth without betypeinit")
|
||||||
}
|
}
|
||||||
|
|
||||||
if t == nil {
|
if t == nil {
|
||||||
|
|
@ -295,7 +294,7 @@ func dowidth(t *types.Type) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
t.SetBroke(true)
|
t.SetBroke(true)
|
||||||
Fatalf("width not calculated: %v", t)
|
base.Fatalf("width not calculated: %v", t)
|
||||||
}
|
}
|
||||||
|
|
||||||
// break infinite recursion if the broken recursive type
|
// break infinite recursion if the broken recursive type
|
||||||
|
|
@ -307,9 +306,9 @@ func dowidth(t *types.Type) {
|
||||||
// defer checkwidth calls until after we're done
|
// defer checkwidth calls until after we're done
|
||||||
defercheckwidth()
|
defercheckwidth()
|
||||||
|
|
||||||
lno := lineno
|
lno := base.Pos
|
||||||
if asNode(t.Nod) != nil {
|
if ir.AsNode(t.Nod) != nil {
|
||||||
lineno = asNode(t.Nod).Pos
|
base.Pos = ir.AsNode(t.Nod).Pos()
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Width = -2
|
t.Width = -2
|
||||||
|
|
@ -317,7 +316,7 @@ func dowidth(t *types.Type) {
|
||||||
|
|
||||||
et := t.Etype
|
et := t.Etype
|
||||||
switch et {
|
switch et {
|
||||||
case TFUNC, TCHAN, TMAP, TSTRING:
|
case types.TFUNC, types.TCHAN, types.TMAP, types.TSTRING:
|
||||||
break
|
break
|
||||||
|
|
||||||
// simtype == 0 during bootstrap
|
// simtype == 0 during bootstrap
|
||||||
|
|
@ -330,44 +329,44 @@ func dowidth(t *types.Type) {
|
||||||
var w int64
|
var w int64
|
||||||
switch et {
|
switch et {
|
||||||
default:
|
default:
|
||||||
Fatalf("dowidth: unknown type: %v", t)
|
base.Fatalf("dowidth: unknown type: %v", t)
|
||||||
|
|
||||||
// compiler-specific stuff
|
// compiler-specific stuff
|
||||||
case TINT8, TUINT8, TBOOL:
|
case types.TINT8, types.TUINT8, types.TBOOL:
|
||||||
// bool is int8
|
// bool is int8
|
||||||
w = 1
|
w = 1
|
||||||
|
|
||||||
case TINT16, TUINT16:
|
case types.TINT16, types.TUINT16:
|
||||||
w = 2
|
w = 2
|
||||||
|
|
||||||
case TINT32, TUINT32, TFLOAT32:
|
case types.TINT32, types.TUINT32, types.TFLOAT32:
|
||||||
w = 4
|
w = 4
|
||||||
|
|
||||||
case TINT64, TUINT64, TFLOAT64:
|
case types.TINT64, types.TUINT64, types.TFLOAT64:
|
||||||
w = 8
|
w = 8
|
||||||
t.Align = uint8(Widthreg)
|
t.Align = uint8(Widthreg)
|
||||||
|
|
||||||
case TCOMPLEX64:
|
case types.TCOMPLEX64:
|
||||||
w = 8
|
w = 8
|
||||||
t.Align = 4
|
t.Align = 4
|
||||||
|
|
||||||
case TCOMPLEX128:
|
case types.TCOMPLEX128:
|
||||||
w = 16
|
w = 16
|
||||||
t.Align = uint8(Widthreg)
|
t.Align = uint8(Widthreg)
|
||||||
|
|
||||||
case TPTR:
|
case types.TPTR:
|
||||||
w = int64(Widthptr)
|
w = int64(Widthptr)
|
||||||
checkwidth(t.Elem())
|
checkwidth(t.Elem())
|
||||||
|
|
||||||
case TUNSAFEPTR:
|
case types.TUNSAFEPTR:
|
||||||
w = int64(Widthptr)
|
w = int64(Widthptr)
|
||||||
|
|
||||||
case TINTER: // implemented as 2 pointers
|
case types.TINTER: // implemented as 2 pointers
|
||||||
w = 2 * int64(Widthptr)
|
w = 2 * int64(Widthptr)
|
||||||
t.Align = uint8(Widthptr)
|
t.Align = uint8(Widthptr)
|
||||||
expandiface(t)
|
expandiface(t)
|
||||||
|
|
||||||
case TCHAN: // implemented as pointer
|
case types.TCHAN: // implemented as pointer
|
||||||
w = int64(Widthptr)
|
w = int64(Widthptr)
|
||||||
|
|
||||||
checkwidth(t.Elem())
|
checkwidth(t.Elem())
|
||||||
|
|
@ -377,35 +376,35 @@ func dowidth(t *types.Type) {
|
||||||
t1 := types.NewChanArgs(t)
|
t1 := types.NewChanArgs(t)
|
||||||
checkwidth(t1)
|
checkwidth(t1)
|
||||||
|
|
||||||
case TCHANARGS:
|
case types.TCHANARGS:
|
||||||
t1 := t.ChanArgs()
|
t1 := t.ChanArgs()
|
||||||
dowidth(t1) // just in case
|
dowidth(t1) // just in case
|
||||||
if t1.Elem().Width >= 1<<16 {
|
if t1.Elem().Width >= 1<<16 {
|
||||||
yyerrorl(typePos(t1), "channel element type too large (>64kB)")
|
base.ErrorfAt(typePos(t1), "channel element type too large (>64kB)")
|
||||||
}
|
}
|
||||||
w = 1 // anything will do
|
w = 1 // anything will do
|
||||||
|
|
||||||
case TMAP: // implemented as pointer
|
case types.TMAP: // implemented as pointer
|
||||||
w = int64(Widthptr)
|
w = int64(Widthptr)
|
||||||
checkwidth(t.Elem())
|
checkwidth(t.Elem())
|
||||||
checkwidth(t.Key())
|
checkwidth(t.Key())
|
||||||
|
|
||||||
case TFORW: // should have been filled in
|
case types.TFORW: // should have been filled in
|
||||||
reportTypeLoop(t)
|
reportTypeLoop(t)
|
||||||
w = 1 // anything will do
|
w = 1 // anything will do
|
||||||
|
|
||||||
case TANY:
|
case types.TANY:
|
||||||
// dummy type; should be replaced before use.
|
// not a real type; should be replaced before use.
|
||||||
Fatalf("dowidth any")
|
base.Fatalf("dowidth any")
|
||||||
|
|
||||||
case TSTRING:
|
case types.TSTRING:
|
||||||
if sizeofString == 0 {
|
if sizeofString == 0 {
|
||||||
Fatalf("early dowidth string")
|
base.Fatalf("early dowidth string")
|
||||||
}
|
}
|
||||||
w = sizeofString
|
w = sizeofString
|
||||||
t.Align = uint8(Widthptr)
|
t.Align = uint8(Widthptr)
|
||||||
|
|
||||||
case TARRAY:
|
case types.TARRAY:
|
||||||
if t.Elem() == nil {
|
if t.Elem() == nil {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
@ -414,13 +413,13 @@ func dowidth(t *types.Type) {
|
||||||
if t.Elem().Width != 0 {
|
if t.Elem().Width != 0 {
|
||||||
cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
|
cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
|
||||||
if uint64(t.NumElem()) > cap {
|
if uint64(t.NumElem()) > cap {
|
||||||
yyerrorl(typePos(t), "type %L larger than address space", t)
|
base.ErrorfAt(typePos(t), "type %L larger than address space", t)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
w = t.NumElem() * t.Elem().Width
|
w = t.NumElem() * t.Elem().Width
|
||||||
t.Align = t.Elem().Align
|
t.Align = t.Elem().Align
|
||||||
|
|
||||||
case TSLICE:
|
case types.TSLICE:
|
||||||
if t.Elem() == nil {
|
if t.Elem() == nil {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
@ -428,46 +427,46 @@ func dowidth(t *types.Type) {
|
||||||
checkwidth(t.Elem())
|
checkwidth(t.Elem())
|
||||||
t.Align = uint8(Widthptr)
|
t.Align = uint8(Widthptr)
|
||||||
|
|
||||||
case TSTRUCT:
|
case types.TSTRUCT:
|
||||||
if t.IsFuncArgStruct() {
|
if t.IsFuncArgStruct() {
|
||||||
Fatalf("dowidth fn struct %v", t)
|
base.Fatalf("dowidth fn struct %v", t)
|
||||||
}
|
}
|
||||||
w = widstruct(t, t, 0, 1)
|
w = widstruct(t, t, 0, 1)
|
||||||
|
|
||||||
// make fake type to check later to
|
// make fake type to check later to
|
||||||
// trigger function argument computation.
|
// trigger function argument computation.
|
||||||
case TFUNC:
|
case types.TFUNC:
|
||||||
t1 := types.NewFuncArgs(t)
|
t1 := types.NewFuncArgs(t)
|
||||||
checkwidth(t1)
|
checkwidth(t1)
|
||||||
w = int64(Widthptr) // width of func type is pointer
|
w = int64(Widthptr) // width of func type is pointer
|
||||||
|
|
||||||
// function is 3 cated structures;
|
// function is 3 cated structures;
|
||||||
// compute their widths as side-effect.
|
// compute their widths as side-effect.
|
||||||
case TFUNCARGS:
|
case types.TFUNCARGS:
|
||||||
t1 := t.FuncArgs()
|
t1 := t.FuncArgs()
|
||||||
w = widstruct(t1, t1.Recvs(), 0, 0)
|
w = widstruct(t1, t1.Recvs(), 0, 0)
|
||||||
w = widstruct(t1, t1.Params(), w, Widthreg)
|
w = widstruct(t1, t1.Params(), w, Widthreg)
|
||||||
w = widstruct(t1, t1.Results(), w, Widthreg)
|
w = widstruct(t1, t1.Results(), w, Widthreg)
|
||||||
t1.Extra.(*types.Func).Argwid = w
|
t1.Extra.(*types.Func).Argwid = w
|
||||||
if w%int64(Widthreg) != 0 {
|
if w%int64(Widthreg) != 0 {
|
||||||
Warn("bad type %v %d\n", t1, w)
|
base.Warn("bad type %v %d\n", t1, w)
|
||||||
}
|
}
|
||||||
t.Align = 1
|
t.Align = 1
|
||||||
}
|
}
|
||||||
|
|
||||||
if Widthptr == 4 && w != int64(int32(w)) {
|
if Widthptr == 4 && w != int64(int32(w)) {
|
||||||
yyerrorl(typePos(t), "type %v too large", t)
|
base.ErrorfAt(typePos(t), "type %v too large", t)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Width = w
|
t.Width = w
|
||||||
if t.Align == 0 {
|
if t.Align == 0 {
|
||||||
if w == 0 || w > 8 || w&(w-1) != 0 {
|
if w == 0 || w > 8 || w&(w-1) != 0 {
|
||||||
Fatalf("invalid alignment for %v", t)
|
base.Fatalf("invalid alignment for %v", t)
|
||||||
}
|
}
|
||||||
t.Align = uint8(w)
|
t.Align = uint8(w)
|
||||||
}
|
}
|
||||||
|
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
|
|
||||||
resumecheckwidth()
|
resumecheckwidth()
|
||||||
}
|
}
|
||||||
|
|
@ -498,7 +497,7 @@ func checkwidth(t *types.Type) {
|
||||||
// function arg structs should not be checked
|
// function arg structs should not be checked
|
||||||
// outside of the enclosing function.
|
// outside of the enclosing function.
|
||||||
if t.IsFuncArgStruct() {
|
if t.IsFuncArgStruct() {
|
||||||
Fatalf("checkwidth %v", t)
|
base.Fatalf("checkwidth %v", t)
|
||||||
}
|
}
|
||||||
|
|
||||||
if defercalc == 0 {
|
if defercalc == 0 {
|
||||||
|
|
|
||||||
|
|
@@ -5,6 +5,7 @@
 package gc
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 )
 
@@ -12,6 +13,15 @@ type exporter struct {
 	marked map[*types.Type]bool // types already seen by markType
 }
 
+// markObject visits a reachable object.
+func (p *exporter) markObject(n ir.Node) {
+	if n.Op() == ir.ONAME && n.Class() == ir.PFUNC {
+		inlFlood(n)
+	}
+
+	p.markType(n.Type())
+}
+
 // markType recursively visits types reachable from t to identify
 // functions whose inline bodies may be needed.
 func (p *exporter) markType(t *types.Type) {
@ -25,10 +35,10 @@ func (p *exporter) markType(t *types.Type) {
|
||||||
// only their unexpanded method set (i.e., exclusive of
|
// only their unexpanded method set (i.e., exclusive of
|
||||||
// interface embeddings), and the switch statement below
|
// interface embeddings), and the switch statement below
|
||||||
// handles their full method set.
|
// handles their full method set.
|
||||||
if t.Sym != nil && t.Etype != TINTER {
|
if t.Sym != nil && t.Etype != types.TINTER {
|
||||||
for _, m := range t.Methods().Slice() {
|
for _, m := range t.Methods().Slice() {
|
||||||
if types.IsExported(m.Sym.Name) {
|
if types.IsExported(m.Sym.Name) {
|
||||||
p.markType(m.Type)
|
p.markObject(ir.AsNode(m.Nname))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@@ -43,36 +53,31 @@ func (p *exporter) markType(t *types.Type) {
 	// the user already needs some way to construct values of
 	// those types.
 	switch t.Etype {
-	case TPTR, TARRAY, TSLICE:
+	case types.TPTR, types.TARRAY, types.TSLICE:
 		p.markType(t.Elem())
 
-	case TCHAN:
+	case types.TCHAN:
 		if t.ChanDir().CanRecv() {
 			p.markType(t.Elem())
 		}
 
-	case TMAP:
+	case types.TMAP:
 		p.markType(t.Key())
 		p.markType(t.Elem())
 
-	case TSTRUCT:
+	case types.TSTRUCT:
 		for _, f := range t.FieldSlice() {
 			if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
 				p.markType(f.Type)
 			}
 		}
 
-	case TFUNC:
-		// If t is the type of a function or method, then
-		// t.Nname() is its ONAME. Mark its inline body and
-		// any recursively called functions for export.
-		inlFlood(asNode(t.Nname()))
-
+	case types.TFUNC:
 		for _, f := range t.Results().FieldSlice() {
 			p.markType(f.Type)
 		}
 
-	case TINTER:
+	case types.TINTER:
 		for _, f := range t.FieldSlice() {
 			if types.IsExported(f.Sym.Name) {
 				p.markType(f.Type)
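The markType/markObject changes above implement a reachability walk over types so that only inline bodies that can actually be reached from exported API are flooded into the export data, with a `marked` map preventing revisits. As a rough standalone illustration of the same traversal idea outside the compiler (a toy type graph with invented field names, not the compiler's *types.Type):

```go
package main

import (
	"fmt"
	"unicode"
	"unicode/utf8"
)

type kind int

const (
	kindBasic kind = iota
	kindPtr
	kindStruct
	kindFunc
)

// typ is a toy type descriptor standing in for *types.Type.
type typ struct {
	name    string
	kind    kind
	elem    *typ            // pointee for kindPtr
	fields  map[string]*typ // fields for kindStruct
	results []*typ          // results for kindFunc
}

type marker struct {
	marked map[*typ]bool // types already visited, like exporter.marked
}

func isExported(name string) bool {
	r, _ := utf8.DecodeRuneInString(name)
	return unicode.IsUpper(r)
}

// markType visits each reachable type once, descending only through
// exported struct fields, in the spirit of the compiler's walk.
func (m *marker) markType(t *typ) {
	if t == nil || m.marked[t] {
		return
	}
	m.marked[t] = true

	switch t.kind {
	case kindPtr:
		m.markType(t.elem)
	case kindStruct:
		for name, ft := range t.fields {
			if isExported(name) {
				m.markType(ft)
			}
		}
	case kindFunc:
		for _, rt := range t.results {
			m.markType(rt)
		}
	}
}

func main() {
	inner := &typ{name: "Inner", kind: kindBasic}
	hidden := &typ{name: "hidden", kind: kindBasic}
	s := &typ{name: "S", kind: kindStruct, fields: map[string]*typ{
		"Exported":   inner,
		"unexported": hidden,
	}}
	m := &marker{marked: map[*typ]bool{}}
	m.markType(&typ{name: "*S", kind: kindPtr, elem: s})
	fmt.Println(m.marked[inner], m.marked[hidden]) // true false
}
```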
@@ -129,23 +134,23 @@ func predeclared() []*types.Type {
 		// elements have been initialized before
 		predecl = []*types.Type{
 			// basic types
-			types.Types[TBOOL],
-			types.Types[TINT],
-			types.Types[TINT8],
-			types.Types[TINT16],
-			types.Types[TINT32],
-			types.Types[TINT64],
-			types.Types[TUINT],
-			types.Types[TUINT8],
-			types.Types[TUINT16],
-			types.Types[TUINT32],
-			types.Types[TUINT64],
-			types.Types[TUINTPTR],
-			types.Types[TFLOAT32],
-			types.Types[TFLOAT64],
-			types.Types[TCOMPLEX64],
-			types.Types[TCOMPLEX128],
-			types.Types[TSTRING],
+			types.Types[types.TBOOL],
+			types.Types[types.TINT],
+			types.Types[types.TINT8],
+			types.Types[types.TINT16],
+			types.Types[types.TINT32],
+			types.Types[types.TINT64],
+			types.Types[types.TUINT],
+			types.Types[types.TUINT8],
+			types.Types[types.TUINT16],
+			types.Types[types.TUINT32],
+			types.Types[types.TUINT64],
+			types.Types[types.TUINTPTR],
+			types.Types[types.TFLOAT32],
+			types.Types[types.TFLOAT64],
+			types.Types[types.TCOMPLEX64],
+			types.Types[types.TCOMPLEX128],
+			types.Types[types.TSTRING],
 
 			// basic type aliases
 			types.Bytetype,
@@ -161,16 +166,16 @@ func predeclared() []*types.Type {
 			types.UntypedFloat,
 			types.UntypedComplex,
 			types.UntypedString,
-			types.Types[TNIL],
+			types.Types[types.TNIL],
 
 			// package unsafe
-			types.Types[TUNSAFEPTR],
+			types.Types[types.TUNSAFEPTR],
 
 			// invalid type (package contains errors)
-			types.Types[Txxx],
+			types.Types[types.Txxx],
 
 			// any type, for builtin export data
-			types.Types[TANY],
+			types.Types[types.TANY],
 		}
 	}
 	return predecl
@@ -5,20 +5,15 @@
 package gc
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/internal/src"
 )
 
-// numImport tracks how often a package with a given name is imported.
-// It is used to provide a better error message (by using the package
-// path to disambiguate) if a package that appears multiple times with
-// the same name appears in an error message.
-var numImport = make(map[string]int)
-
-func npos(pos src.XPos, n *Node) *Node {
-	n.Pos = pos
+func npos(pos src.XPos, n ir.Node) ir.Node {
+	n.SetPos(pos)
 	return n
 }
 
-func builtinCall(op Op) *Node {
-	return nod(OCALL, mkname(builtinpkg.Lookup(goopnames[op])), nil)
+func builtinCall(op ir.Op) ir.Node {
+	return ir.Nod(ir.OCALL, mkname(ir.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
 }
@@ -6,8 +6,11 @@
 
 package gc
 
-import "runtime"
+import (
+	"cmd/compile/internal/base"
+	"runtime"
+)
 
 func startMutexProfiling() {
-	Fatalf("mutex profiling unavailable in version %v", runtime.Version())
+	base.Fatalf("mutex profiling unavailable in version %v", runtime.Version())
 }
@@ -2,7 +2,10 @@
 
 package gc
 
-import "cmd/compile/internal/types"
+import (
+	"cmd/compile/internal/ir"
+	"cmd/compile/internal/types"
+)
 
 var runtimeDecls = [...]struct {
 	name string
@ -205,134 +208,134 @@ func runtimeTypes() []*types.Type {
|
||||||
var typs [131]*types.Type
|
var typs [131]*types.Type
|
||||||
typs[0] = types.Bytetype
|
typs[0] = types.Bytetype
|
||||||
typs[1] = types.NewPtr(typs[0])
|
typs[1] = types.NewPtr(typs[0])
|
||||||
typs[2] = types.Types[TANY]
|
typs[2] = types.Types[types.TANY]
|
||||||
typs[3] = types.NewPtr(typs[2])
|
typs[3] = types.NewPtr(typs[2])
|
||||||
typs[4] = functype(nil, []*Node{anonfield(typs[1])}, []*Node{anonfield(typs[3])})
|
typs[4] = functype(nil, []ir.Node{anonfield(typs[1])}, []ir.Node{anonfield(typs[3])})
|
||||||
typs[5] = types.Types[TUINTPTR]
|
typs[5] = types.Types[types.TUINTPTR]
|
||||||
typs[6] = types.Types[TBOOL]
|
typs[6] = types.Types[types.TBOOL]
|
||||||
typs[7] = types.Types[TUNSAFEPTR]
|
typs[7] = types.Types[types.TUNSAFEPTR]
|
||||||
typs[8] = functype(nil, []*Node{anonfield(typs[5]), anonfield(typs[1]), anonfield(typs[6])}, []*Node{anonfield(typs[7])})
|
typs[8] = functype(nil, []ir.Node{anonfield(typs[5]), anonfield(typs[1]), anonfield(typs[6])}, []ir.Node{anonfield(typs[7])})
|
||||||
typs[9] = functype(nil, nil, nil)
|
typs[9] = functype(nil, nil, nil)
|
||||||
typs[10] = types.Types[TINTER]
|
typs[10] = types.Types[types.TINTER]
|
||||||
typs[11] = functype(nil, []*Node{anonfield(typs[10])}, nil)
|
typs[11] = functype(nil, []ir.Node{anonfield(typs[10])}, nil)
|
||||||
typs[12] = types.Types[TINT32]
|
typs[12] = types.Types[types.TINT32]
|
||||||
typs[13] = types.NewPtr(typs[12])
|
typs[13] = types.NewPtr(typs[12])
|
||||||
typs[14] = functype(nil, []*Node{anonfield(typs[13])}, []*Node{anonfield(typs[10])})
|
typs[14] = functype(nil, []ir.Node{anonfield(typs[13])}, []ir.Node{anonfield(typs[10])})
|
||||||
typs[15] = types.Types[TINT]
|
typs[15] = types.Types[types.TINT]
|
||||||
typs[16] = functype(nil, []*Node{anonfield(typs[15]), anonfield(typs[15])}, nil)
|
typs[16] = functype(nil, []ir.Node{anonfield(typs[15]), anonfield(typs[15])}, nil)
|
||||||
typs[17] = types.Types[TUINT]
|
typs[17] = types.Types[types.TUINT]
|
||||||
typs[18] = functype(nil, []*Node{anonfield(typs[17]), anonfield(typs[15])}, nil)
|
typs[18] = functype(nil, []ir.Node{anonfield(typs[17]), anonfield(typs[15])}, nil)
|
||||||
typs[19] = functype(nil, []*Node{anonfield(typs[6])}, nil)
|
typs[19] = functype(nil, []ir.Node{anonfield(typs[6])}, nil)
|
||||||
typs[20] = types.Types[TFLOAT64]
|
typs[20] = types.Types[types.TFLOAT64]
|
||||||
typs[21] = functype(nil, []*Node{anonfield(typs[20])}, nil)
|
typs[21] = functype(nil, []ir.Node{anonfield(typs[20])}, nil)
|
||||||
typs[22] = types.Types[TINT64]
|
typs[22] = types.Types[types.TINT64]
|
||||||
typs[23] = functype(nil, []*Node{anonfield(typs[22])}, nil)
|
typs[23] = functype(nil, []ir.Node{anonfield(typs[22])}, nil)
|
||||||
typs[24] = types.Types[TUINT64]
|
typs[24] = types.Types[types.TUINT64]
|
||||||
typs[25] = functype(nil, []*Node{anonfield(typs[24])}, nil)
|
typs[25] = functype(nil, []ir.Node{anonfield(typs[24])}, nil)
|
||||||
typs[26] = types.Types[TCOMPLEX128]
|
typs[26] = types.Types[types.TCOMPLEX128]
|
||||||
typs[27] = functype(nil, []*Node{anonfield(typs[26])}, nil)
|
typs[27] = functype(nil, []ir.Node{anonfield(typs[26])}, nil)
|
||||||
typs[28] = types.Types[TSTRING]
|
typs[28] = types.Types[types.TSTRING]
|
||||||
typs[29] = functype(nil, []*Node{anonfield(typs[28])}, nil)
|
typs[29] = functype(nil, []ir.Node{anonfield(typs[28])}, nil)
|
||||||
typs[30] = functype(nil, []*Node{anonfield(typs[2])}, nil)
|
typs[30] = functype(nil, []ir.Node{anonfield(typs[2])}, nil)
|
||||||
typs[31] = functype(nil, []*Node{anonfield(typs[5])}, nil)
|
typs[31] = functype(nil, []ir.Node{anonfield(typs[5])}, nil)
|
||||||
typs[32] = types.NewArray(typs[0], 32)
|
typs[32] = types.NewArray(typs[0], 32)
|
||||||
typs[33] = types.NewPtr(typs[32])
|
typs[33] = types.NewPtr(typs[32])
|
||||||
typs[34] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
|
typs[34] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[35] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
|
typs[35] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[36] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
|
typs[36] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[37] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
|
typs[37] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[38] = types.NewSlice(typs[28])
|
typs[38] = types.NewSlice(typs[28])
|
||||||
typs[39] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[38])}, []*Node{anonfield(typs[28])})
|
typs[39] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[38])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[40] = functype(nil, []*Node{anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[15])})
|
typs[40] = functype(nil, []ir.Node{anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[15])})
|
||||||
typs[41] = types.NewArray(typs[0], 4)
|
typs[41] = types.NewArray(typs[0], 4)
|
||||||
typs[42] = types.NewPtr(typs[41])
|
typs[42] = types.NewPtr(typs[41])
|
||||||
typs[43] = functype(nil, []*Node{anonfield(typs[42]), anonfield(typs[22])}, []*Node{anonfield(typs[28])})
|
typs[43] = functype(nil, []ir.Node{anonfield(typs[42]), anonfield(typs[22])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[44] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[28])})
|
typs[44] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[45] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[28])})
|
typs[45] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[46] = types.Runetype
|
typs[46] = types.Runetype
|
||||||
typs[47] = types.NewSlice(typs[46])
|
typs[47] = types.NewSlice(typs[46])
|
||||||
typs[48] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[47])}, []*Node{anonfield(typs[28])})
|
typs[48] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[47])}, []ir.Node{anonfield(typs[28])})
|
||||||
typs[49] = types.NewSlice(typs[0])
|
typs[49] = types.NewSlice(typs[0])
|
||||||
typs[50] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28])}, []*Node{anonfield(typs[49])})
|
typs[50] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28])}, []ir.Node{anonfield(typs[49])})
|
||||||
typs[51] = types.NewArray(typs[46], 32)
|
typs[51] = types.NewArray(typs[46], 32)
|
||||||
typs[52] = types.NewPtr(typs[51])
|
typs[52] = types.NewPtr(typs[51])
|
||||||
typs[53] = functype(nil, []*Node{anonfield(typs[52]), anonfield(typs[28])}, []*Node{anonfield(typs[47])})
|
typs[53] = functype(nil, []ir.Node{anonfield(typs[52]), anonfield(typs[28])}, []ir.Node{anonfield(typs[47])})
|
||||||
typs[54] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[5])}, []*Node{anonfield(typs[15])})
|
typs[54] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[5])}, []ir.Node{anonfield(typs[15])})
|
||||||
typs[55] = functype(nil, []*Node{anonfield(typs[28]), anonfield(typs[15])}, []*Node{anonfield(typs[46]), anonfield(typs[15])})
|
typs[55] = functype(nil, []ir.Node{anonfield(typs[28]), anonfield(typs[15])}, []ir.Node{anonfield(typs[46]), anonfield(typs[15])})
|
||||||
typs[56] = functype(nil, []*Node{anonfield(typs[28])}, []*Node{anonfield(typs[15])})
|
typs[56] = functype(nil, []ir.Node{anonfield(typs[28])}, []ir.Node{anonfield(typs[15])})
|
||||||
typs[57] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2])}, []*Node{anonfield(typs[2])})
|
typs[57] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []ir.Node{anonfield(typs[2])})
|
||||||
typs[58] = functype(nil, []*Node{anonfield(typs[2])}, []*Node{anonfield(typs[7])})
|
typs[58] = functype(nil, []ir.Node{anonfield(typs[2])}, []ir.Node{anonfield(typs[7])})
|
||||||
typs[59] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3])}, []*Node{anonfield(typs[2])})
|
typs[59] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3])}, []ir.Node{anonfield(typs[2])})
|
||||||
typs[60] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2])}, []*Node{anonfield(typs[2]), anonfield(typs[6])})
|
typs[60] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []ir.Node{anonfield(typs[2]), anonfield(typs[6])})
|
||||||
typs[61] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil)
|
typs[61] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil)
|
||||||
typs[62] = functype(nil, []*Node{anonfield(typs[1])}, nil)
|
typs[62] = functype(nil, []ir.Node{anonfield(typs[1])}, nil)
|
||||||
typs[63] = types.NewPtr(typs[5])
|
typs[63] = types.NewPtr(typs[5])
|
||||||
typs[64] = functype(nil, []*Node{anonfield(typs[63]), anonfield(typs[7]), anonfield(typs[7])}, []*Node{anonfield(typs[6])})
|
typs[64] = functype(nil, []ir.Node{anonfield(typs[63]), anonfield(typs[7]), anonfield(typs[7])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[65] = types.Types[TUINT32]
|
typs[65] = types.Types[types.TUINT32]
|
||||||
typs[66] = functype(nil, nil, []*Node{anonfield(typs[65])})
|
typs[66] = functype(nil, nil, []ir.Node{anonfield(typs[65])})
|
||||||
typs[67] = types.NewMap(typs[2], typs[2])
|
typs[67] = types.NewMap(typs[2], typs[2])
|
||||||
typs[68] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[3])}, []*Node{anonfield(typs[67])})
|
typs[68] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[3])}, []ir.Node{anonfield(typs[67])})
|
||||||
typs[69] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3])}, []*Node{anonfield(typs[67])})
|
typs[69] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3])}, []ir.Node{anonfield(typs[67])})
|
||||||
typs[70] = functype(nil, nil, []*Node{anonfield(typs[67])})
|
typs[70] = functype(nil, nil, []ir.Node{anonfield(typs[67])})
|
||||||
typs[71] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*Node{anonfield(typs[3])})
|
typs[71] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []ir.Node{anonfield(typs[3])})
|
||||||
typs[72] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*Node{anonfield(typs[3])})
|
typs[72] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []ir.Node{anonfield(typs[3])})
|
||||||
typs[73] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*Node{anonfield(typs[3])})
|
typs[73] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []ir.Node{anonfield(typs[3])})
|
||||||
typs[74] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*Node{anonfield(typs[3]), anonfield(typs[6])})
|
typs[74] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []ir.Node{anonfield(typs[3]), anonfield(typs[6])})
|
||||||
typs[75] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*Node{anonfield(typs[3]), anonfield(typs[6])})
|
typs[75] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []ir.Node{anonfield(typs[3]), anonfield(typs[6])})
|
||||||
typs[76] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*Node{anonfield(typs[3]), anonfield(typs[6])})
|
typs[76] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []ir.Node{anonfield(typs[3]), anonfield(typs[6])})
|
||||||
typs[77] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, nil)
|
typs[77] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, nil)
|
||||||
typs[78] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, nil)
|
typs[78] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, nil)
|
||||||
typs[79] = functype(nil, []*Node{anonfield(typs[3])}, nil)
|
typs[79] = functype(nil, []ir.Node{anonfield(typs[3])}, nil)
|
||||||
typs[80] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67])}, nil)
|
typs[80] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67])}, nil)
|
||||||
typs[81] = types.NewChan(typs[2], types.Cboth)
|
typs[81] = types.NewChan(typs[2], types.Cboth)
|
||||||
typs[82] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[22])}, []*Node{anonfield(typs[81])})
|
typs[82] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[22])}, []ir.Node{anonfield(typs[81])})
|
||||||
typs[83] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[81])})
|
typs[83] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []ir.Node{anonfield(typs[81])})
|
||||||
typs[84] = types.NewChan(typs[2], types.Crecv)
|
typs[84] = types.NewChan(typs[2], types.Crecv)
|
||||||
typs[85] = functype(nil, []*Node{anonfield(typs[84]), anonfield(typs[3])}, nil)
|
typs[85] = functype(nil, []ir.Node{anonfield(typs[84]), anonfield(typs[3])}, nil)
|
||||||
typs[86] = functype(nil, []*Node{anonfield(typs[84]), anonfield(typs[3])}, []*Node{anonfield(typs[6])})
|
typs[86] = functype(nil, []ir.Node{anonfield(typs[84]), anonfield(typs[3])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[87] = types.NewChan(typs[2], types.Csend)
|
typs[87] = types.NewChan(typs[2], types.Csend)
|
||||||
typs[88] = functype(nil, []*Node{anonfield(typs[87]), anonfield(typs[3])}, nil)
|
typs[88] = functype(nil, []ir.Node{anonfield(typs[87]), anonfield(typs[3])}, nil)
|
||||||
typs[89] = types.NewArray(typs[0], 3)
|
typs[89] = types.NewArray(typs[0], 3)
|
||||||
typs[90] = tostruct([]*Node{namedfield("enabled", typs[6]), namedfield("pad", typs[89]), namedfield("needed", typs[6]), namedfield("cgo", typs[6]), namedfield("alignme", typs[24])})
|
typs[90] = tostruct([]ir.Node{namedfield("enabled", typs[6]), namedfield("pad", typs[89]), namedfield("needed", typs[6]), namedfield("cgo", typs[6]), namedfield("alignme", typs[24])})
|
||||||
typs[91] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil)
|
typs[91] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil)
|
||||||
typs[92] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3])}, nil)
|
typs[92] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3])}, nil)
|
||||||
typs[93] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15])}, []*Node{anonfield(typs[15])})
|
typs[93] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15])}, []ir.Node{anonfield(typs[15])})
|
||||||
typs[94] = functype(nil, []*Node{anonfield(typs[87]), anonfield(typs[3])}, []*Node{anonfield(typs[6])})
|
typs[94] = functype(nil, []ir.Node{anonfield(typs[87]), anonfield(typs[3])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[95] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[84])}, []*Node{anonfield(typs[6])})
|
typs[95] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[84])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[96] = types.NewPtr(typs[6])
|
typs[96] = types.NewPtr(typs[6])
|
||||||
typs[97] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[96]), anonfield(typs[84])}, []*Node{anonfield(typs[6])})
|
typs[97] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[96]), anonfield(typs[84])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[98] = functype(nil, []*Node{anonfield(typs[63])}, nil)
|
typs[98] = functype(nil, []ir.Node{anonfield(typs[63])}, nil)
|
||||||
typs[99] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[63]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[6])}, []*Node{anonfield(typs[15]), anonfield(typs[6])})
|
typs[99] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[63]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[6])}, []ir.Node{anonfield(typs[15]), anonfield(typs[6])})
|
||||||
typs[100] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []*Node{anonfield(typs[7])})
|
typs[100] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []ir.Node{anonfield(typs[7])})
|
||||||
typs[101] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[22])}, []*Node{anonfield(typs[7])})
|
typs[101] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[22])}, []ir.Node{anonfield(typs[7])})
|
||||||
typs[102] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[7])}, []*Node{anonfield(typs[7])})
|
typs[102] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[7])}, []ir.Node{anonfield(typs[7])})
|
||||||
typs[103] = types.NewSlice(typs[2])
|
typs[103] = types.NewSlice(typs[2])
|
||||||
typs[104] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[103]), anonfield(typs[15])}, []*Node{anonfield(typs[103])})
|
typs[104] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[103]), anonfield(typs[15])}, []ir.Node{anonfield(typs[103])})
|
||||||
typs[105] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, nil)
|
typs[105] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, nil)
|
||||||
typs[106] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[5])}, nil)
|
typs[106] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[5])}, nil)
|
||||||
typs[107] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, []*Node{anonfield(typs[6])})
|
typs[107] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[108] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[3])}, []*Node{anonfield(typs[6])})
|
typs[108] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[3])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[109] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[7])}, []*Node{anonfield(typs[6])})
|
typs[109] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[7])}, []ir.Node{anonfield(typs[6])})
|
||||||
typs[110] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[5]), anonfield(typs[5])}, []*Node{anonfield(typs[5])})
|
typs[110] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[5]), anonfield(typs[5])}, []ir.Node{anonfield(typs[5])})
|
||||||
typs[111] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[5])}, []*Node{anonfield(typs[5])})
|
typs[111] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[5])}, []ir.Node{anonfield(typs[5])})
|
||||||
typs[112] = functype(nil, []*Node{anonfield(typs[22]), anonfield(typs[22])}, []*Node{anonfield(typs[22])})
|
typs[112] = functype(nil, []ir.Node{anonfield(typs[22]), anonfield(typs[22])}, []ir.Node{anonfield(typs[22])})
|
||||||
typs[113] = functype(nil, []*Node{anonfield(typs[24]), anonfield(typs[24])}, []*Node{anonfield(typs[24])})
|
typs[113] = functype(nil, []ir.Node{anonfield(typs[24]), anonfield(typs[24])}, []ir.Node{anonfield(typs[24])})
|
||||||
typs[114] = functype(nil, []*Node{anonfield(typs[20])}, []*Node{anonfield(typs[22])})
|
typs[114] = functype(nil, []ir.Node{anonfield(typs[20])}, []ir.Node{anonfield(typs[22])})
|
||||||
typs[115] = functype(nil, []*Node{anonfield(typs[20])}, []*Node{anonfield(typs[24])})
|
typs[115] = functype(nil, []ir.Node{anonfield(typs[20])}, []ir.Node{anonfield(typs[24])})
|
||||||
typs[116] = functype(nil, []*Node{anonfield(typs[20])}, []*Node{anonfield(typs[65])})
|
typs[116] = functype(nil, []ir.Node{anonfield(typs[20])}, []ir.Node{anonfield(typs[65])})
|
||||||
typs[117] = functype(nil, []*Node{anonfield(typs[22])}, []*Node{anonfield(typs[20])})
|
typs[117] = functype(nil, []ir.Node{anonfield(typs[22])}, []ir.Node{anonfield(typs[20])})
|
||||||
typs[118] = functype(nil, []*Node{anonfield(typs[24])}, []*Node{anonfield(typs[20])})
|
typs[118] = functype(nil, []ir.Node{anonfield(typs[24])}, []ir.Node{anonfield(typs[20])})
|
||||||
typs[119] = functype(nil, []*Node{anonfield(typs[65])}, []*Node{anonfield(typs[20])})
|
typs[119] = functype(nil, []ir.Node{anonfield(typs[65])}, []ir.Node{anonfield(typs[20])})
|
||||||
typs[120] = functype(nil, []*Node{anonfield(typs[26]), anonfield(typs[26])}, []*Node{anonfield(typs[26])})
|
typs[120] = functype(nil, []ir.Node{anonfield(typs[26]), anonfield(typs[26])}, []ir.Node{anonfield(typs[26])})
|
||||||
typs[121] = functype(nil, []*Node{anonfield(typs[5]), anonfield(typs[5])}, nil)
|
typs[121] = functype(nil, []ir.Node{anonfield(typs[5]), anonfield(typs[5])}, nil)
|
||||||
typs[122] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[1]), anonfield(typs[5])}, nil)
|
typs[122] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[1]), anonfield(typs[5])}, nil)
|
||||||
typs[123] = types.NewSlice(typs[7])
|
typs[123] = types.NewSlice(typs[7])
|
||||||
typs[124] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[123])}, nil)
|
typs[124] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[123])}, nil)
|
||||||
typs[125] = types.Types[TUINT8]
|
typs[125] = types.Types[types.TUINT8]
|
||||||
typs[126] = functype(nil, []*Node{anonfield(typs[125]), anonfield(typs[125])}, nil)
|
typs[126] = functype(nil, []ir.Node{anonfield(typs[125]), anonfield(typs[125])}, nil)
|
||||||
typs[127] = types.Types[TUINT16]
|
typs[127] = types.Types[types.TUINT16]
|
||||||
typs[128] = functype(nil, []*Node{anonfield(typs[127]), anonfield(typs[127])}, nil)
|
typs[128] = functype(nil, []ir.Node{anonfield(typs[127]), anonfield(typs[127])}, nil)
|
||||||
typs[129] = functype(nil, []*Node{anonfield(typs[65]), anonfield(typs[65])}, nil)
|
typs[129] = functype(nil, []ir.Node{anonfield(typs[65]), anonfield(typs[65])}, nil)
|
||||||
typs[130] = functype(nil, []*Node{anonfield(typs[24]), anonfield(typs[24])}, nil)
|
typs[130] = functype(nil, []ir.Node{anonfield(typs[24]), anonfield(typs[24])}, nil)
|
||||||
return typs[:]
|
return typs[:]
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
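The generated runtimeTypes table above builds each runtime helper signature from earlier slots of a fixed array. As a rough sketch of that table-building pattern only (the sig type, funcSig helper, and example signatures here are invented; the compiler builds real *types.Type values via functype/anonfield):

```go
package main

import "fmt"

// sig is a stand-in for a function type descriptor.
type sig struct {
	params  []string
	results []string
}

func funcSig(params, results []string) *sig {
	return &sig{params: params, results: results}
}

// buildTypes mirrors the shape of the generated runtimeTypes table: a fixed
// array where later slots are defined in terms of earlier slots.
func buildTypes() []*sig {
	var typs [4]*sig
	typs[0] = funcSig(nil, nil)                        // func()
	typs[1] = funcSig([]string{"unsafe.Pointer"}, nil) // func(unsafe.Pointer)
	typs[2] = funcSig([]string{"int", "int"}, []string{"int"})
	typs[3] = funcSig(typs[2].params, typs[1].params) // reuses earlier entries
	return typs[:]
}

func main() {
	for i, t := range buildTypes() {
		fmt.Printf("typs[%d] = func(%v) %v\n", i, t.params, t.results)
	}
}
```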
@@ -6,6 +6,8 @@ package gc
 
 import (
 	"math/bits"
+
+	"cmd/compile/internal/base"
 )
 
 const (
@@ -35,7 +37,7 @@ func bvbulkalloc(nbit int32, count int32) bulkBvec {
 	nword := (nbit + wordBits - 1) / wordBits
 	size := int64(nword) * int64(count)
 	if int64(int32(size*4)) != size*4 {
-		Fatalf("bvbulkalloc too big: nbit=%d count=%d nword=%d size=%d", nbit, count, nword, size)
+		base.Fatalf("bvbulkalloc too big: nbit=%d count=%d nword=%d size=%d", nbit, count, nword, size)
 	}
 	return bulkBvec{
 		words: make([]uint32, size),
@@ -52,7 +54,7 @@ func (b *bulkBvec) next() bvec {
 
 func (bv1 bvec) Eq(bv2 bvec) bool {
 	if bv1.n != bv2.n {
-		Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n)
+		base.Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n)
 	}
 	for i, x := range bv1.b {
 		if x != bv2.b[i] {
@@ -68,7 +70,7 @@ func (dst bvec) Copy(src bvec) {
 
 func (bv bvec) Get(i int32) bool {
 	if i < 0 || i >= bv.n {
-		Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n)
+		base.Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n)
 	}
 	mask := uint32(1 << uint(i%wordBits))
 	return bv.b[i>>wordShift]&mask != 0
@@ -76,7 +78,7 @@ func (bv bvec) Get(i int32) bool {
 
 func (bv bvec) Set(i int32) {
 	if i < 0 || i >= bv.n {
-		Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n)
+		base.Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n)
 	}
 	mask := uint32(1 << uint(i%wordBits))
 	bv.b[i/wordBits] |= mask
@@ -84,7 +86,7 @@ func (bv bvec) Set(i int32) {
 
 func (bv bvec) Unset(i int32) {
 	if i < 0 || i >= bv.n {
-		Fatalf("bvunset: index %d is out of bounds with length %d\n", i, bv.n)
+		base.Fatalf("bvunset: index %d is out of bounds with length %d\n", i, bv.n)
 	}
 	mask := uint32(1 << uint(i%wordBits))
 	bv.b[i/wordBits] &^= mask
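The bvec methods touched above are a conventional word-packed bit vector: word index i/32, bit mask 1 << (i%32). A self-contained sketch of the same indexing scheme (this toy panics where the compiler calls base.Fatalf):

```go
package main

import "fmt"

const wordBits = 32

// bitvec is a minimal word-packed bit set mirroring bvec's indexing.
type bitvec struct {
	n int32    // number of bits
	b []uint32 // backing words
}

func newBitvec(n int32) bitvec {
	return bitvec{n: n, b: make([]uint32, (n+wordBits-1)/wordBits)}
}

func (bv bitvec) check(i int32, op string) {
	if i < 0 || i >= bv.n {
		panic(fmt.Sprintf("%s: index %d is out of bounds with length %d", op, i, bv.n))
	}
}

func (bv bitvec) Get(i int32) bool {
	bv.check(i, "get")
	return bv.b[i/wordBits]&(1<<uint(i%wordBits)) != 0
}

func (bv bitvec) Set(i int32) {
	bv.check(i, "set")
	bv.b[i/wordBits] |= 1 << uint(i%wordBits)
}

func (bv bitvec) Unset(i int32) {
	bv.check(i, "unset")
	bv.b[i/wordBits] &^= 1 << uint(i%wordBits)
}

func main() {
	bv := newBitvec(100)
	bv.Set(3)
	bv.Set(64)
	bv.Unset(3)
	fmt.Println(bv.Get(3), bv.Get(64)) // false true
}
```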
@@ -5,37 +5,40 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/syntax"
 	"cmd/compile/internal/types"
+	"cmd/internal/src"
 	"fmt"
 )
 
-func (p *noder) funcLit(expr *syntax.FuncLit) *Node {
+func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
 	xtype := p.typeExpr(expr.Type)
 	ntype := p.typeExpr(expr.Type)
 
-	xfunc := p.nod(expr, ODCLFUNC, nil, nil)
-	xfunc.Func.SetIsHiddenClosure(Curfn != nil)
-	xfunc.Func.Nname = newfuncnamel(p.pos(expr), nblank.Sym) // filled in by typecheckclosure
-	xfunc.Func.Nname.Name.Param.Ntype = xtype
-	xfunc.Func.Nname.Name.Defn = xfunc
+	dcl := p.nod(expr, ir.ODCLFUNC, nil, nil)
+	fn := dcl.Func()
+	fn.SetIsHiddenClosure(Curfn != nil)
+	fn.Nname = newfuncnamel(p.pos(expr), ir.BlankNode.Sym(), fn) // filled in by typecheckclosure
+	fn.Nname.Name().Param.Ntype = xtype
+	fn.Nname.Name().Defn = dcl
 
-	clo := p.nod(expr, OCLOSURE, nil, nil)
-	clo.Func.Ntype = ntype
+	clo := p.nod(expr, ir.OCLOSURE, nil, nil)
+	clo.SetFunc(fn)
+	fn.ClosureType = ntype
+	fn.OClosure = clo
 
-	xfunc.Func.Closure = clo
-	clo.Func.Closure = xfunc
-
-	p.funcBody(xfunc, expr.Body)
+	p.funcBody(dcl, expr.Body)
 
 	// closure-specific variables are hanging off the
 	// ordinary ones in the symbol table; see oldname.
 	// unhook them.
 	// make the list of pointers for the closure call.
-	for _, v := range xfunc.Func.Cvars.Slice() {
+	for _, v := range fn.ClosureVars.Slice() {
 		// Unlink from v1; see comment in syntax.go type Param for these fields.
-		v1 := v.Name.Defn
-		v1.Name.Param.Innermost = v.Name.Param.Outer
+		v1 := v.Name().Defn
+		v1.Name().Param.Innermost = v.Name().Param.Outer
 
 		// If the closure usage of v is not dense,
 		// we need to make it dense; now that we're out
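funcLit above turns every function literal into a hidden ODCLFUNC plus a list of the variables it closes over (ClosureVars). A rough manual "closure conversion" sketch of what that machinery does implicitly (the env type and hiddenFunc are invented stand-ins, not compiler output):

```go
package main

import "fmt"

// env plays the role of the captured-variable record the compiler builds.
type env struct {
	n *int // captured by reference, since the literal reassigns n
}

// hiddenFunc plays the role of the generated hidden function body.
func hiddenFunc(e *env) int {
	(*e.n)++
	return *e.n
}

func main() {
	n := 0
	// What the source writes:
	lit := func() int { n++; return n }
	// Roughly what the compiler builds for it:
	converted := &env{n: &n}

	fmt.Println(lit())                 // 1
	fmt.Println(hiddenFunc(converted)) // 2 — both update the same n
}
```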
@@ -65,7 +68,7 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *Node {
 		// obtains f3's v, creating it if necessary (as it is in the example).
 		//
 		// capturevars will decide whether to use v directly or &v.
-		v.Name.Param.Outer = oldname(v.Sym)
+		v.Name().Param.Outer = oldname(v.Sym())
 	}
 
 	return clo
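Whichever way capturevars decides to pass a variable into the closure (the value itself or its address), the language semantics are the same: the closure and the enclosing function share one variable. A small example of that sharing in ordinary Go (which variable actually gets by-value treatment is a compiler decision and not observable):

```go
package main

import "fmt"

func main() {
	small := 42  // never reassigned after capture: a candidate for by-value capture
	counter := 0 // reassigned inside the closure: must be captured by reference
	f := func() int {
		counter++
		return small + counter
	}
	fmt.Println(f(), f()) // 43 44
	fmt.Println(counter)  // 2: the shared counter was updated through the closure
}
```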
@ -75,60 +78,61 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *Node {
|
||||||
// function associated with the closure.
|
// function associated with the closure.
|
||||||
// TODO: This creation of the named function should probably really be done in a
|
// TODO: This creation of the named function should probably really be done in a
|
||||||
// separate pass from type-checking.
|
// separate pass from type-checking.
|
||||||
func typecheckclosure(clo *Node, top int) {
|
func typecheckclosure(clo ir.Node, top int) {
|
||||||
xfunc := clo.Func.Closure
|
fn := clo.Func()
|
||||||
|
dcl := fn.Decl
|
||||||
// Set current associated iota value, so iota can be used inside
|
// Set current associated iota value, so iota can be used inside
|
||||||
// function in ConstSpec, see issue #22344
|
// function in ConstSpec, see issue #22344
|
||||||
if x := getIotaValue(); x >= 0 {
|
if x := getIotaValue(); x >= 0 {
|
||||||
xfunc.SetIota(x)
|
dcl.SetIota(x)
|
||||||
}
|
}
|
||||||
|
|
||||||
clo.Func.Ntype = typecheck(clo.Func.Ntype, ctxType)
|
fn.ClosureType = typecheck(fn.ClosureType, ctxType)
|
||||||
clo.Type = clo.Func.Ntype.Type
|
clo.SetType(fn.ClosureType.Type())
|
||||||
clo.Func.Top = top
|
fn.ClosureCalled = top&ctxCallee != 0
|
||||||
|
|
||||||
// Do not typecheck xfunc twice, otherwise, we will end up pushing
|
// Do not typecheck dcl twice, otherwise, we will end up pushing
|
||||||
// xfunc to xtop multiple times, causing initLSym called twice.
|
// dcl to xtop multiple times, causing initLSym called twice.
|
||||||
// See #30709
|
// See #30709
|
||||||
if xfunc.Typecheck() == 1 {
|
if dcl.Typecheck() == 1 {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, ln := range xfunc.Func.Cvars.Slice() {
|
for _, ln := range fn.ClosureVars.Slice() {
|
||||||
n := ln.Name.Defn
|
n := ln.Name().Defn
|
||||||
if !n.Name.Captured() {
|
if !n.Name().Captured() {
|
||||||
n.Name.SetCaptured(true)
|
n.Name().SetCaptured(true)
|
||||||
if n.Name.Decldepth == 0 {
|
if n.Name().Decldepth == 0 {
|
||||||
Fatalf("typecheckclosure: var %S does not have decldepth assigned", n)
|
base.Fatalf("typecheckclosure: var %S does not have decldepth assigned", n)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ignore assignments to the variable in straightline code
|
// Ignore assignments to the variable in straightline code
|
||||||
// preceding the first capturing by a closure.
|
// preceding the first capturing by a closure.
|
||||||
if n.Name.Decldepth == decldepth {
|
if n.Name().Decldepth == decldepth {
|
||||||
n.Name.SetAssigned(false)
|
n.Name().SetAssigned(false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
xfunc.Func.Nname.Sym = closurename(Curfn)
|
fn.Nname.SetSym(closurename(Curfn))
|
||||||
setNodeNameFunc(xfunc.Func.Nname)
|
setNodeNameFunc(fn.Nname)
|
||||||
xfunc = typecheck(xfunc, ctxStmt)
|
dcl = typecheck(dcl, ctxStmt)
|
||||||
|
|
||||||
// Type check the body now, but only if we're inside a function.
|
// Type check the body now, but only if we're inside a function.
|
||||||
// At top level (in a variable initialization: curfn==nil) we're not
|
// At top level (in a variable initialization: curfn==nil) we're not
|
||||||
// ready to type check code yet; we'll check it later, because the
|
// ready to type check code yet; we'll check it later, because the
|
||||||
// underlying closure function we create is added to xtop.
|
// underlying closure function we create is added to xtop.
|
||||||
if Curfn != nil && clo.Type != nil {
|
if Curfn != nil && clo.Type() != nil {
|
||||||
oldfn := Curfn
|
oldfn := Curfn
|
||||||
Curfn = xfunc
|
Curfn = dcl
|
||||||
olddd := decldepth
|
olddd := decldepth
|
||||||
decldepth = 1
|
decldepth = 1
|
||||||
typecheckslice(xfunc.Nbody.Slice(), ctxStmt)
|
typecheckslice(dcl.Body().Slice(), ctxStmt)
|
||||||
decldepth = olddd
|
decldepth = olddd
|
||||||
Curfn = oldfn
|
Curfn = oldfn
|
||||||
}
|
}
|
||||||
|
|
||||||
xtop = append(xtop, xfunc)
|
xtop = append(xtop, dcl)
|
||||||
}
|
}
|
||||||
|
|
||||||
// globClosgen is like Func.Closgen, but for the global scope.
|
// globClosgen is like Func.Closgen, but for the global scope.
|
||||||
|
|
@@ -136,23 +140,23 @@ var globClosgen int
 
 // closurename generates a new unique name for a closure within
 // outerfunc.
-func closurename(outerfunc *Node) *types.Sym {
+func closurename(outerfunc ir.Node) *types.Sym {
 	outer := "glob."
 	prefix := "func"
 	gen := &globClosgen
 
 	if outerfunc != nil {
-		if outerfunc.Func.Closure != nil {
+		if outerfunc.Func().OClosure != nil {
 			prefix = ""
 		}
 
-		outer = outerfunc.funcname()
+		outer = ir.FuncName(outerfunc)
 
 		// There may be multiple functions named "_". In those
 		// cases, we can't use their individual Closgens as it
 		// would lead to name clashes.
-		if !outerfunc.Func.Nname.isBlank() {
-			gen = &outerfunc.Func.Closgen
+		if !ir.IsBlank(outerfunc.Func().Nname) {
+			gen = &outerfunc.Func().Closgen
 		}
 	}
 
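The names closurename generates (outer function name plus a "func" counter, or "glob." at package scope) are the ones that show up in stack traces and in runtime function metadata. A small runnable way to observe them from ordinary Go; the exact numbering suffix is an internal detail and may vary:

```go
package main

import (
	"fmt"
	"reflect"
	"runtime"
)

func outer() func() {
	return func() {} // the compiler names this something like main.outer.func1
}

func main() {
	f := outer()
	pc := reflect.ValueOf(f).Pointer()
	fmt.Println(runtime.FuncForPC(pc).Name()) // e.g. "main.outer.func1"
}
```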
@@ -168,15 +172,14 @@ var capturevarscomplete bool
 // by value or by reference.
 // We use value capturing for values <= 128 bytes that are never reassigned
 // after capturing (effectively constant).
-func capturevars(xfunc *Node) {
-	lno := lineno
-	lineno = xfunc.Pos
-
-	clo := xfunc.Func.Closure
-	cvars := xfunc.Func.Cvars.Slice()
+func capturevars(dcl ir.Node) {
+	lno := base.Pos
+	base.Pos = dcl.Pos()
+	fn := dcl.Func()
+	cvars := fn.ClosureVars.Slice()
 	out := cvars[:0]
 	for _, v := range cvars {
-		if v.Type == nil {
+		if v.Type() == nil {
 			// If v.Type is nil, it means v looked like it
 			// was going to be used in the closure, but
 			// isn't. This happens in struct literals like
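The comment above states the by-value rule: capture a variable by value only when it is small (<= 128 bytes) and effectively constant after capture. A rough sketch of that decision as a standalone predicate (the struct fields here are invented; the compiler reads the same facts off node flags such as Addrtaken, Assigned, and Class):

```go
package main

import "fmt"

// capturedVar records the facts the rule above depends on.
type capturedVar struct {
	name       string
	sizeBytes  int64
	isOutParam bool // output parameters are assigned implicitly on return
	addrTaken  bool
	reassigned bool
}

// captureByValue sketches capturevars' choice between v and &v.
func captureByValue(v capturedVar) bool {
	return !v.isOutParam && !v.addrTaken && !v.reassigned && v.sizeBytes <= 128
}

func main() {
	vars := []capturedVar{
		{name: "i", sizeBytes: 8},
		{name: "buf", sizeBytes: 4096},
		{name: "counter", sizeBytes: 8, reassigned: true},
	}
	for _, v := range vars {
		mode := "by reference (&v)"
		if captureByValue(v) {
			mode = "by value"
		}
		fmt.Printf("%s: captured %s\n", v.name, mode)
	}
}
```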
@ -189,47 +192,47 @@ func capturevars(xfunc *Node) {
|
||||||
|
|
||||||
// type check the & of closed variables outside the closure,
|
// type check the & of closed variables outside the closure,
|
||||||
// so that the outer frame also grabs them and knows they escape.
|
// so that the outer frame also grabs them and knows they escape.
|
||||||
dowidth(v.Type)
|
dowidth(v.Type())
|
||||||
|
|
||||||
outer := v.Name.Param.Outer
|
outer := v.Name().Param.Outer
|
||||||
outermost := v.Name.Defn
|
outermost := v.Name().Defn
|
||||||
|
|
||||||
// out parameters will be assigned to implicitly upon return.
|
// out parameters will be assigned to implicitly upon return.
|
||||||
if outermost.Class() != PPARAMOUT && !outermost.Name.Addrtaken() && !outermost.Name.Assigned() && v.Type.Width <= 128 {
|
if outermost.Class() != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
|
||||||
v.Name.SetByval(true)
|
v.Name().SetByval(true)
|
||||||
} else {
|
} else {
|
||||||
outermost.Name.SetAddrtaken(true)
|
outermost.Name().SetAddrtaken(true)
|
||||||
outer = nod(OADDR, outer, nil)
|
outer = ir.Nod(ir.OADDR, outer, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
if Debug.m > 1 {
|
if base.Flag.LowerM > 1 {
|
||||||
var name *types.Sym
|
var name *types.Sym
|
||||||
if v.Name.Curfn != nil && v.Name.Curfn.Func.Nname != nil {
|
if v.Name().Curfn != nil && v.Name().Curfn.Func().Nname != nil {
|
||||||
name = v.Name.Curfn.Func.Nname.Sym
|
name = v.Name().Curfn.Func().Nname.Sym()
|
||||||
}
|
}
|
||||||
how := "ref"
|
how := "ref"
|
||||||
if v.Name.Byval() {
|
if v.Name().Byval() {
|
||||||
how = "value"
|
how = "value"
|
||||||
}
|
}
|
||||||
Warnl(v.Pos, "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym, outermost.Name.Addrtaken(), outermost.Name.Assigned(), int32(v.Type.Width))
|
base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
|
||||||
}
|
}
|
||||||
|
|
||||||
outer = typecheck(outer, ctxExpr)
|
outer = typecheck(outer, ctxExpr)
|
||||||
clo.Func.Enter.Append(outer)
|
fn.ClosureEnter.Append(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
xfunc.Func.Cvars.Set(out)
|
fn.ClosureVars.Set(out)
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
}
|
}
|
||||||
|
|
||||||
// transformclosure is called in a separate phase after escape analysis.
|
// transformclosure is called in a separate phase after escape analysis.
|
||||||
// It transform closure bodies to properly reference captured variables.
|
// It transform closure bodies to properly reference captured variables.
|
||||||
func transformclosure(xfunc *Node) {
|
func transformclosure(dcl ir.Node) {
|
||||||
lno := lineno
|
lno := base.Pos
|
||||||
lineno = xfunc.Pos
|
base.Pos = dcl.Pos()
|
||||||
clo := xfunc.Func.Closure
|
fn := dcl.Func()
|
||||||
|
|
||||||
if clo.Func.Top&ctxCallee != 0 {
|
if fn.ClosureCalled {
|
||||||
// If the closure is directly called, we transform it to a plain function call
|
// If the closure is directly called, we transform it to a plain function call
|
||||||
// with variables passed as args. This avoids allocation of a closure object.
|
// with variables passed as args. This avoids allocation of a closure object.
|
||||||
// Here we do only a part of the transformation. Walk of OCALLFUNC(OCLOSURE)
|
// Here we do only a part of the transformation. Walk of OCALLFUNC(OCLOSURE)
|
||||||
|
|
@ -246,116 +249,112 @@ func transformclosure(xfunc *Node) {
|
||||||
// }(byval, &byref, 42)
|
// }(byval, &byref, 42)
|
||||||
|
|
||||||
// f is ONAME of the actual function.
|
// f is ONAME of the actual function.
|
||||||
f := xfunc.Func.Nname
|
f := fn.Nname
|
||||||
|
|
||||||
// We are going to insert captured variables before input args.
|
// We are going to insert captured variables before input args.
|
||||||
var params []*types.Field
|
var params []*types.Field
|
||||||
var decls []*Node
|
var decls []ir.Node
|
||||||
for _, v := range xfunc.Func.Cvars.Slice() {
|
for _, v := range fn.ClosureVars.Slice() {
|
||||||
if !v.Name.Byval() {
|
if !v.Name().Byval() {
|
||||||
// If v of type T is captured by reference,
|
// If v of type T is captured by reference,
|
||||||
// we introduce function param &v *T
|
// we introduce function param &v *T
|
||||||
// and v remains PAUTOHEAP with &v heapaddr
|
// and v remains PAUTOHEAP with &v heapaddr
|
||||||
// (accesses will implicitly deref &v).
|
// (accesses will implicitly deref &v).
|
||||||
addr := newname(lookup("&" + v.Sym.Name))
|
addr := NewName(lookup("&" + v.Sym().Name))
|
||||||
addr.Type = types.NewPtr(v.Type)
|
addr.SetType(types.NewPtr(v.Type()))
|
||||||
v.Name.Param.Heapaddr = addr
|
v.Name().Param.Heapaddr = addr
|
||||||
v = addr
|
v = addr
|
||||||
}
|
}
|
||||||
|
|
||||||
v.SetClass(PPARAM)
|
v.SetClass(ir.PPARAM)
|
||||||
decls = append(decls, v)
|
decls = append(decls, v)
|
||||||
|
|
||||||
fld := types.NewField()
|
fld := types.NewField(src.NoXPos, v.Sym(), v.Type())
|
||||||
fld.Nname = asTypesNode(v)
|
fld.Nname = v
|
||||||
fld.Type = v.Type
|
|
||||||
fld.Sym = v.Sym
|
|
||||||
params = append(params, fld)
|
params = append(params, fld)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(params) > 0 {
|
if len(params) > 0 {
|
||||||
// Prepend params and decls.
|
// Prepend params and decls.
|
||||||
f.Type.Params().SetFields(append(params, f.Type.Params().FieldSlice()...))
|
f.Type().Params().SetFields(append(params, f.Type().Params().FieldSlice()...))
|
||||||
xfunc.Func.Dcl = append(decls, xfunc.Func.Dcl...)
|
fn.Dcl = append(decls, fn.Dcl...)
|
||||||
}
|
}
|
||||||
|
|
||||||
dowidth(f.Type)
|
dowidth(f.Type())
|
||||||
xfunc.Type = f.Type // update type of ODCLFUNC
|
dcl.SetType(f.Type()) // update type of ODCLFUNC
|
||||||
} else {
|
} else {
|
||||||
// The closure is not called, so it is going to stay as closure.
|
// The closure is not called, so it is going to stay as closure.
|
||||||
var body []*Node
|
var body []ir.Node
|
||||||
offset := int64(Widthptr)
|
offset := int64(Widthptr)
|
||||||
for _, v := range xfunc.Func.Cvars.Slice() {
|
for _, v := range fn.ClosureVars.Slice() {
|
||||||
// cv refers to the field inside of closure OSTRUCTLIT.
|
// cv refers to the field inside of closure OSTRUCTLIT.
|
||||||
cv := nod(OCLOSUREVAR, nil, nil)
|
cv := ir.Nod(ir.OCLOSUREVAR, nil, nil)
|
||||||
|
|
||||||
cv.Type = v.Type
|
cv.SetType(v.Type())
|
||||||
if !v.Name.Byval() {
|
if !v.Name().Byval() {
|
||||||
cv.Type = types.NewPtr(v.Type)
|
cv.SetType(types.NewPtr(v.Type()))
|
||||||
}
|
}
|
||||||
offset = Rnd(offset, int64(cv.Type.Align))
|
offset = Rnd(offset, int64(cv.Type().Align))
|
||||||
cv.Xoffset = offset
|
cv.SetOffset(offset)
|
||||||
offset += cv.Type.Width
|
offset += cv.Type().Width
|
||||||
|
|
||||||
if v.Name.Byval() && v.Type.Width <= int64(2*Widthptr) {
|
if v.Name().Byval() && v.Type().Width <= int64(2*Widthptr) {
|
||||||
// If it is a small variable captured by value, downgrade it to PAUTO.
|
// If it is a small variable captured by value, downgrade it to PAUTO.
|
||||||
v.SetClass(PAUTO)
|
v.SetClass(ir.PAUTO)
|
||||||
xfunc.Func.Dcl = append(xfunc.Func.Dcl, v)
|
fn.Dcl = append(fn.Dcl, v)
|
||||||
body = append(body, nod(OAS, v, cv))
|
body = append(body, ir.Nod(ir.OAS, v, cv))
|
||||||
} else {
|
} else {
|
||||||
// Declare variable holding addresses taken from closure
|
// Declare variable holding addresses taken from closure
|
||||||
// and initialize in entry prologue.
|
// and initialize in entry prologue.
|
||||||
addr := newname(lookup("&" + v.Sym.Name))
|
addr := NewName(lookup("&" + v.Sym().Name))
|
||||||
addr.Type = types.NewPtr(v.Type)
|
addr.SetType(types.NewPtr(v.Type()))
|
||||||
addr.SetClass(PAUTO)
|
addr.SetClass(ir.PAUTO)
|
||||||
addr.Name.SetUsed(true)
|
addr.Name().SetUsed(true)
|
||||||
addr.Name.Curfn = xfunc
|
addr.Name().Curfn = dcl
|
||||||
xfunc.Func.Dcl = append(xfunc.Func.Dcl, addr)
|
fn.Dcl = append(fn.Dcl, addr)
|
||||||
v.Name.Param.Heapaddr = addr
|
v.Name().Param.Heapaddr = addr
|
||||||
if v.Name.Byval() {
|
if v.Name().Byval() {
|
||||||
cv = nod(OADDR, cv, nil)
|
cv = ir.Nod(ir.OADDR, cv, nil)
|
||||||
}
|
}
|
||||||
body = append(body, nod(OAS, addr, cv))
|
body = append(body, ir.Nod(ir.OAS, addr, cv))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(body) > 0 {
|
if len(body) > 0 {
|
||||||
typecheckslice(body, ctxStmt)
|
typecheckslice(body, ctxStmt)
|
||||||
xfunc.Func.Enter.Set(body)
|
fn.Enter.Set(body)
|
||||||
xfunc.Func.SetNeedctxt(true)
|
fn.SetNeedctxt(true)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
}
|
}
|
||||||
|
|
||||||
// hasemptycvars reports whether closure clo has an
|
// hasemptycvars reports whether closure clo has an
|
||||||
// empty list of captured vars.
|
// empty list of captured vars.
|
||||||
func hasemptycvars(clo *Node) bool {
|
func hasemptycvars(clo ir.Node) bool {
|
||||||
xfunc := clo.Func.Closure
|
return clo.Func().ClosureVars.Len() == 0
|
||||||
return xfunc.Func.Cvars.Len() == 0
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// closuredebugruntimecheck applies boilerplate checks for debug flags
|
// closuredebugruntimecheck applies boilerplate checks for debug flags
|
||||||
// and compiling runtime
|
// and compiling runtime
|
||||||
func closuredebugruntimecheck(clo *Node) {
|
func closuredebugruntimecheck(clo ir.Node) {
|
||||||
if Debug_closure > 0 {
|
if base.Debug.Closure > 0 {
|
||||||
xfunc := clo.Func.Closure
|
if clo.Esc() == EscHeap {
|
||||||
if clo.Esc == EscHeap {
|
base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func().ClosureVars)
|
||||||
Warnl(clo.Pos, "heap closure, captured vars = %v", xfunc.Func.Cvars)
|
|
||||||
} else {
|
} else {
|
||||||
Warnl(clo.Pos, "stack closure, captured vars = %v", xfunc.Func.Cvars)
|
base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func().ClosureVars)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if compiling_runtime && clo.Esc == EscHeap {
|
if base.Flag.CompilingRuntime && clo.Esc() == EscHeap {
|
||||||
yyerrorl(clo.Pos, "heap-allocated closure, not allowed in runtime")
|
base.ErrorfAt(clo.Pos(), "heap-allocated closure, not allowed in runtime")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// closureType returns the struct type used to hold all the information
|
// closureType returns the struct type used to hold all the information
|
||||||
// needed in the closure for clo (clo must be a OCLOSURE node).
|
// needed in the closure for clo (clo must be a OCLOSURE node).
|
||||||
// The address of a variable of the returned type can be cast to a func.
|
// The address of a variable of the returned type can be cast to a func.
|
||||||
func closureType(clo *Node) *types.Type {
|
func closureType(clo ir.Node) *types.Type {
|
||||||
// Create closure in the form of a composite literal.
|
// Create closure in the form of a composite literal.
|
||||||
// supposing the closure captures an int i and a string s
|
// supposing the closure captures an int i and a string s
|
||||||
// and has one float64 argument and no results,
|
// and has one float64 argument and no results,
|
||||||
|
|
@ -369,94 +368,95 @@ func closureType(clo *Node) *types.Type {
|
||||||
// The information appears in the binary in the form of type descriptors;
|
// The information appears in the binary in the form of type descriptors;
|
||||||
// the struct is unnamed so that closures in multiple packages with the
|
// the struct is unnamed so that closures in multiple packages with the
|
||||||
// same struct type can share the descriptor.
|
// same struct type can share the descriptor.
|
||||||
fields := []*Node{
|
fields := []ir.Node{
|
||||||
namedfield(".F", types.Types[TUINTPTR]),
|
namedfield(".F", types.Types[types.TUINTPTR]),
|
||||||
}
|
}
|
||||||
for _, v := range clo.Func.Closure.Func.Cvars.Slice() {
|
for _, v := range clo.Func().ClosureVars.Slice() {
|
||||||
typ := v.Type
|
typ := v.Type()
|
||||||
if !v.Name.Byval() {
|
if !v.Name().Byval() {
|
||||||
typ = types.NewPtr(typ)
|
typ = types.NewPtr(typ)
|
||||||
}
|
}
|
||||||
fields = append(fields, symfield(v.Sym, typ))
|
fields = append(fields, symfield(v.Sym(), typ))
|
||||||
}
|
}
|
||||||
typ := tostruct(fields)
|
typ := tostruct(fields)
|
||||||
typ.SetNoalg(true)
|
typ.SetNoalg(true)
|
||||||
return typ
|
return typ
|
||||||
}
|
}
|
||||||
|
|
||||||
func walkclosure(clo *Node, init *Nodes) *Node {
|
func walkclosure(clo ir.Node, init *ir.Nodes) ir.Node {
|
||||||
xfunc := clo.Func.Closure
|
fn := clo.Func()
|
||||||
|
|
||||||
// If no closure vars, don't bother wrapping.
|
// If no closure vars, don't bother wrapping.
|
||||||
if hasemptycvars(clo) {
|
if hasemptycvars(clo) {
|
||||||
if Debug_closure > 0 {
|
if base.Debug.Closure > 0 {
|
||||||
Warnl(clo.Pos, "closure converted to global")
|
base.WarnfAt(clo.Pos(), "closure converted to global")
|
||||||
}
|
}
|
||||||
return xfunc.Func.Nname
|
return fn.Nname
|
||||||
}
|
}
|
||||||
closuredebugruntimecheck(clo)
|
closuredebugruntimecheck(clo)
|
||||||
|
|
||||||
typ := closureType(clo)
|
typ := closureType(clo)
|
||||||
|
|
||||||
clos := nod(OCOMPLIT, nil, typenod(typ))
|
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
|
||||||
clos.Esc = clo.Esc
|
clos.SetEsc(clo.Esc())
|
||||||
clos.List.Set(append([]*Node{nod(OCFUNC, xfunc.Func.Nname, nil)}, clo.Func.Enter.Slice()...))
|
clos.PtrList().Set(append([]ir.Node{ir.Nod(ir.OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...))
|
||||||
|
|
||||||
clos = nod(OADDR, clos, nil)
|
clos = ir.Nod(ir.OADDR, clos, nil)
|
||||||
clos.Esc = clo.Esc
|
clos.SetEsc(clo.Esc())
|
||||||
|
|
||||||
// Force type conversion from *struct to the func type.
|
// Force type conversion from *struct to the func type.
|
||||||
clos = convnop(clos, clo.Type)
|
clos = convnop(clos, clo.Type())
|
||||||
|
|
||||||
// non-escaping temp to use, if any.
|
// non-escaping temp to use, if any.
|
||||||
if x := prealloc[clo]; x != nil {
|
if x := prealloc[clo]; x != nil {
|
||||||
if !types.Identical(typ, x.Type) {
|
if !types.Identical(typ, x.Type()) {
|
||||||
panic("closure type does not match order's assigned type")
|
panic("closure type does not match order's assigned type")
|
||||||
}
|
}
|
||||||
clos.Left.Right = x
|
clos.Left().SetRight(x)
|
||||||
delete(prealloc, clo)
|
delete(prealloc, clo)
|
||||||
}
|
}
|
||||||
|
|
||||||
return walkexpr(clos, init)
|
return walkexpr(clos, init)
|
||||||
}
|
}
|
||||||
|
|
||||||
func typecheckpartialcall(fn *Node, sym *types.Sym) {
|
func typecheckpartialcall(dot ir.Node, sym *types.Sym) {
|
||||||
switch fn.Op {
|
switch dot.Op() {
|
||||||
case ODOTINTER, ODOTMETH:
|
case ir.ODOTINTER, ir.ODOTMETH:
|
||||||
break
|
break
|
||||||
|
|
||||||
default:
|
default:
|
||||||
Fatalf("invalid typecheckpartialcall")
|
base.Fatalf("invalid typecheckpartialcall")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create top-level function.
|
// Create top-level function.
|
||||||
xfunc := makepartialcall(fn, fn.Type, sym)
|
dcl := makepartialcall(dot, dot.Type(), sym)
|
||||||
fn.Func = xfunc.Func
|
dcl.Func().SetWrapper(true)
|
||||||
fn.Func.SetWrapper(true)
|
dot.SetOp(ir.OCALLPART)
|
||||||
fn.Right = newname(sym)
|
dot.SetRight(NewName(sym))
|
||||||
fn.Op = OCALLPART
|
dot.SetType(dcl.Type())
|
||||||
fn.Type = xfunc.Type
|
dot.SetFunc(dcl.Func())
|
||||||
|
dot.SetOpt(nil) // clear types.Field from ODOTMETH
|
||||||
}
|
}
|
||||||
|
|
||||||
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
|
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
|
||||||
// for partial calls.
|
// for partial calls.
|
||||||
func makepartialcall(fn *Node, t0 *types.Type, meth *types.Sym) *Node {
|
func makepartialcall(dot ir.Node, t0 *types.Type, meth *types.Sym) ir.Node {
|
||||||
rcvrtype := fn.Left.Type
|
rcvrtype := dot.Left().Type()
|
||||||
sym := methodSymSuffix(rcvrtype, meth, "-fm")
|
sym := methodSymSuffix(rcvrtype, meth, "-fm")
|
||||||
|
|
||||||
if sym.Uniq() {
|
if sym.Uniq() {
|
||||||
return asNode(sym.Def)
|
return ir.AsNode(sym.Def)
|
||||||
}
|
}
|
||||||
sym.SetUniq(true)
|
sym.SetUniq(true)
|
||||||
|
|
||||||
savecurfn := Curfn
|
savecurfn := Curfn
|
||||||
saveLineNo := lineno
|
saveLineNo := base.Pos
|
||||||
Curfn = nil
|
Curfn = nil
|
||||||
|
|
||||||
// Set line number equal to the line number where the method is declared.
|
// Set line number equal to the line number where the method is declared.
|
||||||
var m *types.Field
|
var m *types.Field
|
||||||
if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
|
if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
|
||||||
lineno = m.Pos
|
base.Pos = m.Pos
|
||||||
}
|
}
|
||||||
// Note: !m.Pos.IsKnown() happens for method expressions where
|
// Note: !m.Pos.IsKnown() happens for method expressions where
|
||||||
// the method is implicitly declared. The Error method of the
|
// the method is implicitly declared. The Error method of the
|
||||||
|
|
@ -464,73 +464,74 @@ func makepartialcall(fn *Node, t0 *types.Type, meth *types.Sym) *Node {
|
||||||
// number at the use of the method expression in this
|
// number at the use of the method expression in this
|
||||||
// case. See issue 29389.
|
// case. See issue 29389.
|
||||||
|
|
||||||
tfn := nod(OTFUNC, nil, nil)
|
tfn := ir.Nod(ir.OTFUNC, nil, nil)
|
||||||
tfn.List.Set(structargs(t0.Params(), true))
|
tfn.PtrList().Set(structargs(t0.Params(), true))
|
||||||
tfn.Rlist.Set(structargs(t0.Results(), false))
|
tfn.PtrRlist().Set(structargs(t0.Results(), false))
|
||||||
|
|
||||||
xfunc := dclfunc(sym, tfn)
|
dcl := dclfunc(sym, tfn)
|
||||||
xfunc.Func.SetDupok(true)
|
fn := dcl.Func()
|
||||||
xfunc.Func.SetNeedctxt(true)
|
fn.SetDupok(true)
|
||||||
|
fn.SetNeedctxt(true)
|
||||||
|
|
||||||
tfn.Type.SetPkg(t0.Pkg())
|
tfn.Type().SetPkg(t0.Pkg())
|
||||||
|
|
||||||
// Declare and initialize variable holding receiver.
|
// Declare and initialize variable holding receiver.
|
||||||
|
|
||||||
cv := nod(OCLOSUREVAR, nil, nil)
|
cv := ir.Nod(ir.OCLOSUREVAR, nil, nil)
|
||||||
cv.Type = rcvrtype
|
cv.SetType(rcvrtype)
|
||||||
cv.Xoffset = Rnd(int64(Widthptr), int64(cv.Type.Align))
|
cv.SetOffset(Rnd(int64(Widthptr), int64(cv.Type().Align)))
|
||||||
|
|
||||||
ptr := newname(lookup(".this"))
|
ptr := NewName(lookup(".this"))
|
||||||
declare(ptr, PAUTO)
|
declare(ptr, ir.PAUTO)
|
||||||
ptr.Name.SetUsed(true)
|
ptr.Name().SetUsed(true)
|
||||||
var body []*Node
|
var body []ir.Node
|
||||||
if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
|
if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
|
||||||
ptr.Type = rcvrtype
|
ptr.SetType(rcvrtype)
|
||||||
body = append(body, nod(OAS, ptr, cv))
|
body = append(body, ir.Nod(ir.OAS, ptr, cv))
|
||||||
} else {
|
} else {
|
||||||
ptr.Type = types.NewPtr(rcvrtype)
|
ptr.SetType(types.NewPtr(rcvrtype))
|
||||||
body = append(body, nod(OAS, ptr, nod(OADDR, cv, nil)))
|
body = append(body, ir.Nod(ir.OAS, ptr, ir.Nod(ir.OADDR, cv, nil)))
|
||||||
}
|
}
|
||||||
|
|
||||||
call := nod(OCALL, nodSym(OXDOT, ptr, meth), nil)
|
call := ir.Nod(ir.OCALL, nodSym(ir.OXDOT, ptr, meth), nil)
|
||||||
call.List.Set(paramNnames(tfn.Type))
|
call.PtrList().Set(paramNnames(tfn.Type()))
|
||||||
call.SetIsDDD(tfn.Type.IsVariadic())
|
call.SetIsDDD(tfn.Type().IsVariadic())
|
||||||
if t0.NumResults() != 0 {
|
if t0.NumResults() != 0 {
|
||||||
n := nod(ORETURN, nil, nil)
|
n := ir.Nod(ir.ORETURN, nil, nil)
|
||||||
n.List.Set1(call)
|
n.PtrList().Set1(call)
|
||||||
call = n
|
call = n
|
||||||
}
|
}
|
||||||
body = append(body, call)
|
body = append(body, call)
|
||||||
|
|
||||||
xfunc.Nbody.Set(body)
|
dcl.PtrBody().Set(body)
|
||||||
funcbody()
|
funcbody()
|
||||||
|
|
||||||
xfunc = typecheck(xfunc, ctxStmt)
|
dcl = typecheck(dcl, ctxStmt)
|
||||||
// Need to typecheck the body of the just-generated wrapper.
|
// Need to typecheck the body of the just-generated wrapper.
|
||||||
// typecheckslice() requires that Curfn is set when processing an ORETURN.
|
// typecheckslice() requires that Curfn is set when processing an ORETURN.
|
||||||
Curfn = xfunc
|
Curfn = dcl
|
||||||
typecheckslice(xfunc.Nbody.Slice(), ctxStmt)
|
typecheckslice(dcl.Body().Slice(), ctxStmt)
|
||||||
sym.Def = asTypesNode(xfunc)
|
sym.Def = dcl
|
||||||
xtop = append(xtop, xfunc)
|
xtop = append(xtop, dcl)
|
||||||
Curfn = savecurfn
|
Curfn = savecurfn
|
||||||
lineno = saveLineNo
|
base.Pos = saveLineNo
|
||||||
|
|
||||||
return xfunc
|
return dcl
|
||||||
}
|
}
|
||||||
|
|
||||||
// partialCallType returns the struct type used to hold all the information
|
// partialCallType returns the struct type used to hold all the information
|
||||||
// needed in the closure for n (n must be a OCALLPART node).
|
// needed in the closure for n (n must be a OCALLPART node).
|
||||||
// The address of a variable of the returned type can be cast to a func.
|
// The address of a variable of the returned type can be cast to a func.
|
||||||
func partialCallType(n *Node) *types.Type {
|
func partialCallType(n ir.Node) *types.Type {
|
||||||
t := tostruct([]*Node{
|
t := tostruct([]ir.Node{
|
||||||
namedfield("F", types.Types[TUINTPTR]),
|
namedfield("F", types.Types[types.TUINTPTR]),
|
||||||
namedfield("R", n.Left.Type),
|
namedfield("R", n.Left().Type()),
|
||||||
})
|
})
|
||||||
t.SetNoalg(true)
|
t.SetNoalg(true)
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
|
|
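partialCallType above builds the closure header struct { F uintptr; R <receiver type> }, and makepartialcall earlier generates the "-fm" wrapper that the F slot ends up pointing at. At the language level this lowering corresponds to an ordinary method value; a small stand-alone program (hypothetical type T and method M) shows the construct being compiled here:

package main

import "fmt"

type T struct{ n int }

func (t T) M() int { return t.n }

func main() {
	t := T{n: 41}
	f := t.M // method value: the receiver is copied now, into the R field
	t.n = 0  // later changes to t do not affect f
	fmt.Println(f() + 1) // 42; the call runs through a generated T.M-fm wrapper
}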
||||||
func walkpartialcall(n *Node, init *Nodes) *Node {
|
func walkpartialcall(n ir.Node, init *ir.Nodes) ir.Node {
|
||||||
// Create closure in the form of a composite literal.
|
// Create closure in the form of a composite literal.
|
||||||
// For x.M with receiver (x) type T, the generated code looks like:
|
// For x.M with receiver (x) type T, the generated code looks like:
|
||||||
//
|
//
|
||||||
|
|
@ -538,38 +539,38 @@ func walkpartialcall(n *Node, init *Nodes) *Node {
|
||||||
//
|
//
|
||||||
// Like walkclosure above.
|
// Like walkclosure above.
|
||||||
|
|
||||||
if n.Left.Type.IsInterface() {
|
if n.Left().Type().IsInterface() {
|
||||||
// Trigger panic for method on nil interface now.
|
// Trigger panic for method on nil interface now.
|
||||||
// Otherwise it happens in the wrapper and is confusing.
|
// Otherwise it happens in the wrapper and is confusing.
|
||||||
n.Left = cheapexpr(n.Left, init)
|
n.SetLeft(cheapexpr(n.Left(), init))
|
||||||
n.Left = walkexpr(n.Left, nil)
|
n.SetLeft(walkexpr(n.Left(), nil))
|
||||||
|
|
||||||
tab := nod(OITAB, n.Left, nil)
|
tab := ir.Nod(ir.OITAB, n.Left(), nil)
|
||||||
tab = typecheck(tab, ctxExpr)
|
tab = typecheck(tab, ctxExpr)
|
||||||
|
|
||||||
c := nod(OCHECKNIL, tab, nil)
|
c := ir.Nod(ir.OCHECKNIL, tab, nil)
|
||||||
c.SetTypecheck(1)
|
c.SetTypecheck(1)
|
||||||
init.Append(c)
|
init.Append(c)
|
||||||
}
|
}
|
||||||
|
|
||||||
typ := partialCallType(n)
|
typ := partialCallType(n)
|
||||||
|
|
||||||
clos := nod(OCOMPLIT, nil, typenod(typ))
|
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
|
||||||
clos.Esc = n.Esc
|
clos.SetEsc(n.Esc())
|
||||||
clos.List.Set2(nod(OCFUNC, n.Func.Nname, nil), n.Left)
|
clos.PtrList().Set2(ir.Nod(ir.OCFUNC, n.Func().Nname, nil), n.Left())
|
||||||
|
|
||||||
clos = nod(OADDR, clos, nil)
|
clos = ir.Nod(ir.OADDR, clos, nil)
|
||||||
clos.Esc = n.Esc
|
clos.SetEsc(n.Esc())
|
||||||
|
|
||||||
// Force type conversion from *struct to the func type.
|
// Force type conversion from *struct to the func type.
|
||||||
clos = convnop(clos, n.Type)
|
clos = convnop(clos, n.Type())
|
||||||
|
|
||||||
// non-escaping temp to use, if any.
|
// non-escaping temp to use, if any.
|
||||||
if x := prealloc[n]; x != nil {
|
if x := prealloc[n]; x != nil {
|
||||||
if !types.Identical(typ, x.Type) {
|
if !types.Identical(typ, x.Type()) {
|
||||||
panic("partial call type does not match order's assigned type")
|
panic("partial call type does not match order's assigned type")
|
||||||
}
|
}
|
||||||
clos.Left.Right = x
|
clos.Left().SetRight(x)
|
||||||
delete(prealloc, n)
|
delete(prealloc, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -578,16 +579,16 @@ func walkpartialcall(n *Node, init *Nodes) *Node {
|
||||||
|
|
||||||
// callpartMethod returns the *types.Field representing the method
|
// callpartMethod returns the *types.Field representing the method
|
||||||
// referenced by method value n.
|
// referenced by method value n.
|
||||||
func callpartMethod(n *Node) *types.Field {
|
func callpartMethod(n ir.Node) *types.Field {
|
||||||
if n.Op != OCALLPART {
|
if n.Op() != ir.OCALLPART {
|
||||||
Fatalf("expected OCALLPART, got %v", n)
|
base.Fatalf("expected OCALLPART, got %v", n)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(mdempsky): Optimize this. If necessary,
|
// TODO(mdempsky): Optimize this. If necessary,
|
||||||
// makepartialcall could save m for us somewhere.
|
// makepartialcall could save m for us somewhere.
|
||||||
var m *types.Field
|
var m *types.Field
|
||||||
if lookdot0(n.Right.Sym, n.Left.Type, &m, false) != 1 {
|
if lookdot0(n.Right().Sym(), n.Left().Type(), &m, false) != 1 {
|
||||||
Fatalf("failed to find field for OCALLPART")
|
base.Fatalf("failed to find field for OCALLPART")
|
||||||
}
|
}
|
||||||
|
|
||||||
return m
|
return m
|
||||||
|
|
|

||||||
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -18,7 +18,7 @@ func TestDeps(t *testing.T) {
|
||||||
}
|
}
|
||||||
for _, dep := range strings.Fields(strings.Trim(string(out), "[]")) {
|
for _, dep := range strings.Fields(strings.Trim(string(out), "[]")) {
|
||||||
switch dep {
|
switch dep {
|
||||||
case "go/build", "go/token":
|
case "go/build", "go/scanner":
|
||||||
// cmd/compile/internal/importer introduces a dependency
|
// cmd/compile/internal/importer introduces a dependency
|
||||||
// on go/build and go/token; cmd/compile/internal/ uses
|
// on go/build and go/token; cmd/compile/internal/ uses
|
||||||
// go/constant which uses go/token in its API. Once we
|
// go/constant which uses go/token in its API. Once we
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
"cmd/internal/dwarf"
|
"cmd/internal/dwarf"
|
||||||
"cmd/internal/obj"
|
"cmd/internal/obj"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
|
|
@ -26,8 +27,8 @@ type varPos struct {
|
||||||
func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
|
func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
|
||||||
var inlcalls dwarf.InlCalls
|
var inlcalls dwarf.InlCalls
|
||||||
|
|
||||||
if Debug_gendwarfinl != 0 {
|
if base.Debug.DwarfInl != 0 {
|
||||||
Ctxt.Logf("assembling DWARF inlined routine info for %v\n", fnsym.Name)
|
base.Ctxt.Logf("assembling DWARF inlined routine info for %v\n", fnsym.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// This maps inline index (from Ctxt.InlTree) to index in inlcalls.Calls
|
// This maps inline index (from Ctxt.InlTree) to index in inlcalls.Calls
|
||||||
|
|
@ -106,7 +107,7 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
|
||||||
}
|
}
|
||||||
m = makePreinlineDclMap(fnsym)
|
m = makePreinlineDclMap(fnsym)
|
||||||
} else {
|
} else {
|
||||||
ifnlsym := Ctxt.InlTree.InlinedFunction(int(ii - 1))
|
ifnlsym := base.Ctxt.InlTree.InlinedFunction(int(ii - 1))
|
||||||
m = makePreinlineDclMap(ifnlsym)
|
m = makePreinlineDclMap(ifnlsym)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -181,7 +182,7 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Debugging
|
// Debugging
|
||||||
if Debug_gendwarfinl != 0 {
|
if base.Debug.DwarfInl != 0 {
|
||||||
dumpInlCalls(inlcalls)
|
dumpInlCalls(inlcalls)
|
||||||
dumpInlVars(dwVars)
|
dumpInlVars(dwVars)
|
||||||
}
|
}
|
||||||
|
|
@ -205,15 +206,15 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
|
||||||
// abstract function DIE for an inlined routine imported from a
|
// abstract function DIE for an inlined routine imported from a
|
||||||
// previously compiled package.
|
// previously compiled package.
|
||||||
func genAbstractFunc(fn *obj.LSym) {
|
func genAbstractFunc(fn *obj.LSym) {
|
||||||
ifn := Ctxt.DwFixups.GetPrecursorFunc(fn)
|
ifn := base.Ctxt.DwFixups.GetPrecursorFunc(fn)
|
||||||
if ifn == nil {
|
if ifn == nil {
|
||||||
Ctxt.Diag("failed to locate precursor fn for %v", fn)
|
base.Ctxt.Diag("failed to locate precursor fn for %v", fn)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if Debug_gendwarfinl != 0 {
|
if base.Debug.DwarfInl != 0 {
|
||||||
Ctxt.Logf("DwarfAbstractFunc(%v)\n", fn.Name)
|
base.Ctxt.Logf("DwarfAbstractFunc(%v)\n", fn.Name)
|
||||||
}
|
}
|
||||||
Ctxt.DwarfAbstractFunc(ifn, fn, myimportpath)
|
base.Ctxt.DwarfAbstractFunc(ifn, fn, base.Ctxt.Pkgpath)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Undo any versioning performed when a name was written
|
// Undo any versioning performed when a name was written
|
||||||
|
|
@ -235,15 +236,15 @@ func makePreinlineDclMap(fnsym *obj.LSym) map[varPos]int {
|
||||||
dcl := preInliningDcls(fnsym)
|
dcl := preInliningDcls(fnsym)
|
||||||
m := make(map[varPos]int)
|
m := make(map[varPos]int)
|
||||||
for i, n := range dcl {
|
for i, n := range dcl {
|
||||||
pos := Ctxt.InnermostPos(n.Pos)
|
pos := base.Ctxt.InnermostPos(n.Pos())
|
||||||
vp := varPos{
|
vp := varPos{
|
||||||
DeclName: unversion(n.Sym.Name),
|
DeclName: unversion(n.Sym().Name),
|
||||||
DeclFile: pos.RelFilename(),
|
DeclFile: pos.RelFilename(),
|
||||||
DeclLine: pos.RelLine(),
|
DeclLine: pos.RelLine(),
|
||||||
DeclCol: pos.Col(),
|
DeclCol: pos.Col(),
|
||||||
}
|
}
|
||||||
if _, found := m[vp]; found {
|
if _, found := m[vp]; found {
|
||||||
Fatalf("child dcl collision on symbol %s within %v\n", n.Sym.Name, fnsym.Name)
|
base.Fatalf("child dcl collision on symbol %s within %v\n", n.Sym().Name, fnsym.Name)
|
||||||
}
|
}
|
||||||
m[vp] = i
|
m[vp] = i
|
||||||
}
|
}
|
||||||
|
|
@ -260,17 +261,17 @@ func insertInlCall(dwcalls *dwarf.InlCalls, inlIdx int, imap map[int]int) int {
|
||||||
// is one. We do this first so that parents appear before their
|
// is one. We do this first so that parents appear before their
|
||||||
// children in the resulting table.
|
// children in the resulting table.
|
||||||
parCallIdx := -1
|
parCallIdx := -1
|
||||||
parInlIdx := Ctxt.InlTree.Parent(inlIdx)
|
parInlIdx := base.Ctxt.InlTree.Parent(inlIdx)
|
||||||
if parInlIdx >= 0 {
|
if parInlIdx >= 0 {
|
||||||
parCallIdx = insertInlCall(dwcalls, parInlIdx, imap)
|
parCallIdx = insertInlCall(dwcalls, parInlIdx, imap)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create new entry for this inline
|
// Create new entry for this inline
|
||||||
inlinedFn := Ctxt.InlTree.InlinedFunction(inlIdx)
|
inlinedFn := base.Ctxt.InlTree.InlinedFunction(inlIdx)
|
||||||
callXPos := Ctxt.InlTree.CallPos(inlIdx)
|
callXPos := base.Ctxt.InlTree.CallPos(inlIdx)
|
||||||
absFnSym := Ctxt.DwFixups.AbsFuncDwarfSym(inlinedFn)
|
absFnSym := base.Ctxt.DwFixups.AbsFuncDwarfSym(inlinedFn)
|
||||||
pb := Ctxt.PosTable.Pos(callXPos).Base()
|
pb := base.Ctxt.PosTable.Pos(callXPos).Base()
|
||||||
callFileSym := Ctxt.Lookup(pb.SymFilename())
|
callFileSym := base.Ctxt.Lookup(pb.SymFilename())
|
||||||
ic := dwarf.InlCall{
|
ic := dwarf.InlCall{
|
||||||
InlIndex: inlIdx,
|
InlIndex: inlIdx,
|
||||||
CallFile: callFileSym,
|
CallFile: callFileSym,
|
||||||
|
|
@ -298,7 +299,7 @@ func insertInlCall(dwcalls *dwarf.InlCalls, inlIdx int, imap map[int]int) int {
|
||||||
// the index for a node from the inlined body of D will refer to the
|
// the index for a node from the inlined body of D will refer to the
|
||||||
// call to D from C. Whew.
|
// call to D from C. Whew.
|
||||||
func posInlIndex(xpos src.XPos) int {
|
func posInlIndex(xpos src.XPos) int {
|
||||||
pos := Ctxt.PosTable.Pos(xpos)
|
pos := base.Ctxt.PosTable.Pos(xpos)
|
||||||
if b := pos.Base(); b != nil {
|
if b := pos.Base(); b != nil {
|
||||||
ii := b.InliningIndex()
|
ii := b.InliningIndex()
|
||||||
if ii >= 0 {
|
if ii >= 0 {
|
||||||
|
|
@ -324,7 +325,7 @@ func addRange(calls []dwarf.InlCall, start, end int64, ii int, imap map[int]int)
|
||||||
// Append range to correct inlined call
|
// Append range to correct inlined call
|
||||||
callIdx, found := imap[ii]
|
callIdx, found := imap[ii]
|
||||||
if !found {
|
if !found {
|
||||||
Fatalf("can't find inlIndex %d in imap for prog at %d\n", ii, start)
|
base.Fatalf("can't find inlIndex %d in imap for prog at %d\n", ii, start)
|
||||||
}
|
}
|
||||||
call := &calls[callIdx]
|
call := &calls[callIdx]
|
||||||
call.Ranges = append(call.Ranges, dwarf.Range{Start: start, End: end})
|
call.Ranges = append(call.Ranges, dwarf.Range{Start: start, End: end})
|
||||||
|
|
@ -332,23 +333,23 @@ func addRange(calls []dwarf.InlCall, start, end int64, ii int, imap map[int]int)
|
||||||
|
|
||||||
func dumpInlCall(inlcalls dwarf.InlCalls, idx, ilevel int) {
|
func dumpInlCall(inlcalls dwarf.InlCalls, idx, ilevel int) {
|
||||||
for i := 0; i < ilevel; i++ {
|
for i := 0; i < ilevel; i++ {
|
||||||
Ctxt.Logf(" ")
|
base.Ctxt.Logf(" ")
|
||||||
}
|
}
|
||||||
ic := inlcalls.Calls[idx]
|
ic := inlcalls.Calls[idx]
|
||||||
callee := Ctxt.InlTree.InlinedFunction(ic.InlIndex)
|
callee := base.Ctxt.InlTree.InlinedFunction(ic.InlIndex)
|
||||||
Ctxt.Logf(" %d: II:%d (%s) V: (", idx, ic.InlIndex, callee.Name)
|
base.Ctxt.Logf(" %d: II:%d (%s) V: (", idx, ic.InlIndex, callee.Name)
|
||||||
for _, f := range ic.InlVars {
|
for _, f := range ic.InlVars {
|
||||||
Ctxt.Logf(" %v", f.Name)
|
base.Ctxt.Logf(" %v", f.Name)
|
||||||
}
|
}
|
||||||
Ctxt.Logf(" ) C: (")
|
base.Ctxt.Logf(" ) C: (")
|
||||||
for _, k := range ic.Children {
|
for _, k := range ic.Children {
|
||||||
Ctxt.Logf(" %v", k)
|
base.Ctxt.Logf(" %v", k)
|
||||||
}
|
}
|
||||||
Ctxt.Logf(" ) R:")
|
base.Ctxt.Logf(" ) R:")
|
||||||
for _, r := range ic.Ranges {
|
for _, r := range ic.Ranges {
|
||||||
Ctxt.Logf(" [%d,%d)", r.Start, r.End)
|
base.Ctxt.Logf(" [%d,%d)", r.Start, r.End)
|
||||||
}
|
}
|
||||||
Ctxt.Logf("\n")
|
base.Ctxt.Logf("\n")
|
||||||
for _, k := range ic.Children {
|
for _, k := range ic.Children {
|
||||||
dumpInlCall(inlcalls, k, ilevel+1)
|
dumpInlCall(inlcalls, k, ilevel+1)
|
||||||
}
|
}
|
||||||
|
|
@ -373,7 +374,7 @@ func dumpInlVars(dwvars []*dwarf.Var) {
|
||||||
if dwv.IsInAbstract {
|
if dwv.IsInAbstract {
|
||||||
ia = 1
|
ia = 1
|
||||||
}
|
}
|
||||||
Ctxt.Logf("V%d: %s CI:%d II:%d IA:%d %s\n", i, dwv.Name, dwv.ChildIndex, dwv.InlIndex-1, ia, typ)
|
base.Ctxt.Logf("V%d: %s CI:%d II:%d IA:%d %s\n", i, dwv.Name, dwv.ChildIndex, dwv.InlIndex-1, ia, typ)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -410,7 +411,7 @@ func checkInlCall(funcName string, inlCalls dwarf.InlCalls, funcSize int64, idx,
|
||||||
|
|
||||||
// Callee
|
// Callee
|
||||||
ic := inlCalls.Calls[idx]
|
ic := inlCalls.Calls[idx]
|
||||||
callee := Ctxt.InlTree.InlinedFunction(ic.InlIndex).Name
|
callee := base.Ctxt.InlTree.InlinedFunction(ic.InlIndex).Name
|
||||||
calleeRanges := ic.Ranges
|
calleeRanges := ic.Ranges
|
||||||
|
|
||||||
// Caller
|
// Caller
|
||||||
|
|
@ -418,14 +419,14 @@ func checkInlCall(funcName string, inlCalls dwarf.InlCalls, funcSize int64, idx,
|
||||||
parentRanges := []dwarf.Range{dwarf.Range{Start: int64(0), End: funcSize}}
|
parentRanges := []dwarf.Range{dwarf.Range{Start: int64(0), End: funcSize}}
|
||||||
if parentIdx != -1 {
|
if parentIdx != -1 {
|
||||||
pic := inlCalls.Calls[parentIdx]
|
pic := inlCalls.Calls[parentIdx]
|
||||||
caller = Ctxt.InlTree.InlinedFunction(pic.InlIndex).Name
|
caller = base.Ctxt.InlTree.InlinedFunction(pic.InlIndex).Name
|
||||||
parentRanges = pic.Ranges
|
parentRanges = pic.Ranges
|
||||||
}
|
}
|
||||||
|
|
||||||
// Callee ranges contained in caller ranges?
|
// Callee ranges contained in caller ranges?
|
||||||
c, m := rangesContainsAll(parentRanges, calleeRanges)
|
c, m := rangesContainsAll(parentRanges, calleeRanges)
|
||||||
if !c {
|
if !c {
|
||||||
Fatalf("** malformed inlined routine range in %s: caller %s callee %s II=%d %s\n", funcName, caller, callee, idx, m)
|
base.Fatalf("** malformed inlined routine range in %s: caller %s callee %s II=%d %s\n", funcName, caller, callee, idx, m)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Now visit kids
|
// Now visit kids
|
||||||
|
|
|
||||||
|
|
@ -5,40 +5,19 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/syntax"
|
"cmd/compile/internal/syntax"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/obj"
|
"cmd/internal/obj"
|
||||||
"encoding/json"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
|
||||||
"path"
|
"path"
|
||||||
"sort"
|
"sort"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
var embedlist []*Node
|
var embedlist []ir.Node
|
||||||
|
|
||||||
var embedCfg struct {
|
|
||||||
Patterns map[string][]string
|
|
||||||
Files map[string]string
|
|
||||||
}
|
|
||||||
|
|
||||||
func readEmbedCfg(file string) {
|
|
||||||
data, err := ioutil.ReadFile(file)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("-embedcfg: %v", err)
|
|
||||||
}
|
|
||||||
if err := json.Unmarshal(data, &embedCfg); err != nil {
|
|
||||||
log.Fatalf("%s: %v", file, err)
|
|
||||||
}
|
|
||||||
if embedCfg.Patterns == nil {
|
|
||||||
log.Fatalf("%s: invalid embedcfg: missing Patterns", file)
|
|
||||||
}
|
|
||||||
if embedCfg.Files == nil {
|
|
||||||
log.Fatalf("%s: invalid embedcfg: missing Files", file)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
const (
|
||||||
embedUnknown = iota
|
embedUnknown = iota
|
||||||
|
|
@ -49,7 +28,7 @@ const (
|
||||||
|
|
||||||
var numLocalEmbed int
|
var numLocalEmbed int
|
||||||
|
|
||||||
func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []PragmaEmbed) (newExprs []*Node) {
|
func varEmbed(p *noder, names []ir.Node, typ ir.Node, exprs []ir.Node, embeds []PragmaEmbed) (newExprs []ir.Node) {
|
||||||
haveEmbed := false
|
haveEmbed := false
|
||||||
for _, decl := range p.file.DeclList {
|
for _, decl := range p.file.DeclList {
|
||||||
imp, ok := decl.(*syntax.ImportDecl)
|
imp, ok := decl.(*syntax.ImportDecl)
|
||||||
|
|
@ -66,30 +45,30 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma
|
||||||
|
|
||||||
pos := embeds[0].Pos
|
pos := embeds[0].Pos
|
||||||
if !haveEmbed {
|
if !haveEmbed {
|
||||||
p.yyerrorpos(pos, "invalid go:embed: missing import \"embed\"")
|
p.errorAt(pos, "invalid go:embed: missing import \"embed\"")
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
if embedCfg.Patterns == nil {
|
if base.Flag.Cfg.Embed.Patterns == nil {
|
||||||
p.yyerrorpos(pos, "invalid go:embed: build system did not supply embed configuration")
|
p.errorAt(pos, "invalid go:embed: build system did not supply embed configuration")
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
if len(names) > 1 {
|
if len(names) > 1 {
|
||||||
p.yyerrorpos(pos, "go:embed cannot apply to multiple vars")
|
p.errorAt(pos, "go:embed cannot apply to multiple vars")
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
if len(exprs) > 0 {
|
if len(exprs) > 0 {
|
||||||
p.yyerrorpos(pos, "go:embed cannot apply to var with initializer")
|
p.errorAt(pos, "go:embed cannot apply to var with initializer")
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
if typ == nil {
|
if typ == nil {
|
||||||
// Should not happen, since len(exprs) == 0 now.
|
// Should not happen, since len(exprs) == 0 now.
|
||||||
p.yyerrorpos(pos, "go:embed cannot apply to var without type")
|
p.errorAt(pos, "go:embed cannot apply to var without type")
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
|
|
||||||
kind := embedKindApprox(typ)
|
kind := embedKindApprox(typ)
|
||||||
if kind == embedUnknown {
|
if kind == embedUnknown {
|
||||||
p.yyerrorpos(pos, "go:embed cannot apply to var of type %v", typ)
|
p.errorAt(pos, "go:embed cannot apply to var of type %v", typ)
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -98,13 +77,13 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma
|
||||||
var list []string
|
var list []string
|
||||||
for _, e := range embeds {
|
for _, e := range embeds {
|
||||||
for _, pattern := range e.Patterns {
|
for _, pattern := range e.Patterns {
|
||||||
files, ok := embedCfg.Patterns[pattern]
|
files, ok := base.Flag.Cfg.Embed.Patterns[pattern]
|
||||||
if !ok {
|
if !ok {
|
||||||
p.yyerrorpos(e.Pos, "invalid go:embed: build system did not map pattern: %s", pattern)
|
p.errorAt(e.Pos, "invalid go:embed: build system did not map pattern: %s", pattern)
|
||||||
}
|
}
|
||||||
for _, file := range files {
|
for _, file := range files {
|
||||||
if embedCfg.Files[file] == "" {
|
if base.Flag.Cfg.Embed.Files[file] == "" {
|
||||||
p.yyerrorpos(e.Pos, "invalid go:embed: build system did not map file: %s", file)
|
p.errorAt(e.Pos, "invalid go:embed: build system did not map file: %s", file)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if !have[file] {
|
if !have[file] {
|
||||||
|
|
@ -126,23 +105,23 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma
|
||||||
|
|
||||||
if kind == embedString || kind == embedBytes {
|
if kind == embedString || kind == embedBytes {
|
||||||
if len(list) > 1 {
|
if len(list) > 1 {
|
||||||
p.yyerrorpos(pos, "invalid go:embed: multiple files for type %v", typ)
|
p.errorAt(pos, "invalid go:embed: multiple files for type %v", typ)
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
v := names[0]
|
v := names[0]
|
||||||
if dclcontext != PEXTERN {
|
if dclcontext != ir.PEXTERN {
|
||||||
numLocalEmbed++
|
numLocalEmbed++
|
||||||
v = newnamel(v.Pos, lookupN("embed.", numLocalEmbed))
|
v = ir.NewNameAt(v.Pos(), lookupN("embed.", numLocalEmbed))
|
||||||
v.Sym.Def = asTypesNode(v)
|
v.Sym().Def = v
|
||||||
v.Name.Param.Ntype = typ
|
v.Name().Param.Ntype = typ
|
||||||
v.SetClass(PEXTERN)
|
v.SetClass(ir.PEXTERN)
|
||||||
externdcl = append(externdcl, v)
|
externdcl = append(externdcl, v)
|
||||||
exprs = []*Node{v}
|
exprs = []ir.Node{v}
|
||||||
}
|
}
|
||||||
|
|
||||||
v.Name.Param.SetEmbedFiles(list)
|
v.Name().Param.SetEmbedFiles(list)
|
||||||
embedlist = append(embedlist, v)
|
embedlist = append(embedlist, v)
|
||||||
return exprs
|
return exprs
|
||||||
}
|
}
|
||||||
|
|
@ -151,18 +130,18 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma
|
||||||
// The match is approximate because we haven't done scope resolution yet and
|
// The match is approximate because we haven't done scope resolution yet and
|
||||||
// can't tell whether "string" and "byte" really mean "string" and "byte".
|
// can't tell whether "string" and "byte" really mean "string" and "byte".
|
||||||
// The result must be confirmed later, after type checking, using embedKind.
|
// The result must be confirmed later, after type checking, using embedKind.
|
||||||
func embedKindApprox(typ *Node) int {
|
func embedKindApprox(typ ir.Node) int {
|
||||||
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && myimportpath == "embed")) {
|
if typ.Sym() != nil && typ.Sym().Name == "FS" && (typ.Sym().Pkg.Path == "embed" || (typ.Sym().Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
|
||||||
return embedFiles
|
return embedFiles
|
||||||
}
|
}
|
||||||
// These are not guaranteed to match only string and []byte -
|
// These are not guaranteed to match only string and []byte -
|
||||||
// maybe the local package has redefined one of those words.
|
// maybe the local package has redefined one of those words.
|
||||||
// But it's the best we can do now during the noder.
|
// But it's the best we can do now during the noder.
|
||||||
// The stricter check happens later, in initEmbed calling embedKind.
|
// The stricter check happens later, in initEmbed calling embedKind.
|
||||||
if typ.Sym != nil && typ.Sym.Name == "string" && typ.Sym.Pkg == localpkg {
|
if typ.Sym() != nil && typ.Sym().Name == "string" && typ.Sym().Pkg == ir.LocalPkg {
|
||||||
return embedString
|
return embedString
|
||||||
}
|
}
|
||||||
if typ.Op == OTARRAY && typ.Left == nil && typ.Right.Sym != nil && typ.Right.Sym.Name == "byte" && typ.Right.Sym.Pkg == localpkg {
|
if typ.Op() == ir.OTARRAY && typ.Left() == nil && typ.Right().Sym() != nil && typ.Right().Sym().Name == "byte" && typ.Right().Sym().Pkg == ir.LocalPkg {
|
||||||
return embedBytes
|
return embedBytes
|
||||||
}
|
}
|
||||||
return embedUnknown
|
return embedUnknown
|
||||||
|
|
@ -170,10 +149,10 @@ func embedKindApprox(typ *Node) int {
|
||||||
|
|
||||||
// embedKind determines the kind of embedding variable.
|
// embedKind determines the kind of embedding variable.
|
||||||
func embedKind(typ *types.Type) int {
|
func embedKind(typ *types.Type) int {
|
||||||
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && myimportpath == "embed")) {
|
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
|
||||||
return embedFiles
|
return embedFiles
|
||||||
}
|
}
|
||||||
if typ == types.Types[TSTRING] {
|
if typ == types.Types[types.TSTRING] {
|
||||||
return embedString
|
return embedString
|
||||||
}
|
}
|
||||||
if typ.Sym == nil && typ.IsSlice() && typ.Elem() == types.Bytetype {
|
if typ.Sym == nil && typ.IsSlice() && typ.Elem() == types.Bytetype {
|
||||||
|
|
@ -213,19 +192,19 @@ func dumpembeds() {
|
||||||
|
|
||||||
// initEmbed emits the init data for a //go:embed variable,
|
// initEmbed emits the init data for a //go:embed variable,
|
||||||
// which is either a string, a []byte, or an embed.FS.
|
// which is either a string, a []byte, or an embed.FS.
|
||||||
func initEmbed(v *Node) {
|
func initEmbed(v ir.Node) {
|
||||||
files := v.Name.Param.EmbedFiles()
|
files := v.Name().Param.EmbedFiles()
|
||||||
switch kind := embedKind(v.Type); kind {
|
switch kind := embedKind(v.Type()); kind {
|
||||||
case embedUnknown:
|
case embedUnknown:
|
||||||
yyerrorl(v.Pos, "go:embed cannot apply to var of type %v", v.Type)
|
base.ErrorfAt(v.Pos(), "go:embed cannot apply to var of type %v", v.Type())
|
||||||
|
|
||||||
case embedString, embedBytes:
|
case embedString, embedBytes:
|
||||||
file := files[0]
|
file := files[0]
|
||||||
fsym, size, err := fileStringSym(v.Pos, embedCfg.Files[file], kind == embedString, nil)
|
fsym, size, err := fileStringSym(v.Pos(), base.Flag.Cfg.Embed.Files[file], kind == embedString, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
yyerrorl(v.Pos, "embed %s: %v", file, err)
|
base.ErrorfAt(v.Pos(), "embed %s: %v", file, err)
|
||||||
}
|
}
|
||||||
sym := v.Sym.Linksym()
|
sym := v.Sym().Linksym()
|
||||||
off := 0
|
off := 0
|
||||||
off = dsymptr(sym, off, fsym, 0) // data string
|
off = dsymptr(sym, off, fsym, 0) // data string
|
||||||
off = duintptr(sym, off, uint64(size)) // len
|
off = duintptr(sym, off, uint64(size)) // len
|
||||||
|
|
@ -234,7 +213,7 @@ func initEmbed(v *Node) {
|
||||||
}
|
}
|
||||||
|
|
||||||
case embedFiles:
|
case embedFiles:
|
||||||
slicedata := Ctxt.Lookup(`"".` + v.Sym.Name + `.files`)
|
slicedata := base.Ctxt.Lookup(`"".` + v.Sym().Name + `.files`)
|
||||||
off := 0
|
off := 0
|
||||||
// []files pointed at by Files
|
// []files pointed at by Files
|
||||||
off = dsymptr(slicedata, off, slicedata, 3*Widthptr) // []file, pointing just past slice
|
off = dsymptr(slicedata, off, slicedata, 3*Widthptr) // []file, pointing just past slice
|
||||||
|
|
@ -249,7 +228,7 @@ func initEmbed(v *Node) {
|
||||||
const hashSize = 16
|
const hashSize = 16
|
||||||
hash := make([]byte, hashSize)
|
hash := make([]byte, hashSize)
|
||||||
for _, file := range files {
|
for _, file := range files {
|
||||||
off = dsymptr(slicedata, off, stringsym(v.Pos, file), 0) // file string
|
off = dsymptr(slicedata, off, stringsym(v.Pos(), file), 0) // file string
|
||||||
off = duintptr(slicedata, off, uint64(len(file)))
|
off = duintptr(slicedata, off, uint64(len(file)))
|
||||||
if strings.HasSuffix(file, "/") {
|
if strings.HasSuffix(file, "/") {
|
||||||
// entry for directory - no data
|
// entry for directory - no data
|
||||||
|
|
@ -257,17 +236,17 @@ func initEmbed(v *Node) {
|
||||||
off = duintptr(slicedata, off, 0)
|
off = duintptr(slicedata, off, 0)
|
||||||
off += hashSize
|
off += hashSize
|
||||||
} else {
|
} else {
|
||||||
fsym, size, err := fileStringSym(v.Pos, embedCfg.Files[file], true, hash)
|
fsym, size, err := fileStringSym(v.Pos(), base.Flag.Cfg.Embed.Files[file], true, hash)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
yyerrorl(v.Pos, "embed %s: %v", file, err)
|
base.ErrorfAt(v.Pos(), "embed %s: %v", file, err)
|
||||||
}
|
}
|
||||||
off = dsymptr(slicedata, off, fsym, 0) // data string
|
off = dsymptr(slicedata, off, fsym, 0) // data string
|
||||||
off = duintptr(slicedata, off, uint64(size))
|
off = duintptr(slicedata, off, uint64(size))
|
||||||
off = int(slicedata.WriteBytes(Ctxt, int64(off), hash))
|
off = int(slicedata.WriteBytes(base.Ctxt, int64(off), hash))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ggloblsym(slicedata, int32(off), obj.RODATA|obj.LOCAL)
|
ggloblsym(slicedata, int32(off), obj.RODATA|obj.LOCAL)
|
||||||
sym := v.Sym.Linksym()
|
sym := v.Sym().Linksym()
|
||||||
dsymptr(sym, 0, slicedata, 0)
|
dsymptr(sym, 0, slicedata, 0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
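initEmbed above emits the init data for the three shapes a //go:embed variable can take: a single file into a string or []byte (a data pointer plus length), or a file tree into an embed.FS backed by the []file table written to the ".files" symbol. A minimal user-level example, using the embed API as released in Go 1.16 (file names are hypothetical and must exist next to the source):

package main

import (
	"embed"
	"fmt"
)

//go:embed version.txt
var version string // single file, stored as the string/len pair emitted above

//go:embed static
var assets embed.FS // directory tree, stored via the []file table emitted above

func main() {
	fmt.Print(version)
	data, err := assets.ReadFile("static/index.html")
	if err != nil {
		panic(err)
	}
	fmt.Println(len(data))
}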
||||||
|
|
@ -1,472 +0,0 @@
|
||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package gc
|
|
||||||
|
|
||||||
import (
|
|
||||||
"cmd/compile/internal/types"
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
func escapes(all []*Node) {
|
|
||||||
visitBottomUp(all, escapeFuncs)
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
EscFuncUnknown = 0 + iota
|
|
||||||
EscFuncPlanned
|
|
||||||
EscFuncStarted
|
|
||||||
EscFuncTagged
|
|
||||||
)
|
|
||||||
|
|
||||||
func min8(a, b int8) int8 {
|
|
||||||
if a < b {
|
|
||||||
return a
|
|
||||||
}
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
|
|
||||||
func max8(a, b int8) int8 {
|
|
||||||
if a > b {
|
|
||||||
return a
|
|
||||||
}
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
EscUnknown = iota
|
|
||||||
EscNone // Does not escape to heap, result, or parameters.
|
|
||||||
EscHeap // Reachable from the heap
|
|
||||||
EscNever // By construction will not escape.
|
|
||||||
)
|
|
||||||
|
|
||||||
// funcSym returns fn.Func.Nname.Sym if no nils are encountered along the way.
|
|
||||||
func funcSym(fn *Node) *types.Sym {
|
|
||||||
if fn == nil || fn.Func.Nname == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return fn.Func.Nname.Sym
|
|
||||||
}
|
|
||||||
|
|
||||||
// Mark labels that have no backjumps to them as not increasing e.loopdepth.
|
|
||||||
// Walk hasn't generated (goto|label).Left.Sym.Label yet, so we'll cheat
|
|
||||||
// and set it to one of the following two. Then in esc we'll clear it again.
|
|
||||||
var (
|
|
||||||
looping Node
|
|
||||||
nonlooping Node
|
|
||||||
)
|
|
||||||
|
|
||||||
func isSliceSelfAssign(dst, src *Node) bool {
|
|
||||||
// Detect the following special case.
|
|
||||||
//
|
|
||||||
// func (b *Buffer) Foo() {
|
|
||||||
// n, m := ...
|
|
||||||
// b.buf = b.buf[n:m]
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// This assignment is a no-op for escape analysis,
|
|
||||||
// it does not store any new pointers into b that were not already there.
|
|
||||||
// However, without this special case b will escape, because we assign to OIND/ODOTPTR.
|
|
||||||
// Here we assume that the statement will not contain calls,
|
|
||||||
// that is, that order will move any calls to init.
|
|
||||||
// Otherwise base ONAME value could change between the moments
|
|
||||||
// when we evaluate it for dst and for src.
|
|
||||||
|
|
||||||
// dst is ONAME dereference.
|
|
||||||
if dst.Op != ODEREF && dst.Op != ODOTPTR || dst.Left.Op != ONAME {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
// src is a slice operation.
|
|
||||||
switch src.Op {
|
|
||||||
case OSLICE, OSLICE3, OSLICESTR:
|
|
||||||
// OK.
|
|
||||||
case OSLICEARR, OSLICE3ARR:
|
|
||||||
// Since arrays are embedded into containing object,
|
|
||||||
// slice of non-pointer array will introduce a new pointer into b that was not already there
|
|
||||||
// (pointer to b itself). After such assignment, if b contents escape,
|
|
||||||
// b escapes as well. If we ignore such OSLICEARR, we will conclude
|
|
||||||
// that b does not escape when b contents do.
|
|
||||||
//
|
|
||||||
// Pointer to an array is OK since it's not stored inside b directly.
|
|
||||||
// For slicing an array (not pointer to array), there is an implicit OADDR.
|
|
||||||
// We check that to determine non-pointer array slicing.
|
|
||||||
if src.Left.Op == OADDR {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
// slice is applied to ONAME dereference.
|
|
||||||
if src.Left.Op != ODEREF && src.Left.Op != ODOTPTR || src.Left.Left.Op != ONAME {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
// dst and src reference the same base ONAME.
|
|
||||||
return dst.Left == src.Left.Left
|
|
||||||
}
|
|
||||||
|
|
||||||
// isSelfAssign reports whether assignment from src to dst can
|
|
||||||
// be ignored by the escape analysis as it's effectively a self-assignment.
|
|
||||||
func isSelfAssign(dst, src *Node) bool {
|
|
||||||
if isSliceSelfAssign(dst, src) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Detect trivial assignments that assign back to the same object.
|
|
||||||
//
|
|
||||||
// It covers these cases:
|
|
||||||
// val.x = val.y
|
|
||||||
// val.x[i] = val.y[j]
|
|
||||||
// val.x1.x2 = val.x1.y2
|
|
||||||
// ... etc
|
|
||||||
//
|
|
||||||
// These assignments do not change assigned object lifetime.
|
|
||||||
|
|
||||||
if dst == nil || src == nil || dst.Op != src.Op {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
switch dst.Op {
|
|
||||||
case ODOT, ODOTPTR:
|
|
||||||
// Safe trailing accessors that are permitted to differ.
|
|
||||||
case OINDEX:
|
|
||||||
if mayAffectMemory(dst.Right) || mayAffectMemory(src.Right) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// The expression prefix must be both "safe" and identical.
|
|
||||||
return samesafeexpr(dst.Left, src.Left)
|
|
||||||
}
|
|
||||||
|
|
||||||
// mayAffectMemory reports whether evaluation of n may affect the program's
|
|
||||||
// memory state. If the expression can't affect memory state, then it can be
|
|
||||||
// safely ignored by the escape analysis.
|
|
||||||
func mayAffectMemory(n *Node) bool {
|
|
||||||
// We may want to use a list of "memory safe" ops instead of generally
|
|
||||||
// "side-effect free", which would include all calls and other ops that can
|
|
||||||
// allocate or change global state. For now, it's safer to start with the latter.
|
|
||||||
//
|
|
||||||
// We're ignoring things like division by zero, index out of range,
|
|
||||||
// and nil pointer dereference here.
|
|
||||||
switch n.Op {
|
|
||||||
case ONAME, OCLOSUREVAR, OLITERAL:
|
|
||||||
return false
|
|
||||||
|
|
||||||
// Left+Right group.
|
|
||||||
case OINDEX, OADD, OSUB, OOR, OXOR, OMUL, OLSH, ORSH, OAND, OANDNOT, ODIV, OMOD:
|
|
||||||
return mayAffectMemory(n.Left) || mayAffectMemory(n.Right)
|
|
||||||
|
|
||||||
// Left group.
|
|
||||||
case ODOT, ODOTPTR, ODEREF, OCONVNOP, OCONV, OLEN, OCAP,
|
|
||||||
ONOT, OBITNOT, OPLUS, ONEG, OALIGNOF, OOFFSETOF, OSIZEOF:
|
|
||||||
return mayAffectMemory(n.Left)
|
|
||||||
|
|
||||||
default:
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// heapAllocReason returns the reason the given Node must be heap
|
|
||||||
// allocated, or the empty string if it doesn't.
|
|
||||||
func heapAllocReason(n *Node) string {
|
|
||||||
if n.Type == nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parameters are always passed via the stack.
|
|
||||||
if n.Op == ONAME && (n.Class() == PPARAM || n.Class() == PPARAMOUT) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
if n.Type.Width > maxStackVarSize {
|
|
||||||
return "too large for stack"
|
|
||||||
}
|
|
||||||
|
|
||||||
if (n.Op == ONEW || n.Op == OPTRLIT) && n.Type.Elem().Width >= maxImplicitStackVarSize {
|
|
||||||
return "too large for stack"
|
|
||||||
}
|
|
||||||
|
|
||||||
if n.Op == OCLOSURE && closureType(n).Size() >= maxImplicitStackVarSize {
|
|
||||||
return "too large for stack"
|
|
||||||
}
|
|
||||||
if n.Op == OCALLPART && partialCallType(n).Size() >= maxImplicitStackVarSize {
|
|
||||||
return "too large for stack"
|
|
||||||
}
|
|
||||||
|
|
||||||
if n.Op == OMAKESLICE {
|
|
||||||
r := n.Right
|
|
||||||
if r == nil {
|
|
||||||
r = n.Left
|
|
||||||
}
|
|
||||||
if !smallintconst(r) {
|
|
||||||
return "non-constant size"
|
|
||||||
}
|
|
||||||
if t := n.Type; t.Elem().Width != 0 && r.Int64Val() >= maxImplicitStackVarSize/t.Elem().Width {
|
|
||||||
return "too large for stack"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// addrescapes tags node n as having had its address taken
|
|
||||||
// by "increasing" the "value" of n.Esc to EscHeap.
|
|
||||||
// Storage is allocated as necessary to allow the address
|
|
||||||
// to be taken.
|
|
||||||
func addrescapes(n *Node) {
|
|
||||||
switch n.Op {
|
|
||||||
default:
|
|
||||||
// Unexpected Op, probably due to a previous type error. Ignore.
|
|
||||||
|
|
||||||
case ODEREF, ODOTPTR:
|
|
||||||
// Nothing to do.
|
|
||||||
|
|
||||||
case ONAME:
|
|
||||||
if n == nodfp {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
|
|
||||||
// on PPARAM it means something different.
|
|
||||||
if n.Class() == PAUTO && n.Esc == EscNever {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
// If a closure reference escapes, mark the outer variable as escaping.
|
|
||||||
if n.Name.IsClosureVar() {
|
|
||||||
addrescapes(n.Name.Defn)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
if n.Class() != PPARAM && n.Class() != PPARAMOUT && n.Class() != PAUTO {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
// This is a plain parameter or local variable that needs to move to the heap,
|
|
||||||
// but possibly for the function outside the one we're compiling.
|
|
||||||
// That is, if we have:
|
|
||||||
//
|
|
||||||
// func f(x int) {
|
|
||||||
// func() {
|
|
||||||
// global = &x
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// then we're analyzing the inner closure but we need to move x to the
|
|
||||||
// heap in f, not in the inner closure. Flip over to f before calling moveToHeap.
|
|
||||||
oldfn := Curfn
|
|
||||||
Curfn = n.Name.Curfn
|
|
||||||
if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
|
|
||||||
Curfn = Curfn.Func.Closure
|
|
||||||
}
|
|
||||||
ln := lineno
|
|
||||||
lineno = Curfn.Pos
|
|
||||||
moveToHeap(n)
|
|
||||||
Curfn = oldfn
|
|
||||||
lineno = ln
|
|
||||||
|
|
||||||
// ODOTPTR has already been introduced,
|
|
||||||
// so these are the non-pointer ODOT and OINDEX.
|
|
||||||
// In &x[0], if x is a slice, then x does not
|
|
||||||
// escape--the pointer inside x does, but that
|
|
||||||
// is always a heap pointer anyway.
|
|
||||||
case ODOT, OINDEX, OPAREN, OCONVNOP:
|
|
||||||
if !n.Left.Type.IsSlice() {
|
|
||||||
addrescapes(n.Left)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// moveToHeap records the parameter or local variable n as moved to the heap.
|
|
||||||
func moveToHeap(n *Node) {
|
|
||||||
if Debug.r != 0 {
|
|
||||||
Dump("MOVE", n)
|
|
||||||
}
|
|
||||||
if compiling_runtime {
|
|
||||||
yyerror("%v escapes to heap, not allowed in runtime", n)
|
|
||||||
}
|
|
||||||
if n.Class() == PAUTOHEAP {
|
|
||||||
Dump("n", n)
|
|
||||||
Fatalf("double move to heap")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Allocate a local stack variable to hold the pointer to the heap copy.
|
|
||||||
// temp will add it to the function declaration list automatically.
|
|
||||||
heapaddr := temp(types.NewPtr(n.Type))
|
|
||||||
heapaddr.Sym = lookup("&" + n.Sym.Name)
|
|
||||||
heapaddr.Orig.Sym = heapaddr.Sym
|
|
||||||
heapaddr.Pos = n.Pos
|
|
||||||
|
|
||||||
// Unset AutoTemp to persist the &foo variable name through SSA to
|
|
||||||
// liveness analysis.
|
|
||||||
// TODO(mdempsky/drchase): Cleaner solution?
|
|
||||||
heapaddr.Name.SetAutoTemp(false)
|
|
||||||
|
|
||||||
// Parameters have a local stack copy used at function start/end
|
|
||||||
// in addition to the copy in the heap that may live longer than
|
|
||||||
// the function.
|
|
||||||
if n.Class() == PPARAM || n.Class() == PPARAMOUT {
|
|
||||||
if n.Xoffset == BADWIDTH {
|
|
||||||
Fatalf("addrescapes before param assignment")
|
|
||||||
}
|
|
||||||
|
|
||||||
// We rewrite n below to be a heap variable (indirection of heapaddr).
|
|
||||||
// Preserve a copy so we can still write code referring to the original,
|
|
||||||
// and substitute that copy into the function declaration list
|
|
||||||
// so that analyses of the local (on-stack) variables use it.
|
|
||||||
stackcopy := newname(n.Sym)
|
|
||||||
stackcopy.Type = n.Type
|
|
||||||
stackcopy.Xoffset = n.Xoffset
|
|
||||||
stackcopy.SetClass(n.Class())
|
|
||||||
stackcopy.Name.Param.Heapaddr = heapaddr
|
|
||||||
if n.Class() == PPARAMOUT {
|
|
||||||
// Make sure the pointer to the heap copy is kept live throughout the function.
|
|
||||||
// The function could panic at any point, and then a defer could recover.
|
|
||||||
// Thus, we need the pointer to the heap copy always available so the
|
|
||||||
// post-deferreturn code can copy the return value back to the stack.
|
|
||||||
// See issue 16095.
|
|
||||||
heapaddr.Name.SetIsOutputParamHeapAddr(true)
|
|
||||||
}
|
|
||||||
n.Name.Param.Stackcopy = stackcopy
|
|
||||||
|
|
||||||
// Substitute the stackcopy into the function variable list so that
|
|
||||||
// liveness and other analyses use the underlying stack slot
|
|
||||||
// and not the now-pseudo-variable n.
|
|
||||||
found := false
|
|
||||||
for i, d := range Curfn.Func.Dcl {
|
|
||||||
if d == n {
|
|
||||||
Curfn.Func.Dcl[i] = stackcopy
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
// Parameters are before locals, so can stop early.
|
|
||||||
// This limits the search even in functions with many local variables.
|
|
||||||
if d.Class() == PAUTO {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
Fatalf("cannot find %v in local variable list", n)
|
|
||||||
}
|
|
||||||
Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Modify n in place so that uses of n now mean indirection of the heapaddr.
|
|
||||||
n.SetClass(PAUTOHEAP)
|
|
||||||
n.Xoffset = 0
|
|
||||||
n.Name.Param.Heapaddr = heapaddr
|
|
||||||
n.Esc = EscHeap
|
|
||||||
if Debug.m != 0 {
|
|
||||||
Warnl(n.Pos, "moved to heap: %v", n)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
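moveToHeap above is the point where a variable is re-classed as PAUTOHEAP and the "moved to heap" diagnostic is printed. A small program that triggers the decisions described in this file; the effects can be observed with go build -gcflags=-m (diagnostic wording approximate, except where quoted from the source above):

package main

var sink *int

func f(x int) {
	// Storing &x into a global from a closure forces addrescapes to flip over
	// to f and move x to the heap; -m reports "moved to heap: x".
	func() {
		sink = &x
	}()
}

func g(n int) []byte {
	// Non-constant make size: heapAllocReason answers "non-constant size",
	// so the backing store is heap allocated and -m reports it as escaping.
	return make([]byte, n)
}

func main() {
	f(1)
	_ = g(8)
}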
||||||
// This special tag is applied to uintptr variables
|
|
||||||
// that we believe may hold unsafe.Pointers for
|
|
||||||
// calls into assembly functions.
|
|
||||||
const unsafeUintptrTag = "unsafe-uintptr"
|
|
||||||
|
|
||||||
// This special tag is applied to uintptr parameters of functions
|
|
||||||
// marked go:uintptrescapes.
|
|
||||||
const uintptrEscapesTag = "uintptr-escapes"
|
|
||||||
|
|
||||||
func (e *Escape) paramTag(fn *Node, narg int, f *types.Field) string {
|
|
||||||
name := func() string {
|
|
||||||
if f.Sym != nil {
|
|
||||||
return f.Sym.Name
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("arg#%d", narg)
|
|
||||||
}
|
|
||||||
|
|
||||||
if fn.Nbody.Len() == 0 {
|
|
||||||
// Assume that uintptr arguments must be held live across the call.
|
|
||||||
// This is most important for syscall.Syscall.
|
|
||||||
// See golang.org/issue/13372.
|
|
||||||
// This really doesn't have much to do with escape analysis per se,
|
|
||||||
// but we are reusing the ability to annotate an individual function
|
|
||||||
// argument and pass those annotations along to importing code.
|
|
||||||
if f.Type.IsUintptr() {
|
|
||||||
if Debug.m != 0 {
|
|
||||||
Warnl(f.Pos, "assuming %v is unsafe uintptr", name())
|
|
||||||
}
|
|
||||||
return unsafeUintptrTag
|
|
||||||
}
|
|
||||||
|
|
||||||
if !f.Type.HasPointers() { // don't bother tagging for scalars
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
var esc EscLeaks
|
|
||||||
|
|
||||||
// External functions are assumed unsafe, unless
|
|
||||||
// //go:noescape is given before the declaration.
|
|
||||||
if fn.Func.Pragma&Noescape != 0 {
|
|
||||||
if Debug.m != 0 && f.Sym != nil {
|
|
||||||
Warnl(f.Pos, "%v does not escape", name())
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if Debug.m != 0 && f.Sym != nil {
|
|
||||||
Warnl(f.Pos, "leaking param: %v", name())
|
|
||||||
}
|
|
||||||
esc.AddHeap(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
return esc.Encode()
|
|
||||||
}
|
|
||||||
|
|
||||||
if fn.Func.Pragma&UintptrEscapes != 0 {
|
|
||||||
if f.Type.IsUintptr() {
|
|
||||||
if Debug.m != 0 {
|
|
||||||
Warnl(f.Pos, "marking %v as escaping uintptr", name())
|
|
||||||
}
|
|
||||||
return uintptrEscapesTag
|
|
||||||
}
|
|
||||||
if f.IsDDD() && f.Type.Elem().IsUintptr() {
|
|
||||||
// final argument is ...uintptr.
|
|
||||||
if Debug.m != 0 {
|
|
||||||
Warnl(f.Pos, "marking %v as escaping ...uintptr", name())
|
|
||||||
}
|
|
||||||
return uintptrEscapesTag
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !f.Type.HasPointers() { // don't bother tagging for scalars
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unnamed parameters are unused and therefore do not escape.
|
|
||||||
if f.Sym == nil || f.Sym.IsBlank() {
|
|
||||||
var esc EscLeaks
|
|
||||||
return esc.Encode()
|
|
||||||
}
|
|
||||||
|
|
||||||
n := asNode(f.Nname)
|
|
||||||
loc := e.oldLoc(n)
|
|
||||||
esc := loc.paramEsc
|
|
||||||
esc.Optimize()
|
|
||||||
|
|
||||||
if Debug.m != 0 && !loc.escapes {
|
|
||||||
if esc.Empty() {
|
|
||||||
Warnl(f.Pos, "%v does not escape", name())
|
|
||||||
}
|
|
||||||
if x := esc.Heap(); x >= 0 {
|
|
||||||
if x == 0 {
|
|
||||||
Warnl(f.Pos, "leaking param: %v", name())
|
|
||||||
} else {
|
|
||||||
// TODO(mdempsky): Mention level=x like below?
|
|
||||||
Warnl(f.Pos, "leaking param content: %v", name())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for i := 0; i < numEscResults; i++ {
|
|
||||||
if x := esc.Result(i); x >= 0 {
|
|
||||||
res := fn.Type.Results().Field(i).Sym
|
|
||||||
Warnl(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return esc.Encode()
|
|
||||||
}
|
|
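paramTag above is also where the unsafe-uintptr and uintptr-escapes tags get attached: uintptr parameters of body-less (assembly) functions such as syscall.Syscall are assumed to hold pointers and are kept alive across the call. A Linux-only sketch of the pattern those tags exist for (see the issue 13372 reference in the comment above):

//go:build linux

package main

import (
	"syscall"
	"unsafe"
)

func main() {
	buf := make([]byte, 256)
	// The uintptr made from &buf[0] is still treated as a reference to buf for
	// the duration of the call, because Syscall's uintptr parameters carry the
	// unsafe-uintptr tag described above; buf cannot be collected or moved
	// before the kernel writes into it.
	n, _, errno := syscall.Syscall(syscall.SYS_GETCWD,
		uintptr(unsafe.Pointer(&buf[0])), uintptr(len(buf)), 0)
	if errno != 0 {
		panic(errno)
	}
	println("getcwd wrote", n, "bytes")
}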
||||||
File diff suppressed because it is too large
|
|
@ -5,34 +5,33 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/bio"
|
"cmd/internal/bio"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
"fmt"
|
"fmt"
|
||||||
)
|
"go/constant"
|
||||||
|
|
||||||
var (
|
|
||||||
Debug_export int // if set, print debugging information about export data
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func exportf(bout *bio.Writer, format string, args ...interface{}) {
|
func exportf(bout *bio.Writer, format string, args ...interface{}) {
|
||||||
fmt.Fprintf(bout, format, args...)
|
fmt.Fprintf(bout, format, args...)
|
||||||
if Debug_export != 0 {
|
if base.Debug.Export != 0 {
|
||||||
fmt.Printf(format, args...)
|
fmt.Printf(format, args...)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var asmlist []*Node
|
var asmlist []ir.Node
|
||||||
|
|
||||||
// exportsym marks n for export (or reexport).
|
// exportsym marks n for export (or reexport).
|
||||||
func exportsym(n *Node) {
|
func exportsym(n ir.Node) {
|
||||||
if n.Sym.OnExportList() {
|
if n.Sym().OnExportList() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
n.Sym.SetOnExportList(true)
|
n.Sym().SetOnExportList(true)
|
||||||
|
|
||||||
if Debug.E != 0 {
|
if base.Flag.E != 0 {
|
||||||
fmt.Printf("export symbol %v\n", n.Sym)
|
+		fmt.Printf("export symbol %v\n", n.Sym())
 	}
 
 	exportlist = append(exportlist, n)
@@ -42,22 +41,22 @@ func initname(s string) bool {
 	return s == "init"
 }
 
-func autoexport(n *Node, ctxt Class) {
+func autoexport(n ir.Node, ctxt ir.Class) {
-	if n.Sym.Pkg != localpkg {
+	if n.Sym().Pkg != ir.LocalPkg {
 		return
 	}
-	if (ctxt != PEXTERN && ctxt != PFUNC) || dclcontext != PEXTERN {
+	if (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) || dclcontext != ir.PEXTERN {
 		return
 	}
-	if n.Type != nil && n.Type.IsKind(TFUNC) && n.IsMethod() {
+	if n.Type() != nil && n.Type().IsKind(types.TFUNC) && ir.IsMethod(n) {
 		return
 	}
 
-	if types.IsExported(n.Sym.Name) || initname(n.Sym.Name) {
+	if types.IsExported(n.Sym().Name) || initname(n.Sym().Name) {
 		exportsym(n)
 	}
-	if asmhdr != "" && !n.Sym.Asm() {
+	if base.Flag.AsmHdr != "" && !n.Sym().Asm() {
-		n.Sym.SetAsm(true)
+		n.Sym().SetAsm(true)
 		asmlist = append(asmlist, n)
 	}
 }
 
@@ -70,28 +69,28 @@ func dumpexport(bout *bio.Writer) {
 	size := bout.Offset() - off
 	exportf(bout, "\n$$\n")
 
-	if Debug_export != 0 {
+	if base.Debug.Export != 0 {
-		fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", myimportpath, size)
+		fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, size)
 	}
 }
 
-func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node {
+func importsym(ipkg *types.Pkg, s *types.Sym, op ir.Op) ir.Node {
-	n := asNode(s.PkgDef())
+	n := ir.AsNode(s.PkgDef())
 	if n == nil {
 		// iimport should have created a stub ONONAME
 		// declaration for all imported symbols. The exception
 		// is declarations for Runtimepkg, which are populated
 		// by loadsys instead.
 		if s.Pkg != Runtimepkg {
-			Fatalf("missing ONONAME for %v\n", s)
+			base.Fatalf("missing ONONAME for %v\n", s)
 		}
 
 		n = dclname(s)
-		s.SetPkgDef(asTypesNode(n))
+		s.SetPkgDef(n)
 		s.Importdef = ipkg
 	}
-	if n.Op != ONONAME && n.Op != op {
+	if n.Op() != ir.ONONAME && n.Op() != op {
-		redeclare(lineno, s, fmt.Sprintf("during import %q", ipkg.Path))
+		redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
 	}
 	return n
 }
 
@@ -100,57 +99,57 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node {
 // If no such type has been declared yet, a forward declaration is returned.
 // ipkg is the package being imported
 func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
-	n := importsym(ipkg, s, OTYPE)
+	n := importsym(ipkg, s, ir.OTYPE)
-	if n.Op != OTYPE {
+	if n.Op() != ir.OTYPE {
-		t := types.New(TFORW)
+		t := types.New(types.TFORW)
 		t.Sym = s
-		t.Nod = asTypesNode(n)
+		t.Nod = n
 
-		n.Op = OTYPE
+		n.SetOp(ir.OTYPE)
-		n.Pos = pos
+		n.SetPos(pos)
-		n.Type = t
+		n.SetType(t)
-		n.SetClass(PEXTERN)
+		n.SetClass(ir.PEXTERN)
 	}
 
-	t := n.Type
+	t := n.Type()
 	if t == nil {
-		Fatalf("importtype %v", s)
+		base.Fatalf("importtype %v", s)
 	}
 	return t
 }
 
 // importobj declares symbol s as an imported object representable by op.
 // ipkg is the package being imported
-func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op Op, ctxt Class, t *types.Type) *Node {
+func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) ir.Node {
 	n := importsym(ipkg, s, op)
-	if n.Op != ONONAME {
+	if n.Op() != ir.ONONAME {
-		if n.Op == op && (n.Class() != ctxt || !types.Identical(n.Type, t)) {
+		if n.Op() == op && (n.Class() != ctxt || !types.Identical(n.Type(), t)) {
-			redeclare(lineno, s, fmt.Sprintf("during import %q", ipkg.Path))
+			redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
 		}
 		return nil
 	}
 
-	n.Op = op
+	n.SetOp(op)
-	n.Pos = pos
+	n.SetPos(pos)
 	n.SetClass(ctxt)
-	if ctxt == PFUNC {
+	if ctxt == ir.PFUNC {
-		n.Sym.SetFunc(true)
+		n.Sym().SetFunc(true)
 	}
-	n.Type = t
+	n.SetType(t)
 	return n
 }
 
 // importconst declares symbol s as an imported constant with type t and value val.
 // ipkg is the package being imported
-func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val Val) {
+func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) {
-	n := importobj(ipkg, pos, s, OLITERAL, PEXTERN, t)
+	n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
 	if n == nil { // TODO: Check that value matches.
 		return
 	}
 
 	n.SetVal(val)
 
-	if Debug.E != 0 {
+	if base.Flag.E != 0 {
 		fmt.Printf("import const %v %L = %v\n", s, t, val)
 	}
 }
 
@@ -158,15 +157,14 @@ func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val
 // importfunc declares symbol s as an imported function with type t.
 // ipkg is the package being imported
 func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
-	n := importobj(ipkg, pos, s, ONAME, PFUNC, t)
+	n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)
 	if n == nil {
 		return
 	}
 
-	n.Func = new(Func)
+	n.SetFunc(new(ir.Func))
-	t.SetNname(asTypesNode(n))
 
-	if Debug.E != 0 {
+	if base.Flag.E != 0 {
 		fmt.Printf("import func %v%S\n", s, t)
 	}
 }
 
@@ -174,12 +172,12 @@ func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
 // importvar declares symbol s as an imported variable with type t.
 // ipkg is the package being imported
 func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
-	n := importobj(ipkg, pos, s, ONAME, PEXTERN, t)
+	n := importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
 	if n == nil {
 		return
 	}
 
-	if Debug.E != 0 {
+	if base.Flag.E != 0 {
 		fmt.Printf("import var %v %L\n", s, t)
 	}
 }
 
@@ -187,43 +185,43 @@ func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
 // importalias declares symbol s as an imported type alias with type t.
 // ipkg is the package being imported
 func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
-	n := importobj(ipkg, pos, s, OTYPE, PEXTERN, t)
+	n := importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
 	if n == nil {
 		return
 	}
 
-	if Debug.E != 0 {
+	if base.Flag.E != 0 {
 		fmt.Printf("import type %v = %L\n", s, t)
 	}
 }
 
 func dumpasmhdr() {
-	b, err := bio.Create(asmhdr)
+	b, err := bio.Create(base.Flag.AsmHdr)
 	if err != nil {
-		Fatalf("%v", err)
+		base.Fatalf("%v", err)
 	}
-	fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", localpkg.Name)
+	fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", ir.LocalPkg.Name)
 	for _, n := range asmlist {
-		if n.Sym.IsBlank() {
+		if n.Sym().IsBlank() {
 			continue
 		}
-		switch n.Op {
+		switch n.Op() {
-		case OLITERAL:
+		case ir.OLITERAL:
-			t := n.Val().Ctype()
+			t := n.Val().Kind()
-			if t == CTFLT || t == CTCPLX {
+			if t == constant.Float || t == constant.Complex {
 				break
 			}
-			fmt.Fprintf(b, "#define const_%s %#v\n", n.Sym.Name, n.Val())
+			fmt.Fprintf(b, "#define const_%s %#v\n", n.Sym().Name, n.Val())
 
-		case OTYPE:
+		case ir.OTYPE:
-			t := n.Type
+			t := n.Type()
 			if !t.IsStruct() || t.StructType().Map != nil || t.IsFuncArgStruct() {
 				break
 			}
-			fmt.Fprintf(b, "#define %s__size %d\n", n.Sym.Name, int(t.Width))
+			fmt.Fprintf(b, "#define %s__size %d\n", n.Sym().Name, int(t.Width))
 			for _, f := range t.Fields().Slice() {
 				if !f.Sym.IsBlank() {
-					fmt.Fprintf(b, "#define %s_%s %d\n", n.Sym.Name, f.Sym.Name, int(f.Offset))
+					fmt.Fprintf(b, "#define %s_%s %d\n", n.Sym().Name, f.Sym.Name, int(f.Offset))
 				}
 			}
 		}
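The pattern running through this hunk, and through most of the merge, is mechanical: direct field access on the old *Node struct (n.Op, n.Type, n.Sym) becomes accessor calls on the new ir.Node interface (n.Op(), n.SetOp(...), n.Type(), n.Sym()). A minimal, self-contained sketch of that conversion, using made-up Example types rather than the real compiler ones:

```go
package main

import "fmt"

// Node is a small stand-in for the accessor-style interface:
// callers go through getters and setters, so the concrete node
// representation can change behind it.
type Node interface {
	Op() int
	SetOp(op int)
	Name() string
}

// node is one concrete implementation of Node.
type node struct {
	op   int
	name string
}

func (n *node) Op() int      { return n.op }
func (n *node) SetOp(op int) { n.op = op }
func (n *node) Name() string { return n.name }

func main() {
	var n Node = &node{op: 1, name: "x"}
	n.SetOp(2) // accessor write replaces the old direct field write n.op = 2
	fmt.Println(n.Op(), n.Name())
}
```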
@@ -5,6 +5,8 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 	"cmd/internal/src"
 
@@ -28,14 +30,14 @@ func sysvar(name string) *obj.LSym {
 
 // isParamStackCopy reports whether this is the on-stack copy of a
 // function parameter that moved to the heap.
-func (n *Node) isParamStackCopy() bool {
+func isParamStackCopy(n ir.Node) bool {
-	return n.Op == ONAME && (n.Class() == PPARAM || n.Class() == PPARAMOUT) && n.Name.Param.Heapaddr != nil
+	return n.Op() == ir.ONAME && (n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Name().Param.Heapaddr != nil
 }
 
 // isParamHeapCopy reports whether this is the on-heap copy of
 // a function parameter that moved to the heap.
-func (n *Node) isParamHeapCopy() bool {
+func isParamHeapCopy(n ir.Node) bool {
-	return n.Op == ONAME && n.Class() == PAUTOHEAP && n.Name.Param.Stackcopy != nil
+	return n.Op() == ir.ONAME && n.Class() == ir.PAUTOHEAP && n.Name().Param.Stackcopy != nil
 }
 
 // autotmpname returns the name for an autotmp variable numbered n.
 
@@ -50,37 +52,37 @@ func autotmpname(n int) string {
 }
 
 // make a new Node off the books
-func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node {
+func tempAt(pos src.XPos, curfn ir.Node, t *types.Type) ir.Node {
 	if curfn == nil {
-		Fatalf("no curfn for tempAt")
+		base.Fatalf("no curfn for tempAt")
 	}
-	if curfn.Func.Closure != nil && curfn.Op == OCLOSURE {
+	if curfn.Op() == ir.OCLOSURE {
-		Dump("tempAt", curfn)
+		ir.Dump("tempAt", curfn)
-		Fatalf("adding tempAt to wrong closure function")
+		base.Fatalf("adding tempAt to wrong closure function")
 	}
 	if t == nil {
-		Fatalf("tempAt called with nil type")
+		base.Fatalf("tempAt called with nil type")
 	}
 
 	s := &types.Sym{
-		Name: autotmpname(len(curfn.Func.Dcl)),
+		Name: autotmpname(len(curfn.Func().Dcl)),
-		Pkg:  localpkg,
+		Pkg:  ir.LocalPkg,
 	}
-	n := newnamel(pos, s)
+	n := ir.NewNameAt(pos, s)
-	s.Def = asTypesNode(n)
+	s.Def = n
-	n.Type = t
+	n.SetType(t)
-	n.SetClass(PAUTO)
+	n.SetClass(ir.PAUTO)
-	n.Esc = EscNever
+	n.SetEsc(EscNever)
-	n.Name.Curfn = curfn
+	n.Name().Curfn = curfn
-	n.Name.SetUsed(true)
+	n.Name().SetUsed(true)
-	n.Name.SetAutoTemp(true)
+	n.Name().SetAutoTemp(true)
-	curfn.Func.Dcl = append(curfn.Func.Dcl, n)
+	curfn.Func().Dcl = append(curfn.Func().Dcl, n)
 
 	dowidth(t)
 
-	return n.Orig
+	return n.Orig()
 }
 
-func temp(t *types.Type) *Node {
+func temp(t *types.Type) ir.Node {
-	return tempAt(lineno, Curfn, t)
+	return tempAt(base.Pos, Curfn, t)
 }
@@ -5,6 +5,8 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/ssa"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 
@@ -12,10 +14,6 @@ import (
 	"sync"
 )
 
-const (
-	BADWIDTH = types.BADWIDTH
-)
 
 var (
 	// maximum size variable which we will allocate on the stack.
 	// This limit is for explicit variable declarations like "var x T" or "x := ...".
 
@@ -39,7 +37,7 @@ var (
 
 // isRuntimePkg reports whether p is package runtime.
 func isRuntimePkg(p *types.Pkg) bool {
-	if compiling_runtime && p == localpkg {
+	if base.Flag.CompilingRuntime && p == ir.LocalPkg {
 		return true
 	}
 	return p.Path == "runtime"
@@ -47,31 +45,12 @@ func isRuntimePkg(p *types.Pkg) bool {
 
 // isReflectPkg reports whether p is package reflect.
 func isReflectPkg(p *types.Pkg) bool {
-	if p == localpkg {
+	if p == ir.LocalPkg {
-		return myimportpath == "reflect"
+		return base.Ctxt.Pkgpath == "reflect"
 	}
 	return p.Path == "reflect"
 }
 
-// The Class of a variable/function describes the "storage class"
-// of a variable or function. During parsing, storage classes are
-// called declaration contexts.
-type Class uint8
 
-//go:generate stringer -type=Class
-const (
-	Pxxx      Class = iota // no class; used during ssa conversion to indicate pseudo-variables
-	PEXTERN                // global variables
-	PAUTO                  // local variables
-	PAUTOHEAP              // local variables or parameters moved to heap
-	PPARAM                 // input arguments
-	PPARAMOUT              // output results
-	PFUNC                  // global functions
 
-	// Careful: Class is stored in three bits in Node.flags.
-	_ = uint((1 << 3) - iota) // static assert for iota <= (1 << 3)
-)
 
 // Slices in the runtime are represented by three components:
 //
 // type slice struct {
 
@@ -99,40 +78,10 @@ var (
 
 var pragcgobuf [][]string
 
-var outfile string
-var linkobj string
 
-// nerrors is the number of compiler errors reported
-// since the last call to saveerrors.
-var nerrors int
 
-// nsavederrors is the total number of compiler errors
-// reported before the last call to saveerrors.
-var nsavederrors int
 
-var nsyntaxerrors int
 
 var decldepth int32
 
 var nolocalimports bool
 
-// gc debug flags
-type DebugFlags struct {
-	P, B, C, E, G,
-	K, L, N, S,
-	W, e, h, j,
-	l, m, r, w int
-}
 
-var Debug DebugFlags
 
-var debugstr string
 
-var Debug_checknil int
-var Debug_typeassert int
 
-var localpkg *types.Pkg // package being compiled
 
 var inimport bool // set during import
 
 var itabpkg *types.Pkg // fake pkg for itab entries
 
@@ -155,87 +104,53 @@ var gopkg *types.Pkg // pseudo-package for method symbols on anonymous receiver
 
 var zerosize int64
 
-var myimportpath string
+var simtype [types.NTYPE]types.EType
 
-var localimport string
 
-var asmhdr string
 
-var simtype [NTYPE]types.EType
 
 var (
-	isInt [NTYPE]bool
+	isInt [types.NTYPE]bool
-	isFloat [NTYPE]bool
+	isFloat [types.NTYPE]bool
-	isComplex [NTYPE]bool
+	isComplex [types.NTYPE]bool
-	issimple [NTYPE]bool
+	issimple [types.NTYPE]bool
 )
 
 var (
-	okforeq [NTYPE]bool
+	okforeq [types.NTYPE]bool
-	okforadd [NTYPE]bool
+	okforadd [types.NTYPE]bool
-	okforand [NTYPE]bool
+	okforand [types.NTYPE]bool
-	okfornone [NTYPE]bool
+	okfornone [types.NTYPE]bool
-	okforcmp [NTYPE]bool
+	okforcmp [types.NTYPE]bool
-	okforbool [NTYPE]bool
+	okforbool [types.NTYPE]bool
-	okforcap [NTYPE]bool
+	okforcap [types.NTYPE]bool
-	okforlen [NTYPE]bool
+	okforlen [types.NTYPE]bool
-	okforarith [NTYPE]bool
+	okforarith [types.NTYPE]bool
-	okforconst [NTYPE]bool
 )
 
 var (
-	okfor [OEND][]bool
+	okfor [ir.OEND][]bool
-	iscmp [OEND]bool
+	iscmp [ir.OEND]bool
 )
 
-var minintval [NTYPE]*Mpint
+var xtop []ir.Node
 
-var maxintval [NTYPE]*Mpint
+var exportlist []ir.Node
 
-var minfltval [NTYPE]*Mpflt
+var importlist []ir.Node // imported functions and methods with inlinable bodies
 
-var maxfltval [NTYPE]*Mpflt
 
-var xtop []*Node
 
-var exportlist []*Node
 
-var importlist []*Node // imported functions and methods with inlinable bodies
 
 var (
 	funcsymsmu sync.Mutex // protects funcsyms and associated package lookups (see func funcsym)
 	funcsyms []*types.Sym
 )
 
-var dclcontext Class // PEXTERN/PAUTO
+var dclcontext ir.Class // PEXTERN/PAUTO
 
-var Curfn *Node
+var Curfn ir.Node
 
 var Widthptr int
 
 var Widthreg int
 
-var nblank *Node
 
 var typecheckok bool
 
-var compiling_runtime bool
 
-// Compiling the standard library
-var compiling_std bool
 
-var use_writebarrier bool
 
-var pure_go bool
 
-var flag_installsuffix string
 
-var flag_race bool
 
-var flag_msan bool
 
-var flagDWARF bool
 
 // Whether we are adding any sort of code instrumentation, such as
 // when the race detector is enabled.
 var instrumenting bool
 
@@ -243,20 +158,7 @@ var instrumenting bool
 // Whether we are tracking lexical scopes for DWARF.
 var trackScopes bool
 
-// Controls generation of DWARF inlined instance records. Zero
+var nodfp ir.Node
-// disables, 1 emits inlined routines but suppresses var info,
-// and 2 emits inlined routines with tracking of formals/locals.
-var genDwarfInline int
 
-var debuglive int
 
-var Ctxt *obj.Link
 
-var writearchive bool
 
-var nodfp *Node
 
-var disable_checknil int
 
 var autogeneratedPos src.XPos
 
@@ -293,7 +195,7 @@ var thearch Arch
 
 var (
 	staticuint64s,
-	zerobase *Node
+	zerobase ir.Node
 
 	assertE2I,
 	assertE2I2,
@@ -31,6 +31,8 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/ssa"
 	"cmd/internal/obj"
 	"cmd/internal/objabi"
 
@@ -45,7 +47,7 @@ type Progs struct {
 	next *obj.Prog // next Prog
 	pc int64 // virtual PC; count of Progs
 	pos src.XPos // position to use for new Progs
-	curfn *Node // fn these Progs are for
+	curfn ir.Node // fn these Progs are for
 	progcache []obj.Prog // local progcache
 	cacheidx int // first free element of progcache
 
@@ -55,10 +57,10 @@ type Progs struct {
 
 // newProgs returns a new Progs for fn.
 // worker indicates which of the backend workers will use the Progs.
-func newProgs(fn *Node, worker int) *Progs {
+func newProgs(fn ir.Node, worker int) *Progs {
 	pp := new(Progs)
-	if Ctxt.CanReuseProgs() {
+	if base.Ctxt.CanReuseProgs() {
-		sz := len(sharedProgArray) / nBackendWorkers
+		sz := len(sharedProgArray) / base.Flag.LowerC
 		pp.progcache = sharedProgArray[sz*worker : sz*(worker+1)]
 	}
 	pp.curfn = fn
 
@@ -67,7 +69,7 @@ func newProgs(fn *Node, worker int) *Progs {
 	pp.next = pp.NewProg()
 	pp.clearp(pp.next)
 
-	pp.pos = fn.Pos
+	pp.pos = fn.Pos()
 	pp.settext(fn)
 	// PCDATA tables implicitly start with index -1.
 	pp.prevLive = LivenessIndex{-1, false}
 
@@ -83,19 +85,19 @@ func (pp *Progs) NewProg() *obj.Prog {
 	} else {
 		p = new(obj.Prog)
 	}
-	p.Ctxt = Ctxt
+	p.Ctxt = base.Ctxt
 	return p
 }
 
 // Flush converts from pp to machine code.
 func (pp *Progs) Flush() {
 	plist := &obj.Plist{Firstpc: pp.Text, Curfn: pp.curfn}
-	obj.Flushplist(Ctxt, plist, pp.NewProg, myimportpath)
+	obj.Flushplist(base.Ctxt, plist, pp.NewProg, base.Ctxt.Pkgpath)
 }
 
 // Free clears pp and any associated resources.
 func (pp *Progs) Free() {
-	if Ctxt.CanReuseProgs() {
+	if base.Ctxt.CanReuseProgs() {
 		// Clear progs to enable GC and avoid abuse.
 		s := pp.progcache[:pp.cacheidx]
 		for i := range s {
 
@@ -133,8 +135,8 @@ func (pp *Progs) Prog(as obj.As) *obj.Prog {
 	pp.clearp(pp.next)
 	p.Link = pp.next
 
-	if !pp.pos.IsKnown() && Debug.K != 0 {
+	if !pp.pos.IsKnown() && base.Flag.K != 0 {
-		Warn("prog: unknown position (line 0)")
+		base.Warn("prog: unknown position (line 0)")
 	}
 
 	p.As = as
 
@@ -172,17 +174,17 @@ func (pp *Progs) Appendpp(p *obj.Prog, as obj.As, ftype obj.AddrType, freg int16
 	return q
 }
 
-func (pp *Progs) settext(fn *Node) {
+func (pp *Progs) settext(fn ir.Node) {
 	if pp.Text != nil {
-		Fatalf("Progs.settext called twice")
+		base.Fatalf("Progs.settext called twice")
 	}
 	ptxt := pp.Prog(obj.ATEXT)
 	pp.Text = ptxt
 
-	fn.Func.lsym.Func().Text = ptxt
+	fn.Func().LSym.Func().Text = ptxt
 	ptxt.From.Type = obj.TYPE_MEM
 	ptxt.From.Name = obj.NAME_EXTERN
-	ptxt.From.Sym = fn.Func.lsym
+	ptxt.From.Sym = fn.Func().LSym
 }
 
 // initLSym defines f's obj.LSym and initializes it based on the
 
@@ -191,36 +193,36 @@ func (pp *Progs) settext(fn *Node) {
 //
 // initLSym must be called exactly once per function and must be
 // called for both functions with bodies and functions without bodies.
-func (f *Func) initLSym(hasBody bool) {
+func initLSym(f *ir.Func, hasBody bool) {
-	if f.lsym != nil {
+	if f.LSym != nil {
-		Fatalf("Func.initLSym called twice")
+		base.Fatalf("Func.initLSym called twice")
 	}
 
-	if nam := f.Nname; !nam.isBlank() {
+	if nam := f.Nname; !ir.IsBlank(nam) {
-		f.lsym = nam.Sym.Linksym()
+		f.LSym = nam.Sym().Linksym()
-		if f.Pragma&Systemstack != 0 {
+		if f.Pragma&ir.Systemstack != 0 {
-			f.lsym.Set(obj.AttrCFunc, true)
+			f.LSym.Set(obj.AttrCFunc, true)
 		}
 
 		var aliasABI obj.ABI
 		needABIAlias := false
-		defABI, hasDefABI := symabiDefs[f.lsym.Name]
+		defABI, hasDefABI := symabiDefs[f.LSym.Name]
 		if hasDefABI && defABI == obj.ABI0 {
 			// Symbol is defined as ABI0. Create an
 			// Internal -> ABI0 wrapper.
-			f.lsym.SetABI(obj.ABI0)
+			f.LSym.SetABI(obj.ABI0)
 			needABIAlias, aliasABI = true, obj.ABIInternal
 		} else {
 			// No ABI override. Check that the symbol is
 			// using the expected ABI.
 			want := obj.ABIInternal
-			if f.lsym.ABI() != want {
+			if f.LSym.ABI() != want {
-				Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.lsym.Name, f.lsym.ABI(), want)
+				base.Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.LSym.Name, f.LSym.ABI(), want)
 			}
 		}
 
-		isLinknameExported := nam.Sym.Linkname != "" && (hasBody || hasDefABI)
+		isLinknameExported := nam.Sym().Linkname != "" && (hasBody || hasDefABI)
-		if abi, ok := symabiRefs[f.lsym.Name]; (ok && abi == obj.ABI0) || isLinknameExported {
+		if abi, ok := symabiRefs[f.LSym.Name]; (ok && abi == obj.ABI0) || isLinknameExported {
 			// Either 1) this symbol is definitely
 			// referenced as ABI0 from this package; or 2)
 			// this symbol is defined in this package but
 
@@ -232,7 +234,7 @@ func (f *Func) initLSym(hasBody bool) {
 			// since other packages may "pull" symbols
 			// using linkname and we don't want to create
 			// duplicate ABI wrappers.
-			if f.lsym.ABI() != obj.ABI0 {
+			if f.LSym.ABI() != obj.ABI0 {
 				needABIAlias, aliasABI = true, obj.ABI0
 			}
 		}
 
@@ -243,13 +245,13 @@ func (f *Func) initLSym(hasBody bool) {
 			// rather than looking them up. The uniqueness
 			// of f.lsym ensures uniqueness of asym.
 			asym := &obj.LSym{
-				Name: f.lsym.Name,
+				Name: f.LSym.Name,
 				Type: objabi.SABIALIAS,
-				R:    []obj.Reloc{{Sym: f.lsym}}, // 0 size, so "informational"
+				R:    []obj.Reloc{{Sym: f.LSym}}, // 0 size, so "informational"
 			}
 			asym.SetABI(aliasABI)
 			asym.Set(obj.AttrDuplicateOK, true)
-			Ctxt.ABIAliases = append(Ctxt.ABIAliases, asym)
+			base.Ctxt.ABIAliases = append(base.Ctxt.ABIAliases, asym)
 		}
 	}
 
 
@@ -268,7 +270,7 @@ func (f *Func) initLSym(hasBody bool) {
 	if f.Needctxt() {
 		flag |= obj.NEEDCTXT
 	}
-	if f.Pragma&Nosplit != 0 {
+	if f.Pragma&ir.Nosplit != 0 {
 		flag |= obj.NOSPLIT
 	}
 	if f.ReflectMethod() {
 
@@ -278,31 +280,31 @@ func (f *Func) initLSym(hasBody bool) {
 	// Clumsy but important.
 	// See test/recover.go for test cases and src/reflect/value.go
 	// for the actual functions being considered.
-	if myimportpath == "reflect" {
+	if base.Ctxt.Pkgpath == "reflect" {
-		switch f.Nname.Sym.Name {
+		switch f.Nname.Sym().Name {
 		case "callReflect", "callMethod":
 			flag |= obj.WRAPPER
 		}
 	}
 
-	Ctxt.InitTextSym(f.lsym, flag)
+	base.Ctxt.InitTextSym(f.LSym, flag)
 }
 
-func ggloblnod(nam *Node) {
+func ggloblnod(nam ir.Node) {
-	s := nam.Sym.Linksym()
+	s := nam.Sym().Linksym()
 	s.Gotype = ngotype(nam).Linksym()
 	flags := 0
-	if nam.Name.Readonly() {
+	if nam.Name().Readonly() {
 		flags = obj.RODATA
 	}
-	if nam.Type != nil && !nam.Type.HasPointers() {
+	if nam.Type() != nil && !nam.Type().HasPointers() {
 		flags |= obj.NOPTR
 	}
-	Ctxt.Globl(s, nam.Type.Width, flags)
+	base.Ctxt.Globl(s, nam.Type().Width, flags)
-	if nam.Name.LibfuzzerExtraCounter() {
+	if nam.Name().LibfuzzerExtraCounter() {
 		s.Type = objabi.SLIBFUZZER_EXTRA_COUNTER
 	}
-	if nam.Sym.Linkname != "" {
+	if nam.Sym().Linkname != "" {
 		// Make sure linkname'd symbol is non-package. When a symbol is
 		// both imported and linkname'd, s.Pkg may not set to "_" in
 		// types.Sym.Linksym because LSym already exists. Set it here.
 
@@ -315,7 +317,7 @@ func ggloblsym(s *obj.LSym, width int32, flags int16) {
 		s.Set(obj.AttrLocal, true)
 		flags &^= obj.LOCAL
 	}
-	Ctxt.Globl(s, int64(width), int(flags))
+	base.Ctxt.Globl(s, int64(width), int(flags))
 }
 
 func Addrconst(a *obj.Addr, v int64) {
 
@@ -326,7 +328,7 @@ func Addrconst(a *obj.Addr, v int64) {
 
 func Patch(p *obj.Prog, to *obj.Prog) {
 	if p.To.Type != obj.TYPE_BRANCH {
-		Fatalf("patch: not a branch")
+		base.Fatalf("patch: not a branch")
 	}
 	p.To.SetTarget(to)
 	p.To.Offset = to.Pc
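One small idiom worth noting in the newProgs hunk above is how a preallocated shared array is split into per-worker windows: sz := len(sharedProgArray) / base.Flag.LowerC, then sharedProgArray[sz*worker : sz*(worker+1)]. A standalone sketch of that indexing, with illustrative names that are not the compiler's:

```go
package main

import "fmt"

// partition returns worker w's disjoint window of a shared buffer that
// is pre-split evenly among nWorkers, mirroring the slicing in newProgs.
func partition(buf []int, nWorkers, w int) []int {
	sz := len(buf) / nWorkers
	return buf[sz*w : sz*(w+1)]
}

func main() {
	shared := make([]int, 12)
	for w := 0; w < 3; w++ {
		p := partition(shared, 3, w)
		p[0] = w + 1 // each worker writes only inside its own window
	}
	fmt.Println(shared) // [1 0 0 0 2 0 0 0 3 0 0 0]
}
```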
File diff suppressed because it is too large
@@ -5,6 +5,8 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 )
 
@@ -15,8 +17,9 @@ import (
 // the name, normally "pkg.init", is altered to "pkg.init.0".
 var renameinitgen int
 
-// Dummy function for autotmps generated during typechecking.
+// Function collecting autotmps generated during typechecking,
-var dummyInitFn = nod(ODCLFUNC, nil, nil)
+// to be included in the package-level init function.
+var initTodo = ir.Nod(ir.ODCLFUNC, nil, nil)
 
 func renameinit() *types.Sym {
 	s := lookupN("init.", renameinitgen)
 
@@ -30,7 +33,7 @@ func renameinit() *types.Sym {
 // 1) Initialize all of the packages the current package depends on.
 // 2) Initialize all the variables that have initializers.
 // 3) Run any init functions.
-func fninit(n []*Node) {
+func fninit(n []ir.Node) {
 	nf := initOrder(n)
 
 	var deps []*obj.LSym // initTask records for packages the current package depends on
 
@@ -43,16 +46,16 @@ func fninit(n []*Node) {
 
 	// Make a function that contains all the initialization statements.
 	if len(nf) > 0 {
-		lineno = nf[0].Pos // prolog/epilog gets line number of first init stmt
+		base.Pos = nf[0].Pos() // prolog/epilog gets line number of first init stmt
 		initializers := lookup("init")
-		fn := dclfunc(initializers, nod(OTFUNC, nil, nil))
+		fn := dclfunc(initializers, ir.Nod(ir.OTFUNC, nil, nil))
-		for _, dcl := range dummyInitFn.Func.Dcl {
+		for _, dcl := range initTodo.Func().Dcl {
-			dcl.Name.Curfn = fn
+			dcl.Name().Curfn = fn
 		}
-		fn.Func.Dcl = append(fn.Func.Dcl, dummyInitFn.Func.Dcl...)
+		fn.Func().Dcl = append(fn.Func().Dcl, initTodo.Func().Dcl...)
-		dummyInitFn.Func.Dcl = nil
+		initTodo.Func().Dcl = nil
 
-		fn.Nbody.Set(nf)
+		fn.PtrBody().Set(nf)
 		funcbody()
 
 		fn = typecheck(fn, ctxStmt)
 
@@ -62,35 +65,35 @@ func fninit(n []*Node) {
 		xtop = append(xtop, fn)
 		fns = append(fns, initializers.Linksym())
 	}
-	if dummyInitFn.Func.Dcl != nil {
+	if initTodo.Func().Dcl != nil {
-		// We only generate temps using dummyInitFn if there
+		// We only generate temps using initTodo if there
 		// are package-scope initialization statements, so
 		// something's weird if we get here.
-		Fatalf("dummyInitFn still has declarations")
+		base.Fatalf("initTodo still has declarations")
 	}
-	dummyInitFn = nil
+	initTodo = nil
 
 	// Record user init functions.
 	for i := 0; i < renameinitgen; i++ {
 		s := lookupN("init.", i)
-		fn := asNode(s.Def).Name.Defn
+		fn := ir.AsNode(s.Def).Name().Defn
 		// Skip init functions with empty bodies.
-		if fn.Nbody.Len() == 1 && fn.Nbody.First().Op == OEMPTY {
+		if fn.Body().Len() == 1 && fn.Body().First().Op() == ir.OEMPTY {
 			continue
 		}
 		fns = append(fns, s.Linksym())
 	}
 
-	if len(deps) == 0 && len(fns) == 0 && localpkg.Name != "main" && localpkg.Name != "runtime" {
+	if len(deps) == 0 && len(fns) == 0 && ir.LocalPkg.Name != "main" && ir.LocalPkg.Name != "runtime" {
 		return // nothing to initialize
 	}
 
 	// Make an .inittask structure.
 	sym := lookup(".inittask")
-	nn := newname(sym)
+	nn := NewName(sym)
-	nn.Type = types.Types[TUINT8] // dummy type
+	nn.SetType(types.Types[types.TUINT8]) // fake type
-	nn.SetClass(PEXTERN)
+	nn.SetClass(ir.PEXTERN)
-	sym.Def = asTypesNode(nn)
+	sym.Def = nn
 	exportsym(nn)
 	lsym := sym.Linksym()
 	ot := 0
@@ -8,6 +8,10 @@ import (
 	"bytes"
 	"container/heap"
 	"fmt"
 
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
+	"cmd/compile/internal/types"
 )
 
 // Package initialization
 
@@ -60,7 +64,7 @@ const (
 type InitOrder struct {
 	// blocking maps initialization assignments to the assignments
 	// that depend on it.
-	blocking map[*Node][]*Node
+	blocking map[ir.Node][]ir.Node
 
 	// ready is the queue of Pending initialization assignments
 	// that are ready for initialization.
 
@@ -71,45 +75,43 @@ type InitOrder struct {
 // package-level declarations (in declaration order) and outputs the
 // corresponding list of statements to include in the init() function
 // body.
-func initOrder(l []*Node) []*Node {
+func initOrder(l []ir.Node) []ir.Node {
 	s := InitSchedule{
-		initplans: make(map[*Node]*InitPlan),
+		initplans: make(map[ir.Node]*InitPlan),
-		inittemps: make(map[*Node]*Node),
+		inittemps: make(map[ir.Node]ir.Node),
 	}
 	o := InitOrder{
-		blocking: make(map[*Node][]*Node),
+		blocking: make(map[ir.Node][]ir.Node),
 	}
 
 	// Process all package-level assignment in declaration order.
 	for _, n := range l {
-		switch n.Op {
+		switch n.Op() {
-		case OAS, OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
+		case ir.OAS, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
 			o.processAssign(n)
 			o.flushReady(s.staticInit)
-		case ODCLCONST, ODCLFUNC, ODCLTYPE:
+		case ir.ODCLCONST, ir.ODCLFUNC, ir.ODCLTYPE:
 			// nop
 		default:
-			Fatalf("unexpected package-level statement: %v", n)
+			base.Fatalf("unexpected package-level statement: %v", n)
 		}
 	}
 
 	// Check that all assignments are now Done; if not, there must
 	// have been a dependency cycle.
 	for _, n := range l {
-		switch n.Op {
+		switch n.Op() {
-		case OAS, OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
+		case ir.OAS, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
 			if n.Initorder() != InitDone {
 				// If there have already been errors
 				// printed, those errors may have
 				// confused us and there might not be
 				// a loop. Let the user fix those
 				// first.
-				if nerrors > 0 {
+				base.ExitIfErrors()
-					errorexit()
-				}
 
-				findInitLoopAndExit(firstLHS(n), new([]*Node))
+				findInitLoopAndExit(firstLHS(n), new([]ir.Node))
-				Fatalf("initialization unfinished, but failed to identify loop")
+				base.Fatalf("initialization unfinished, but failed to identify loop")
 			}
 		}
 	}
 
@@ -117,34 +119,34 @@ func initOrder(l []*Node) []*Node {
 	// Invariant consistency check. If this is non-zero, then we
 	// should have found a cycle above.
 	if len(o.blocking) != 0 {
-		Fatalf("expected empty map: %v", o.blocking)
+		base.Fatalf("expected empty map: %v", o.blocking)
 	}
 
 	return s.out
 }
 
-func (o *InitOrder) processAssign(n *Node) {
+func (o *InitOrder) processAssign(n ir.Node) {
-	if n.Initorder() != InitNotStarted || n.Xoffset != BADWIDTH {
+	if n.Initorder() != InitNotStarted || n.Offset() != types.BADWIDTH {
-		Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
+		base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset())
 	}
 
 	n.SetInitorder(InitPending)
-	n.Xoffset = 0
+	n.SetOffset(0)
 
 	// Compute number of variable dependencies and build the
 	// inverse dependency ("blocking") graph.
 	for dep := range collectDeps(n, true) {
-		defn := dep.Name.Defn
+		defn := dep.Name().Defn
 		// Skip dependencies on functions (PFUNC) and
 		// variables already initialized (InitDone).
-		if dep.Class() != PEXTERN || defn.Initorder() == InitDone {
+		if dep.Class() != ir.PEXTERN || defn.Initorder() == InitDone {
 			continue
 		}
-		n.Xoffset++
+		n.SetOffset(n.Offset() + 1)
 		o.blocking[defn] = append(o.blocking[defn], n)
 	}
 
-	if n.Xoffset == 0 {
+	if n.Offset() == 0 {
 		heap.Push(&o.ready, n)
 	}
 }
 
@@ -152,23 +154,23 @@ func (o *InitOrder) processAssign(n *Node) {
 // flushReady repeatedly applies initialize to the earliest (in
 // declaration order) assignment ready for initialization and updates
 // the inverse dependency ("blocking") graph.
-func (o *InitOrder) flushReady(initialize func(*Node)) {
+func (o *InitOrder) flushReady(initialize func(ir.Node)) {
 	for o.ready.Len() != 0 {
-		n := heap.Pop(&o.ready).(*Node)
+		n := heap.Pop(&o.ready).(ir.Node)
-		if n.Initorder() != InitPending || n.Xoffset != 0 {
+		if n.Initorder() != InitPending || n.Offset() != 0 {
-			Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
+			base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset())
 		}
 
 		initialize(n)
 		n.SetInitorder(InitDone)
-		n.Xoffset = BADWIDTH
+		n.SetOffset(types.BADWIDTH)
 
 		blocked := o.blocking[n]
 		delete(o.blocking, n)
 
 		for _, m := range blocked {
-			m.Xoffset--
+			m.SetOffset(m.Offset() - 1)
-			if m.Xoffset == 0 {
+			if m.Offset() == 0 {
 				heap.Push(&o.ready, m)
 			}
 		}
 
@@ -181,7 +183,7 @@ func (o *InitOrder) flushReady(initialize func(*Node)) {
 // path points to a slice used for tracking the sequence of
 // variables/functions visited. Using a pointer to a slice allows the
 // slice capacity to grow and limit reallocations.
-func findInitLoopAndExit(n *Node, path *[]*Node) {
+func findInitLoopAndExit(n ir.Node, path *[]ir.Node) {
 	// We implement a simple DFS loop-finding algorithm. This
 	// could be faster, but initialization cycles are rare.
 
 
@@ -194,14 +196,14 @@ func findInitLoopAndExit(n *Node, path *[]*Node) {
 
 	// There might be multiple loops involving n; by sorting
 	// references, we deterministically pick the one reported.
-	refers := collectDeps(n.Name.Defn, false).Sorted(func(ni, nj *Node) bool {
+	refers := collectDeps(n.Name().Defn, false).Sorted(func(ni, nj ir.Node) bool {
-		return ni.Pos.Before(nj.Pos)
+		return ni.Pos().Before(nj.Pos())
 	})
 
 	*path = append(*path, n)
 	for _, ref := range refers {
 		// Short-circuit variables that were initialized.
-		if ref.Class() == PEXTERN && ref.Name.Defn.Initorder() == InitDone {
+		if ref.Class() == ir.PEXTERN && ref.Name().Defn.Initorder() == InitDone {
 			continue
 		}
 
 
@@ -213,12 +215,12 @@ func findInitLoopAndExit(n *Node, path *[]*Node) {
 // reportInitLoopAndExit reports and initialization loop as an error
 // and exits. However, if l is not actually an initialization loop, it
 // simply returns instead.
-func reportInitLoopAndExit(l []*Node) {
+func reportInitLoopAndExit(l []ir.Node) {
 	// Rotate loop so that the earliest variable declaration is at
 	// the start.
 	i := -1
 	for j, n := range l {
-		if n.Class() == PEXTERN && (i == -1 || n.Pos.Before(l[i].Pos)) {
+		if n.Class() == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
 			i = j
 		}
 	}
 
@@ -236,61 +238,60 @@ func reportInitLoopAndExit(l []*Node) {
 	var msg bytes.Buffer
 	fmt.Fprintf(&msg, "initialization loop:\n")
 	for _, n := range l {
-		fmt.Fprintf(&msg, "\t%v: %v refers to\n", n.Line(), n)
+		fmt.Fprintf(&msg, "\t%v: %v refers to\n", ir.Line(n), n)
 	}
-	fmt.Fprintf(&msg, "\t%v: %v", l[0].Line(), l[0])
+	fmt.Fprintf(&msg, "\t%v: %v", ir.Line(l[0]), l[0])
 
-	yyerrorl(l[0].Pos, msg.String())
+	base.ErrorfAt(l[0].Pos(), msg.String())
-	errorexit()
+	base.ErrorExit()
 }
 
 // collectDeps returns all of the package-level functions and
 // variables that declaration n depends on. If transitive is true,
 // then it also includes the transitive dependencies of any depended
 // upon functions (but not variables).
-func collectDeps(n *Node, transitive bool) NodeSet {
+func collectDeps(n ir.Node, transitive bool) ir.NodeSet {
 	d := initDeps{transitive: transitive}
-	switch n.Op {
+	switch n.Op() {
-	case OAS:
+	case ir.OAS:
-		d.inspect(n.Right)
+		d.inspect(n.Right())
-	case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
+	case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
-		d.inspect(n.Right)
+		d.inspect(n.Right())
-	case ODCLFUNC:
+	case ir.ODCLFUNC:
-		d.inspectList(n.Nbody)
+		d.inspectList(n.Body())
	default:
-		Fatalf("unexpected Op: %v", n.Op)
+		base.Fatalf("unexpected Op: %v", n.Op())
 	}
 	return d.seen
 }
 
 type initDeps struct {
 	transitive bool
-	seen NodeSet
+	seen ir.NodeSet
 }
 
-func (d *initDeps) inspect(n *Node) { inspect(n, d.visit) }
+func (d *initDeps) inspect(n ir.Node) { ir.Inspect(n, d.visit) }
-func (d *initDeps) inspectList(l Nodes) { inspectList(l, d.visit) }
+func (d *initDeps) inspectList(l ir.Nodes) { ir.InspectList(l, d.visit) }
 
 // visit calls foundDep on any package-level functions or variables
 // referenced by n, if any.
-func (d *initDeps) visit(n *Node) bool {
+func (d *initDeps) visit(n ir.Node) bool {
-	switch n.Op {
+	switch n.Op() {
-	case ONAME:
+	case ir.OMETHEXPR:
-		if n.isMethodExpression() {
+		d.foundDep(methodExprName(n))
-			d.foundDep(asNode(n.Type.FuncType().Nname))
 		return false
-		}
 
+	case ir.ONAME:
 		switch n.Class() {
-		case PEXTERN, PFUNC:
+		case ir.PEXTERN, ir.PFUNC:
 			d.foundDep(n)
 		}
 
-	case OCLOSURE:
+	case ir.OCLOSURE:
-		d.inspectList(n.Func.Closure.Nbody)
+		d.inspectList(n.Func().Decl.Body())
 
-	case ODOTMETH, OCALLPART:
+	case ir.ODOTMETH, ir.OCALLPART:
-		d.foundDep(asNode(n.Type.FuncType().Nname))
+		d.foundDep(methodExprName(n))
 	}
 
 	return true
 
@@ -298,7 +299,7 @@ func (d *initDeps) visit(n *Node) bool {
 
 // foundDep records that we've found a dependency on n by adding it to
 // seen.
-func (d *initDeps) foundDep(n *Node) {
+func (d *initDeps) foundDep(n ir.Node) {
 	// Can happen with method expressions involving interface
 	// types; e.g., fixedbugs/issue4495.go.
 	if n == nil {
 
@@ -307,7 +308,7 @@ func (d *initDeps) foundDep(n *Node) {
 
 	// Names without definitions aren't interesting as far as
 	// initialization ordering goes.
-	if n.Name.Defn == nil {
+	if n.Name().Defn == nil {
 		return
 	}
 
 
@@ -315,8 +316,8 @@ func (d *initDeps) foundDep(n *Node) {
 		return
 	}
 	d.seen.Add(n)
-	if d.transitive && n.Class() == PFUNC {
+	if d.transitive && n.Class() == ir.PFUNC {
-		d.inspectList(n.Name.Defn.Nbody)
+		d.inspectList(n.Name().Defn.Body())
 	}
 }
 
 
@@ -327,13 +328,15 @@ func (d *initDeps) foundDep(n *Node) {
 // an OAS node's Pos may not be unique. For example, given the
 // declaration "var a, b = f(), g()", "a" must be ordered before "b",
 // but both OAS nodes use the "=" token's position as their Pos.
-type declOrder []*Node
+type declOrder []ir.Node
 
 func (s declOrder) Len() int { return len(s) }
-func (s declOrder) Less(i, j int) bool { return firstLHS(s[i]).Pos.Before(firstLHS(s[j]).Pos) }
+func (s declOrder) Less(i, j int) bool {
+	return firstLHS(s[i]).Pos().Before(firstLHS(s[j]).Pos())
+}
 func (s declOrder) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
 
-func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(*Node)) }
+func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(ir.Node)) }
 func (s *declOrder) Pop() interface{} {
 	n := (*s)[len(*s)-1]
 	*s = (*s)[:len(*s)-1]
 
@@ -342,14 +345,14 @@ func (s *declOrder) Pop() interface{} {
 
 // firstLHS returns the first expression on the left-hand side of
 // assignment n.
-func firstLHS(n *Node) *Node {
+func firstLHS(n ir.Node) ir.Node {
-	switch n.Op {
+	switch n.Op() {
-	case OAS:
+	case ir.OAS:
-		return n.Left
+		return n.Left()
-	case OAS2DOTTYPE, OAS2FUNC, OAS2RECV, OAS2MAPR:
+	case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2RECV, ir.OAS2MAPR:
-		return n.List.First()
+		return n.List().First()
 	}
 
-	Fatalf("unexpected Op: %v", n.Op)
+	base.Fatalf("unexpected Op: %v", n.Op())
 	return nil
 }
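The reworked init-order code above keeps the same scheduling idea it had before the Node/ir.Node conversion: count unfinished dependencies per assignment, keep a blocking map from a declaration to the assignments it blocks, and drain a container/heap-ordered ready queue. A reduced, self-contained sketch of that mechanism follows; item, readyQueue, and the field names here are illustrative stand-ins, not the compiler's types:

```go
package main

import (
	"container/heap"
	"fmt"
)

// item is a stand-in for a package-level assignment: pos is its
// declaration order, ndeps counts unfinished dependencies.
type item struct {
	name  string
	pos   int
	ndeps int
}

// readyQueue orders ready items by declaration position, like declOrder.
type readyQueue []*item

func (q readyQueue) Len() int            { return len(q) }
func (q readyQueue) Less(i, j int) bool  { return q[i].pos < q[j].pos }
func (q readyQueue) Swap(i, j int)       { q[i], q[j] = q[j], q[i] }
func (q *readyQueue) Push(x interface{}) { *q = append(*q, x.(*item)) }
func (q *readyQueue) Pop() interface{} {
	old := *q
	n := len(old)
	it := old[n-1]
	*q = old[:n-1]
	return it
}

func main() {
	b := &item{name: "b", pos: 1}
	a := &item{name: "a", pos: 0, ndeps: 1} // a depends on b
	blocking := map[*item][]*item{b: {a}}   // b blocks a

	var ready readyQueue
	heap.Push(&ready, b) // b has no pending dependencies

	for ready.Len() > 0 {
		n := heap.Pop(&ready).(*item)
		fmt.Println("initialize", n.name)
		for _, m := range blocking[n] {
			m.ndeps--
			if m.ndeps == 0 {
				heap.Push(&ready, m)
			}
		}
		delete(blocking, n)
	}
}
```

Running the sketch initializes b first and then a, which is the declaration-order, dependency-respecting behavior flushReady aims for.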
File diff suppressed because it is too large
@@ -5,6 +5,8 @@
 package gc
 
 import (
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/syntax"
 	"cmd/internal/objabi"
 	"cmd/internal/src"
 
@@ -12,12 +14,8 @@ import (
 	"strings"
 )
 
-// lineno is the source position at the start of the most recently lexed token.
+func makePos(b *src.PosBase, line, col uint) src.XPos {
-// TODO(gri) rename and eventually remove
+	return base.Ctxt.PosTable.XPos(src.MakePos(b, line, col))
-var lineno src.XPos
 
-func makePos(base *src.PosBase, line, col uint) src.XPos {
-	return Ctxt.PosTable.XPos(src.MakePos(base, line, col))
 }
 
 func isSpace(c rune) bool {
 
@@ -28,78 +26,51 @@ func isQuoted(s string) bool {
 	return len(s) >= 2 && s[0] == '"' && s[len(s)-1] == '"'
 }
 
-type PragmaFlag int16
 
 const (
-	// Func pragmas.
+	FuncPragmas = ir.Nointerface |
-	Nointerface PragmaFlag = 1 << iota
+		ir.Noescape |
-	Noescape // func parameters don't escape
+		ir.Norace |
-	Norace // func must not have race detector annotations
+		ir.Nosplit |
-	Nosplit // func should not execute on separate stack
+		ir.Noinline |
-	Noinline // func should not be inlined
+		ir.NoCheckPtr |
-	NoCheckPtr // func should not be instrumented by checkptr
+		ir.CgoUnsafeArgs |
-	CgoUnsafeArgs // treat a pointer to one arg as a pointer to them all
+		ir.UintptrEscapes |
-	UintptrEscapes // pointers converted to uintptr escape
+		ir.Systemstack |
+		ir.Nowritebarrier |
+		ir.Nowritebarrierrec |
+		ir.Yeswritebarrierrec
 
-	// Runtime-only func pragmas.
+	TypePragmas = ir.NotInHeap
-	// See ../../../../runtime/README.md for detailed descriptions.
-	Systemstack // func must run on system stack
-	Nowritebarrier // emit compiler error instead of write barrier
-	Nowritebarrierrec // error on write barrier in this or recursive callees
-	Yeswritebarrierrec // cancels Nowritebarrierrec in this function and callees
 
-	// Runtime and cgo type pragmas
-	NotInHeap // values of this type must not be heap allocated
 
-	// Go command pragmas
-	GoBuildPragma
 )
 
-const (
+func pragmaFlag(verb string) ir.PragmaFlag {
-	FuncPragmas = Nointerface |
-		Noescape |
-		Norace |
-		Nosplit |
-		Noinline |
-		NoCheckPtr |
-		CgoUnsafeArgs |
-		UintptrEscapes |
-		Systemstack |
-		Nowritebarrier |
-		Nowritebarrierrec |
-		Yeswritebarrierrec
 
-	TypePragmas = NotInHeap
-)
 
-func pragmaFlag(verb string) PragmaFlag {
 	switch verb {
 	case "go:build":
-		return GoBuildPragma
+		return ir.GoBuildPragma
 	case "go:nointerface":
 		if objabi.Fieldtrack_enabled != 0 {
-			return Nointerface
+			return ir.Nointerface
 		}
 	case "go:noescape":
-		return Noescape
+		return ir.Noescape
 	case "go:norace":
-		return Norace
+		return ir.Norace
 	case "go:nosplit":
-		return Nosplit | NoCheckPtr // implies NoCheckPtr (see #34972)
+		return ir.Nosplit | ir.NoCheckPtr // implies NoCheckPtr (see #34972)
 	case "go:noinline":
-		return Noinline
+		return ir.Noinline
 	case "go:nocheckptr":
-		return NoCheckPtr
+		return ir.NoCheckPtr
 	case "go:systemstack":
-		return Systemstack
+		return ir.Systemstack
 	case "go:nowritebarrier":
-		return Nowritebarrier
+		return ir.Nowritebarrier
 	case "go:nowritebarrierrec":
-		return Nowritebarrierrec | Nowritebarrier // implies Nowritebarrier
+		return ir.Nowritebarrierrec | ir.Nowritebarrier // implies Nowritebarrier
 	case "go:yeswritebarrierrec":
-		return Yeswritebarrierrec
+		return ir.Yeswritebarrierrec
 	case "go:cgo_unsafe_args":
-		return CgoUnsafeArgs | NoCheckPtr // implies NoCheckPtr (see #34968)
+		return ir.CgoUnsafeArgs | ir.NoCheckPtr // implies NoCheckPtr (see #34968)
 	case "go:uintptrescapes":
 		// For the next function declared in the file
 		// any uintptr arguments may be pointer values
 
@@ -112,9 +83,9 @@ func pragmaFlag(verb string) PragmaFlag {
|
||||||
// call. The conversion to uintptr must appear
|
// call. The conversion to uintptr must appear
|
||||||
// in the argument list.
|
// in the argument list.
|
||||||
// Used in syscall/dll_windows.go.
|
// Used in syscall/dll_windows.go.
|
||||||
return UintptrEscapes
|
return ir.UintptrEscapes
|
||||||
case "go:notinheap":
|
case "go:notinheap":
|
||||||
return NotInHeap
|
return ir.NotInHeap
|
||||||
}
|
}
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
|
||||||
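The pragma constants moved into the ir package above are plain bit flags, and pragmaFlag returns OR-combinations of them (go:nosplit also implying NoCheckPtr, and so on). Here is a small, self-contained sketch of that flag pattern; the pragmaFlag type and constants below are illustrative stand-ins, not the compiler's ir definitions.

```go
package main

import "fmt"

// pragmaFlag is a hypothetical stand-in for ir.PragmaFlag: a bit set
// where each pragma verb contributes one or more bits.
type pragmaFlag int16

const (
	nointerface pragmaFlag = 1 << iota
	noescape
	nosplit
	nocheckptr
	cgoUnsafeArgs
)

// flagFor mirrors the shape of pragmaFlag in the diff: some verbs map
// to a single bit, others imply an extra bit via OR.
func flagFor(verb string) pragmaFlag {
	switch verb {
	case "go:nointerface":
		return nointerface
	case "go:noescape":
		return noescape
	case "go:nosplit":
		return nosplit | nocheckptr // nosplit implies nocheckptr
	case "go:cgo_unsafe_args":
		return cgoUnsafeArgs | nocheckptr
	}
	return 0
}

func main() {
	f := flagFor("go:nosplit")
	fmt.Println(f&nosplit != 0, f&nocheckptr != 0) // true true
}
```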
File diff suppressed because it is too large
|
|
@ -35,7 +35,10 @@ func main() {
|
||||||
fmt.Fprintln(&b)
|
fmt.Fprintln(&b)
|
||||||
fmt.Fprintln(&b, "package gc")
|
fmt.Fprintln(&b, "package gc")
|
||||||
fmt.Fprintln(&b)
|
fmt.Fprintln(&b)
|
||||||
fmt.Fprintln(&b, `import "cmd/compile/internal/types"`)
|
fmt.Fprintln(&b, `import (`)
|
||||||
|
fmt.Fprintln(&b, ` "cmd/compile/internal/ir"`)
|
||||||
|
fmt.Fprintln(&b, ` "cmd/compile/internal/types"`)
|
||||||
|
fmt.Fprintln(&b, `)`)
|
||||||
|
|
||||||
mkbuiltin(&b, "runtime")
|
mkbuiltin(&b, "runtime")
|
||||||
|
|
||||||
|
|
@ -144,12 +147,12 @@ func (i *typeInterner) mktype(t ast.Expr) string {
|
||||||
case "rune":
|
case "rune":
|
||||||
return "types.Runetype"
|
return "types.Runetype"
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("types.Types[T%s]", strings.ToUpper(t.Name))
|
return fmt.Sprintf("types.Types[types.T%s]", strings.ToUpper(t.Name))
|
||||||
case *ast.SelectorExpr:
|
case *ast.SelectorExpr:
|
||||||
if t.X.(*ast.Ident).Name != "unsafe" || t.Sel.Name != "Pointer" {
|
if t.X.(*ast.Ident).Name != "unsafe" || t.Sel.Name != "Pointer" {
|
||||||
log.Fatalf("unhandled type: %#v", t)
|
log.Fatalf("unhandled type: %#v", t)
|
||||||
}
|
}
|
||||||
return "types.Types[TUNSAFEPTR]"
|
return "types.Types[types.TUNSAFEPTR]"
|
||||||
|
|
||||||
case *ast.ArrayType:
|
case *ast.ArrayType:
|
||||||
if t.Len == nil {
|
if t.Len == nil {
|
||||||
|
|
@ -171,7 +174,7 @@ func (i *typeInterner) mktype(t ast.Expr) string {
|
||||||
if len(t.Methods.List) != 0 {
|
if len(t.Methods.List) != 0 {
|
||||||
log.Fatal("non-empty interfaces unsupported")
|
log.Fatal("non-empty interfaces unsupported")
|
||||||
}
|
}
|
||||||
return "types.Types[TINTER]"
|
return "types.Types[types.TINTER]"
|
||||||
case *ast.MapType:
|
case *ast.MapType:
|
||||||
return fmt.Sprintf("types.NewMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
|
return fmt.Sprintf("types.NewMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
|
||||||
case *ast.StarExpr:
|
case *ast.StarExpr:
|
||||||
|
|
@ -204,7 +207,7 @@ func (i *typeInterner) fields(fl *ast.FieldList, keepNames bool) string {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("[]*Node{%s}", strings.Join(res, ", "))
|
return fmt.Sprintf("[]ir.Node{%s}", strings.Join(res, ", "))
|
||||||
}
|
}
|
||||||
|
|
||||||
func intconst(e ast.Expr) int64 {
|
func intconst(e ast.Expr) int64 {
|
||||||
|
|
|
||||||
|
|
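The mkbuiltin changes above follow the usual generator pattern: print Go source line by line into a buffer, including the new ir import, then write the result out. Below is a rough standalone sketch of that pattern that gofmt-s the buffer with go/format before emitting it; the file name and the generated declarations are made up for illustration.

```go
package main

import (
	"bytes"
	"fmt"
	"go/format"
	"log"
	"os"
)

// A sketch of the mkbuiltin-style generator: emit Go source into a
// buffer, then format it before writing. Only the shape matters here.
func main() {
	var b bytes.Buffer
	fmt.Fprintln(&b, "// Code generated by mkexample.go. DO NOT EDIT.")
	fmt.Fprintln(&b)
	fmt.Fprintln(&b, "package gc")
	fmt.Fprintln(&b)
	fmt.Fprintln(&b, `import (`)
	fmt.Fprintln(&b, `	"cmd/compile/internal/ir"`)
	fmt.Fprintln(&b, `	"cmd/compile/internal/types"`)
	fmt.Fprintln(&b, `)`)
	fmt.Fprintln(&b)
	fmt.Fprintln(&b, "var _ = ir.Node(nil)")
	fmt.Fprintln(&b, "var _ *types.Type")

	out, err := format.Source(b.Bytes())
	if err != nil {
		log.Fatal(err) // generated source must at least parse
	}
	os.Stdout.Write(out)
}
```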
@ -1,357 +0,0 @@
|
||||||
// Copyright 2009 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package gc
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"math"
|
|
||||||
"math/big"
|
|
||||||
)
|
|
||||||
|
|
||||||
// implements float arithmetic
|
|
||||||
|
|
||||||
const (
|
|
||||||
// Maximum size in bits for Mpints before signalling
|
|
||||||
// overflow and also mantissa precision for Mpflts.
|
|
||||||
Mpprec = 512
|
|
||||||
// Turn on for constant arithmetic debugging output.
|
|
||||||
Mpdebug = false
|
|
||||||
)
|
|
||||||
|
|
||||||
// Mpflt represents a floating-point constant.
|
|
||||||
type Mpflt struct {
|
|
||||||
Val big.Float
|
|
||||||
}
|
|
||||||
|
|
||||||
// Mpcplx represents a complex constant.
|
|
||||||
type Mpcplx struct {
|
|
||||||
Real Mpflt
|
|
||||||
Imag Mpflt
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use newMpflt (not new(Mpflt)!) to get the correct default precision.
|
|
||||||
func newMpflt() *Mpflt {
|
|
||||||
var a Mpflt
|
|
||||||
a.Val.SetPrec(Mpprec)
|
|
||||||
return &a
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use newMpcmplx (not new(Mpcplx)!) to get the correct default precision.
|
|
||||||
func newMpcmplx() *Mpcplx {
|
|
||||||
var a Mpcplx
|
|
||||||
a.Real = *newMpflt()
|
|
||||||
a.Imag = *newMpflt()
|
|
||||||
return &a
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) SetInt(b *Mpint) {
|
|
||||||
if b.checkOverflow(0) {
|
|
||||||
// sign doesn't really matter but copy anyway
|
|
||||||
a.Val.SetInf(b.Val.Sign() < 0)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
a.Val.SetInt(&b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Set(b *Mpflt) {
|
|
||||||
a.Val.Set(&b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Add(b *Mpflt) {
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf("\n%v + %v", a, b)
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Add(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf(" = %v\n\n", a)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) AddFloat64(c float64) {
|
|
||||||
var b Mpflt
|
|
||||||
|
|
||||||
b.SetFloat64(c)
|
|
||||||
a.Add(&b)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Sub(b *Mpflt) {
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf("\n%v - %v", a, b)
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Sub(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf(" = %v\n\n", a)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Mul(b *Mpflt) {
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf("%v\n * %v\n", a, b)
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Mul(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf(" = %v\n\n", a)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) MulFloat64(c float64) {
|
|
||||||
var b Mpflt
|
|
||||||
|
|
||||||
b.SetFloat64(c)
|
|
||||||
a.Mul(&b)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Quo(b *Mpflt) {
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf("%v\n / %v\n", a, b)
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Quo(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf(" = %v\n\n", a)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Cmp(b *Mpflt) int {
|
|
||||||
return a.Val.Cmp(&b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) CmpFloat64(c float64) int {
|
|
||||||
if c == 0 {
|
|
||||||
return a.Val.Sign() // common case shortcut
|
|
||||||
}
|
|
||||||
return a.Val.Cmp(big.NewFloat(c))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Float64() float64 {
|
|
||||||
x, _ := a.Val.Float64()
|
|
||||||
|
|
||||||
// check for overflow
|
|
||||||
if math.IsInf(x, 0) && nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpflt Float64")
|
|
||||||
}
|
|
||||||
|
|
||||||
return x + 0 // avoid -0 (should not be needed, but be conservative)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Float32() float64 {
|
|
||||||
x32, _ := a.Val.Float32()
|
|
||||||
x := float64(x32)
|
|
||||||
|
|
||||||
// check for overflow
|
|
||||||
if math.IsInf(x, 0) && nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpflt Float32")
|
|
||||||
}
|
|
||||||
|
|
||||||
return x + 0 // avoid -0 (should not be needed, but be conservative)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) SetFloat64(c float64) {
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf("\nconst %g", c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// convert -0 to 0
|
|
||||||
if c == 0 {
|
|
||||||
c = 0
|
|
||||||
}
|
|
||||||
a.Val.SetFloat64(c)
|
|
||||||
|
|
||||||
if Mpdebug {
|
|
||||||
fmt.Printf(" = %v\n", a)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) Neg() {
|
|
||||||
// avoid -0
|
|
||||||
if a.Val.Sign() != 0 {
|
|
||||||
a.Val.Neg(&a.Val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpflt) SetString(as string) {
|
|
||||||
f, _, err := a.Val.Parse(as, 0)
|
|
||||||
if err != nil {
|
|
||||||
yyerror("malformed constant: %s (%v)", as, err)
|
|
||||||
a.Val.SetFloat64(0)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if f.IsInf() {
|
|
||||||
yyerror("constant too large: %s", as)
|
|
||||||
a.Val.SetFloat64(0)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// -0 becomes 0
|
|
||||||
if f.Sign() == 0 && f.Signbit() {
|
|
||||||
a.Val.SetFloat64(0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *Mpflt) String() string {
|
|
||||||
return f.Val.Text('b', 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (fvp *Mpflt) GoString() string {
|
|
||||||
// determine sign
|
|
||||||
sign := ""
|
|
||||||
f := &fvp.Val
|
|
||||||
if f.Sign() < 0 {
|
|
||||||
sign = "-"
|
|
||||||
f = new(big.Float).Abs(f)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Don't try to convert infinities (will not terminate).
|
|
||||||
if f.IsInf() {
|
|
||||||
return sign + "Inf"
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use exact fmt formatting if in float64 range (common case):
|
|
||||||
// proceed if f doesn't underflow to 0 or overflow to inf.
|
|
||||||
if x, _ := f.Float64(); f.Sign() == 0 == (x == 0) && !math.IsInf(x, 0) {
|
|
||||||
return fmt.Sprintf("%s%.6g", sign, x)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Out of float64 range. Do approximate manual to decimal
|
|
||||||
// conversion to avoid precise but possibly slow Float
|
|
||||||
// formatting.
|
|
||||||
// f = mant * 2**exp
|
|
||||||
var mant big.Float
|
|
||||||
exp := f.MantExp(&mant) // 0.5 <= mant < 1.0
|
|
||||||
|
|
||||||
// approximate float64 mantissa m and decimal exponent d
|
|
||||||
// f ~ m * 10**d
|
|
||||||
m, _ := mant.Float64() // 0.5 <= m < 1.0
|
|
||||||
d := float64(exp) * (math.Ln2 / math.Ln10) // log_10(2)
|
|
||||||
|
|
||||||
// adjust m for truncated (integer) decimal exponent e
|
|
||||||
e := int64(d)
|
|
||||||
m *= math.Pow(10, d-float64(e))
|
|
||||||
|
|
||||||
// ensure 1 <= m < 10
|
|
||||||
switch {
|
|
||||||
case m < 1-0.5e-6:
|
|
||||||
// The %.6g format below rounds m to 5 digits after the
|
|
||||||
// decimal point. Make sure that m*10 < 10 even after
|
|
||||||
// rounding up: m*10 + 0.5e-5 < 10 => m < 1 - 0.5e-6.
|
|
||||||
m *= 10
|
|
||||||
e--
|
|
||||||
case m >= 10:
|
|
||||||
m /= 10
|
|
||||||
e++
|
|
||||||
}
|
|
||||||
|
|
||||||
return fmt.Sprintf("%s%.6ge%+d", sign, m, e)
|
|
||||||
}
|
|
||||||
|
|
||||||
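The deleted GoString above approximates an out-of-float64-range value as m·10^e by splitting the number with MantExp and converting the binary exponent with log10(2). Here is a compact standalone sketch of the same approximation using math/big directly; the normalization step is simplified relative to the original's rounding guard.

```go
package main

import (
	"fmt"
	"math"
	"math/big"
)

// approx formats f approximately as "m e" with 1 <= m < 10, using the
// same MantExp + log10(2) trick as the deleted GoString: f = mant*2**exp,
// so log10(f) is roughly log10(mant) + exp*log10(2).
func approx(f *big.Float) string {
	var mant big.Float
	exp := f.MantExp(&mant) // 0.5 <= mant < 1.0

	m, _ := mant.Float64()
	d := float64(exp) * (math.Ln2 / math.Ln10)

	e := int64(d)
	m *= math.Pow(10, d-float64(e))

	// normalize so that 1 <= m < 10
	switch {
	case m < 1:
		m *= 10
		e--
	case m >= 10:
		m /= 10
		e++
	}
	return fmt.Sprintf("%.6ge%+d", m, e)
}

func main() {
	f := new(big.Float).SetPrec(512)
	f.SetString("1e400") // well outside float64 range
	fmt.Println(approx(f))
}
```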
// complex multiply v *= rv
|
|
||||||
// (a, b) * (c, d) = (a*c - b*d, b*c + a*d)
|
|
||||||
func (v *Mpcplx) Mul(rv *Mpcplx) {
|
|
||||||
var ac, ad, bc, bd Mpflt
|
|
||||||
|
|
||||||
ac.Set(&v.Real)
|
|
||||||
ac.Mul(&rv.Real) // ac
|
|
||||||
|
|
||||||
bd.Set(&v.Imag)
|
|
||||||
bd.Mul(&rv.Imag) // bd
|
|
||||||
|
|
||||||
bc.Set(&v.Imag)
|
|
||||||
bc.Mul(&rv.Real) // bc
|
|
||||||
|
|
||||||
ad.Set(&v.Real)
|
|
||||||
ad.Mul(&rv.Imag) // ad
|
|
||||||
|
|
||||||
v.Real.Set(&ac)
|
|
||||||
v.Real.Sub(&bd) // ac-bd
|
|
||||||
|
|
||||||
v.Imag.Set(&bc)
|
|
||||||
v.Imag.Add(&ad) // bc+ad
|
|
||||||
}
|
|
||||||
|
|
||||||
// complex divide v /= rv
|
|
||||||
// (a, b) / (c, d) = ((a*c + b*d), (b*c - a*d))/(c*c + d*d)
|
|
||||||
func (v *Mpcplx) Div(rv *Mpcplx) bool {
|
|
||||||
if rv.Real.CmpFloat64(0) == 0 && rv.Imag.CmpFloat64(0) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
var ac, ad, bc, bd, cc_plus_dd Mpflt
|
|
||||||
|
|
||||||
cc_plus_dd.Set(&rv.Real)
|
|
||||||
cc_plus_dd.Mul(&rv.Real) // cc
|
|
||||||
|
|
||||||
ac.Set(&rv.Imag)
|
|
||||||
ac.Mul(&rv.Imag) // dd
|
|
||||||
cc_plus_dd.Add(&ac) // cc+dd
|
|
||||||
|
|
||||||
// We already checked that c and d are not both zero, but we can't
|
|
||||||
// assume that c²+d² != 0 follows, because for tiny values of c
|
|
||||||
// and/or d c²+d² can underflow to zero. Check that c²+d² is
|
|
||||||
// nonzero, return if it's not.
|
|
||||||
if cc_plus_dd.CmpFloat64(0) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
ac.Set(&v.Real)
|
|
||||||
ac.Mul(&rv.Real) // ac
|
|
||||||
|
|
||||||
bd.Set(&v.Imag)
|
|
||||||
bd.Mul(&rv.Imag) // bd
|
|
||||||
|
|
||||||
bc.Set(&v.Imag)
|
|
||||||
bc.Mul(&rv.Real) // bc
|
|
||||||
|
|
||||||
ad.Set(&v.Real)
|
|
||||||
ad.Mul(&rv.Imag) // ad
|
|
||||||
|
|
||||||
v.Real.Set(&ac)
|
|
||||||
v.Real.Add(&bd) // ac+bd
|
|
||||||
v.Real.Quo(&cc_plus_dd) // (ac+bd)/(cc+dd)
|
|
||||||
|
|
||||||
v.Imag.Set(&bc)
|
|
||||||
v.Imag.Sub(&ad) // bc-ad
|
|
||||||
v.Imag.Quo(&cc_plus_dd) // (bc-ad)/(cc+dd)
|
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
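The deleted Mpcplx.Div uses the identity (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i)/(c²+d²) and bails out when c²+d² is zero, which can happen by underflow even when c and d are not both zero. The sketch below reproduces that computation over big.Float on its own; the helper names and precision handling are illustrative.

```go
package main

import (
	"fmt"
	"math/big"
)

// divComplex divides (a+bi) by (c+di) using the same identity as the
// deleted Mpcplx.Div. It reports false when c²+d² is zero, which (as the
// deleted comment notes) can happen by underflow.
func divComplex(a, b, c, d *big.Float) (re, im *big.Float, ok bool) {
	prec := a.Prec()
	mul := func(x, y *big.Float) *big.Float {
		return new(big.Float).SetPrec(prec).Mul(x, y)
	}
	den := new(big.Float).SetPrec(prec).Add(mul(c, c), mul(d, d))
	if den.Sign() == 0 {
		return nil, nil, false
	}
	re = new(big.Float).SetPrec(prec).Add(mul(a, c), mul(b, d))
	re.Quo(re, den)
	im = new(big.Float).SetPrec(prec).Sub(mul(b, c), mul(a, d))
	im.Quo(im, den)
	return re, im, true
}

func main() {
	f := func(v float64) *big.Float { return new(big.Float).SetPrec(512).SetFloat64(v) }
	re, im, ok := divComplex(f(1), f(2), f(3), f(4)) // (1+2i)/(3+4i)
	fmt.Println(ok, re, im)                          // true 0.44 0.08
}
```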
func (v *Mpcplx) String() string {
|
|
||||||
return fmt.Sprintf("(%s+%si)", v.Real.String(), v.Imag.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *Mpcplx) GoString() string {
|
|
||||||
var re string
|
|
||||||
sre := v.Real.CmpFloat64(0)
|
|
||||||
if sre != 0 {
|
|
||||||
re = v.Real.GoString()
|
|
||||||
}
|
|
||||||
|
|
||||||
var im string
|
|
||||||
sim := v.Imag.CmpFloat64(0)
|
|
||||||
if sim != 0 {
|
|
||||||
im = v.Imag.GoString()
|
|
||||||
}
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case sre == 0 && sim == 0:
|
|
||||||
return "0"
|
|
||||||
case sre == 0:
|
|
||||||
return im + "i"
|
|
||||||
case sim == 0:
|
|
||||||
return re
|
|
||||||
case sim < 0:
|
|
||||||
return fmt.Sprintf("(%s%si)", re, im)
|
|
||||||
default:
|
|
||||||
return fmt.Sprintf("(%s+%si)", re, im)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,304 +0,0 @@
|
||||||
// Copyright 2009 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package gc
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"math/big"
|
|
||||||
)
|
|
||||||
|
|
||||||
// implements integer arithmetic
|
|
||||||
|
|
||||||
// Mpint represents an integer constant.
|
|
||||||
type Mpint struct {
|
|
||||||
Val big.Int
|
|
||||||
Ovf bool // set if Val overflowed compiler limit (sticky)
|
|
||||||
Rune bool // set if syntax indicates default type rune
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) SetOverflow() {
|
|
||||||
a.Val.SetUint64(1) // avoid spurious div-zero errors
|
|
||||||
a.Ovf = true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) checkOverflow(extra int) bool {
|
|
||||||
// We don't need to be precise here, any reasonable upper limit would do.
|
|
||||||
// For now, use existing limit so we pass all the tests unchanged.
|
|
||||||
if a.Val.BitLen()+extra > Mpprec {
|
|
||||||
a.SetOverflow()
|
|
||||||
}
|
|
||||||
return a.Ovf
|
|
||||||
}
|
|
||||||
|
|
||||||
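checkOverflow above caps constants at a fixed bit width (Mpprec) and makes the overflow flag sticky so later operations don't cascade errors. Here is a tiny standalone version of that idea; the 512-bit cap matches Mpprec quoted earlier, but the checkedInt type and its single Mul method are made up for illustration.

```go
package main

import (
	"fmt"
	"math/big"
)

const prec = 512 // bit cap, mirroring Mpprec

type checkedInt struct {
	val big.Int
	ovf bool // sticky: once set, stays set
}

func (a *checkedInt) checkOverflow(extra int) bool {
	if a.val.BitLen()+extra > prec {
		a.val.SetUint64(1) // avoid spurious div-zero on later use
		a.ovf = true
	}
	return a.ovf
}

func (a *checkedInt) Mul(b *checkedInt) {
	if a.ovf || b.ovf {
		a.ovf = true
		return
	}
	a.val.Mul(&a.val, &b.val)
	a.checkOverflow(0)
}

func main() {
	var a, b checkedInt
	a.val.Lsh(big.NewInt(1), 300) // 2**300
	b.val.Lsh(big.NewInt(1), 300)
	a.Mul(&b)          // 2**600 exceeds the 512-bit cap
	fmt.Println(a.ovf) // true
}
```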
func (a *Mpint) Set(b *Mpint) {
|
|
||||||
a.Val.Set(&b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) SetFloat(b *Mpflt) bool {
|
|
||||||
// avoid converting huge floating-point numbers to integers
|
|
||||||
// (2*Mpprec is large enough to permit all tests to pass)
|
|
||||||
if b.Val.MantExp(nil) > 2*Mpprec {
|
|
||||||
a.SetOverflow()
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, acc := b.Val.Int(&a.Val); acc == big.Exact {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
const delta = 16 // a reasonably small number of bits > 0
|
|
||||||
var t big.Float
|
|
||||||
t.SetPrec(Mpprec - delta)
|
|
||||||
|
|
||||||
// try rounding down a little
|
|
||||||
t.SetMode(big.ToZero)
|
|
||||||
t.Set(&b.Val)
|
|
||||||
if _, acc := t.Int(&a.Val); acc == big.Exact {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// try rounding up a little
|
|
||||||
t.SetMode(big.AwayFromZero)
|
|
||||||
t.Set(&b.Val)
|
|
||||||
if _, acc := t.Int(&a.Val); acc == big.Exact {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Ovf = false
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
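SetFloat above converts a big.Float to an integer by first trying an exact conversion and then retrying at slightly reduced precision with the ToZero and AwayFromZero rounding modes. The sketch below reproduces that fallback on its own; the delta value mirrors the original, everything else is illustrative.

```go
package main

import (
	"fmt"
	"math/big"
)

// toInt tries an exact conversion of f to a big.Int, then, like the
// deleted Mpint.SetFloat, retries at slightly lower precision rounding
// toward zero and away from zero.
func toInt(f *big.Float) (*big.Int, bool) {
	var i big.Int
	if _, acc := f.Int(&i); acc == big.Exact {
		return &i, true
	}

	const delta = 16 // a reasonably small number of bits > 0
	var t big.Float
	t.SetPrec(f.Prec() - delta)

	// try rounding down a little
	t.SetMode(big.ToZero)
	t.Set(f)
	if _, acc := t.Int(&i); acc == big.Exact {
		return &i, true
	}

	// try rounding up a little
	t.SetMode(big.AwayFromZero)
	t.Set(f)
	if _, acc := t.Int(&i); acc == big.Exact {
		return &i, true
	}
	return nil, false
}

func main() {
	f := new(big.Float).SetPrec(64).SetFloat64(1 << 20)
	i, ok := toInt(f)
	fmt.Println(i, ok) // 1048576 true
}
```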
func (a *Mpint) Add(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Add")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Add(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if a.checkOverflow(0) {
|
|
||||||
yyerror("constant addition overflow")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Sub(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Sub")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Sub(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if a.checkOverflow(0) {
|
|
||||||
yyerror("constant subtraction overflow")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Mul(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Mul")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Mul(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if a.checkOverflow(0) {
|
|
||||||
yyerror("constant multiplication overflow")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Quo(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Quo")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Quo(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if a.checkOverflow(0) {
|
|
||||||
// can only happen for div-0 which should be checked elsewhere
|
|
||||||
yyerror("constant division overflow")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Rem(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Rem")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Rem(&a.Val, &b.Val)
|
|
||||||
|
|
||||||
if a.checkOverflow(0) {
|
|
||||||
// should never happen
|
|
||||||
yyerror("constant modulo overflow")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Or(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Or")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Or(&a.Val, &b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) And(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint And")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.And(&a.Val, &b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) AndNot(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint AndNot")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.AndNot(&a.Val, &b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Xor(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Xor")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Xor(&a.Val, &b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Lsh(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Lsh")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
s := b.Int64()
|
|
||||||
if s < 0 || s >= Mpprec {
|
|
||||||
msg := "shift count too large"
|
|
||||||
if s < 0 {
|
|
||||||
msg = "invalid negative shift count"
|
|
||||||
}
|
|
||||||
yyerror("%s: %d", msg, s)
|
|
||||||
a.SetInt64(0)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if a.checkOverflow(int(s)) {
|
|
||||||
yyerror("constant shift overflow")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
a.Val.Lsh(&a.Val, uint(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Rsh(b *Mpint) {
|
|
||||||
if a.Ovf || b.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("ovf in Mpint Rsh")
|
|
||||||
}
|
|
||||||
a.SetOverflow()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
s := b.Int64()
|
|
||||||
if s < 0 {
|
|
||||||
yyerror("invalid negative shift count: %d", s)
|
|
||||||
if a.Val.Sign() < 0 {
|
|
||||||
a.SetInt64(-1)
|
|
||||||
} else {
|
|
||||||
a.SetInt64(0)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
a.Val.Rsh(&a.Val, uint(s))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Cmp(b *Mpint) int {
|
|
||||||
return a.Val.Cmp(&b.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) CmpInt64(c int64) int {
|
|
||||||
if c == 0 {
|
|
||||||
return a.Val.Sign() // common case shortcut
|
|
||||||
}
|
|
||||||
return a.Val.Cmp(big.NewInt(c))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Neg() {
|
|
||||||
a.Val.Neg(&a.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) Int64() int64 {
|
|
||||||
if a.Ovf {
|
|
||||||
if nsavederrors+nerrors == 0 {
|
|
||||||
Fatalf("constant overflow")
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
return a.Val.Int64()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) SetInt64(c int64) {
|
|
||||||
a.Val.SetInt64(c)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) SetString(as string) {
|
|
||||||
_, ok := a.Val.SetString(as, 0)
|
|
||||||
if !ok {
|
|
||||||
// The lexer checks for correct syntax of the literal
|
|
||||||
// and reports detailed errors. Thus SetString should
|
|
||||||
// never fail (in theory it might run out of memory,
|
|
||||||
// but that wouldn't be reported as an error here).
|
|
||||||
Fatalf("malformed integer constant: %s", as)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if a.checkOverflow(0) {
|
|
||||||
yyerror("constant too large: %s", as)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) GoString() string {
|
|
||||||
return a.Val.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *Mpint) String() string {
|
|
||||||
return fmt.Sprintf("%#x", &a.Val)
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
|
|
@ -5,6 +5,8 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/bio"
|
"cmd/internal/bio"
|
||||||
"cmd/internal/obj"
|
"cmd/internal/obj"
|
||||||
|
|
@ -13,6 +15,7 @@ import (
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"go/constant"
|
||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"os"
|
"os"
|
||||||
|
|
@ -46,20 +49,20 @@ const (
|
||||||
)
|
)
|
||||||
|
|
||||||
func dumpobj() {
|
func dumpobj() {
|
||||||
if linkobj == "" {
|
if base.Flag.LinkObj == "" {
|
||||||
dumpobj1(outfile, modeCompilerObj|modeLinkerObj)
|
dumpobj1(base.Flag.LowerO, modeCompilerObj|modeLinkerObj)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
dumpobj1(outfile, modeCompilerObj)
|
dumpobj1(base.Flag.LowerO, modeCompilerObj)
|
||||||
dumpobj1(linkobj, modeLinkerObj)
|
dumpobj1(base.Flag.LinkObj, modeLinkerObj)
|
||||||
}
|
}
|
||||||
|
|
||||||
func dumpobj1(outfile string, mode int) {
|
func dumpobj1(outfile string, mode int) {
|
||||||
bout, err := bio.Create(outfile)
|
bout, err := bio.Create(outfile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
flusherrors()
|
base.FlushErrors()
|
||||||
fmt.Printf("can't create %s: %v\n", outfile, err)
|
fmt.Printf("can't create %s: %v\n", outfile, err)
|
||||||
errorexit()
|
base.ErrorExit()
|
||||||
}
|
}
|
||||||
defer bout.Close()
|
defer bout.Close()
|
||||||
bout.WriteString("!<arch>\n")
|
bout.WriteString("!<arch>\n")
|
||||||
|
|
@ -78,10 +81,10 @@ func dumpobj1(outfile string, mode int) {
|
||||||
|
|
||||||
func printObjHeader(bout *bio.Writer) {
|
func printObjHeader(bout *bio.Writer) {
|
||||||
fmt.Fprintf(bout, "go object %s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring())
|
fmt.Fprintf(bout, "go object %s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring())
|
||||||
if buildid != "" {
|
if base.Flag.BuildID != "" {
|
||||||
fmt.Fprintf(bout, "build id %q\n", buildid)
|
fmt.Fprintf(bout, "build id %q\n", base.Flag.BuildID)
|
||||||
}
|
}
|
||||||
if localpkg.Name == "main" {
|
if ir.LocalPkg.Name == "main" {
|
||||||
fmt.Fprintf(bout, "main\n")
|
fmt.Fprintf(bout, "main\n")
|
||||||
}
|
}
|
||||||
fmt.Fprintf(bout, "\n") // header ends with blank line
|
fmt.Fprintf(bout, "\n") // header ends with blank line
|
||||||
|
|
@ -139,7 +142,7 @@ func dumpdata() {
|
||||||
for {
|
for {
|
||||||
for i := xtops; i < len(xtop); i++ {
|
for i := xtops; i < len(xtop); i++ {
|
||||||
n := xtop[i]
|
n := xtop[i]
|
||||||
if n.Op == ODCLFUNC {
|
if n.Op() == ir.ODCLFUNC {
|
||||||
funccompile(n)
|
funccompile(n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -168,13 +171,13 @@ func dumpdata() {
|
||||||
addGCLocals()
|
addGCLocals()
|
||||||
|
|
||||||
if exportlistLen != len(exportlist) {
|
if exportlistLen != len(exportlist) {
|
||||||
Fatalf("exportlist changed after compile functions loop")
|
base.Fatalf("exportlist changed after compile functions loop")
|
||||||
}
|
}
|
||||||
if ptabsLen != len(ptabs) {
|
if ptabsLen != len(ptabs) {
|
||||||
Fatalf("ptabs changed after compile functions loop")
|
base.Fatalf("ptabs changed after compile functions loop")
|
||||||
}
|
}
|
||||||
if itabsLen != len(itabs) {
|
if itabsLen != len(itabs) {
|
||||||
Fatalf("itabs changed after compile functions loop")
|
base.Fatalf("itabs changed after compile functions loop")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -186,27 +189,27 @@ func dumpLinkerObj(bout *bio.Writer) {
|
||||||
fmt.Fprintf(bout, "\n$$\n\n$$\n\n")
|
fmt.Fprintf(bout, "\n$$\n\n$$\n\n")
|
||||||
fmt.Fprintf(bout, "\n$$ // cgo\n")
|
fmt.Fprintf(bout, "\n$$ // cgo\n")
|
||||||
if err := json.NewEncoder(bout).Encode(pragcgobuf); err != nil {
|
if err := json.NewEncoder(bout).Encode(pragcgobuf); err != nil {
|
||||||
Fatalf("serializing pragcgobuf: %v", err)
|
base.Fatalf("serializing pragcgobuf: %v", err)
|
||||||
}
|
}
|
||||||
fmt.Fprintf(bout, "\n$$\n\n")
|
fmt.Fprintf(bout, "\n$$\n\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Fprintf(bout, "\n!\n")
|
fmt.Fprintf(bout, "\n!\n")
|
||||||
|
|
||||||
obj.WriteObjFile(Ctxt, bout)
|
obj.WriteObjFile(base.Ctxt, bout)
|
||||||
}
|
}
|
||||||
|
|
||||||
func addptabs() {
|
func addptabs() {
|
||||||
if !Ctxt.Flag_dynlink || localpkg.Name != "main" {
|
if !base.Ctxt.Flag_dynlink || ir.LocalPkg.Name != "main" {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
for _, exportn := range exportlist {
|
for _, exportn := range exportlist {
|
||||||
s := exportn.Sym
|
s := exportn.Sym()
|
||||||
n := asNode(s.Def)
|
n := ir.AsNode(s.Def)
|
||||||
if n == nil {
|
if n == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if n.Op != ONAME {
|
if n.Op() != ir.ONAME {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if !types.IsExported(s.Name) {
|
if !types.IsExported(s.Name) {
|
||||||
|
|
@ -215,76 +218,61 @@ func addptabs() {
|
||||||
if s.Pkg.Name != "main" {
|
if s.Pkg.Name != "main" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if n.Type.Etype == TFUNC && n.Class() == PFUNC {
|
if n.Type().Etype == types.TFUNC && n.Class() == ir.PFUNC {
|
||||||
// function
|
// function
|
||||||
ptabs = append(ptabs, ptabEntry{s: s, t: asNode(s.Def).Type})
|
ptabs = append(ptabs, ptabEntry{s: s, t: ir.AsNode(s.Def).Type()})
|
||||||
} else {
|
} else {
|
||||||
// variable
|
// variable
|
||||||
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(asNode(s.Def).Type)})
|
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(ir.AsNode(s.Def).Type())})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func dumpGlobal(n *Node) {
|
func dumpGlobal(n ir.Node) {
|
||||||
if n.Type == nil {
|
if n.Type() == nil {
|
||||||
Fatalf("external %v nil type\n", n)
|
base.Fatalf("external %v nil type\n", n)
|
||||||
}
|
}
|
||||||
if n.Class() == PFUNC {
|
if n.Class() == ir.PFUNC {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if n.Sym.Pkg != localpkg {
|
if n.Sym().Pkg != ir.LocalPkg {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
dowidth(n.Type)
|
dowidth(n.Type())
|
||||||
ggloblnod(n)
|
ggloblnod(n)
|
||||||
}
|
}
|
||||||
|
|
||||||
func dumpGlobalConst(n *Node) {
|
func dumpGlobalConst(n ir.Node) {
|
||||||
// only export typed constants
|
// only export typed constants
|
||||||
t := n.Type
|
t := n.Type()
|
||||||
if t == nil {
|
if t == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if n.Sym.Pkg != localpkg {
|
if n.Sym().Pkg != ir.LocalPkg {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// only export integer constants for now
|
// only export integer constants for now
|
||||||
switch t.Etype {
|
if !t.IsInteger() {
|
||||||
case TINT8:
|
|
||||||
case TINT16:
|
|
||||||
case TINT32:
|
|
||||||
case TINT64:
|
|
||||||
case TINT:
|
|
||||||
case TUINT8:
|
|
||||||
case TUINT16:
|
|
||||||
case TUINT32:
|
|
||||||
case TUINT64:
|
|
||||||
case TUINT:
|
|
||||||
case TUINTPTR:
|
|
||||||
// ok
|
|
||||||
case TIDEAL:
|
|
||||||
if !Isconst(n, CTINT) {
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
x := n.Val().U.(*Mpint)
|
v := n.Val()
|
||||||
if x.Cmp(minintval[TINT]) < 0 || x.Cmp(maxintval[TINT]) > 0 {
|
if t.IsUntyped() {
|
||||||
|
// Export untyped integers as int (if they fit).
|
||||||
|
t = types.Types[types.TINT]
|
||||||
|
if doesoverflow(v, t) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Ideal integers we export as int (if they fit).
|
|
||||||
t = types.Types[TINT]
|
|
||||||
default:
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
Ctxt.DwarfIntConst(myimportpath, n.Sym.Name, typesymname(t), n.Int64Val())
|
base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym().Name, typesymname(t), ir.Int64Val(t, v))
|
||||||
}
|
}
|
||||||
|
|
||||||
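The rewritten dumpGlobalConst exports an untyped integer constant as int only if it fits, checking the go/constant value with doesoverflow before retyping it. Below is a standalone approximation of that range check; it assumes a 64-bit int and uses only the public go/constant API, not the compiler's doesoverflow.

```go
package main

import (
	"fmt"
	"go/constant"
	"go/token"
	"math"
)

// fitsInt reports whether an untyped integer constant can be exported
// as a plain int, roughly the check dumpGlobalConst performs before
// switching the constant's type to int. Assumes int is 64 bits.
func fitsInt(v constant.Value) bool {
	if v.Kind() != constant.Int {
		return false
	}
	lo := constant.MakeInt64(math.MinInt64)
	hi := constant.MakeInt64(math.MaxInt64)
	return constant.Compare(lo, token.LEQ, v) && constant.Compare(v, token.LEQ, hi)
}

func main() {
	small := constant.MakeInt64(42)
	huge := constant.Shift(constant.MakeInt64(1), token.SHL, 100) // 2**100
	fmt.Println(fitsInt(small), fitsInt(huge))                    // true false
}
```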
func dumpglobls() {
|
func dumpglobls() {
|
||||||
// add globals
|
// add globals
|
||||||
for _, n := range externdcl {
|
for _, n := range externdcl {
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
case ONAME:
|
case ir.ONAME:
|
||||||
dumpGlobal(n)
|
dumpGlobal(n)
|
||||||
case OLITERAL:
|
case ir.OLITERAL:
|
||||||
dumpGlobalConst(n)
|
dumpGlobalConst(n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -307,7 +295,7 @@ func dumpglobls() {
|
||||||
// This is done during the sequential phase after compilation, since
|
// This is done during the sequential phase after compilation, since
|
||||||
// global symbols can't be declared during parallel compilation.
|
// global symbols can't be declared during parallel compilation.
|
||||||
func addGCLocals() {
|
func addGCLocals() {
|
||||||
for _, s := range Ctxt.Text {
|
for _, s := range base.Ctxt.Text {
|
||||||
fn := s.Func()
|
fn := s.Func()
|
||||||
if fn == nil {
|
if fn == nil {
|
||||||
continue
|
continue
|
||||||
|
|
@ -330,9 +318,9 @@ func addGCLocals() {
|
||||||
|
|
||||||
func duintxx(s *obj.LSym, off int, v uint64, wid int) int {
|
func duintxx(s *obj.LSym, off int, v uint64, wid int) int {
|
||||||
if off&(wid-1) != 0 {
|
if off&(wid-1) != 0 {
|
||||||
Fatalf("duintxxLSym: misaligned: v=%d wid=%d off=%d", v, wid, off)
|
base.Fatalf("duintxxLSym: misaligned: v=%d wid=%d off=%d", v, wid, off)
|
||||||
}
|
}
|
||||||
s.WriteInt(Ctxt, int64(off), wid, int64(v))
|
s.WriteInt(base.Ctxt, int64(off), wid, int64(v))
|
||||||
return off + wid
|
return off + wid
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -383,7 +371,7 @@ func stringsym(pos src.XPos, s string) (data *obj.LSym) {
|
||||||
symname = strconv.Quote(s)
|
symname = strconv.Quote(s)
|
||||||
}
|
}
|
||||||
|
|
||||||
symdata := Ctxt.Lookup(stringSymPrefix + symname)
|
symdata := base.Ctxt.Lookup(stringSymPrefix + symname)
|
||||||
if !symdata.OnList() {
|
if !symdata.OnList() {
|
||||||
off := dstringdata(symdata, 0, s, pos, "string")
|
off := dstringdata(symdata, 0, s, pos, "string")
|
||||||
ggloblsym(symdata, int32(off), obj.DUPOK|obj.RODATA|obj.LOCAL)
|
ggloblsym(symdata, int32(off), obj.DUPOK|obj.RODATA|obj.LOCAL)
|
||||||
|
|
@ -426,7 +414,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
|
||||||
if readonly {
|
if readonly {
|
||||||
sym = stringsym(pos, string(data))
|
sym = stringsym(pos, string(data))
|
||||||
} else {
|
} else {
|
||||||
sym = slicedata(pos, string(data)).Sym.Linksym()
|
sym = slicedata(pos, string(data)).Sym().Linksym()
|
||||||
}
|
}
|
||||||
if len(hash) > 0 {
|
if len(hash) > 0 {
|
||||||
sum := sha256.Sum256(data)
|
sum := sha256.Sum256(data)
|
||||||
|
|
@ -461,7 +449,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
|
||||||
var symdata *obj.LSym
|
var symdata *obj.LSym
|
||||||
if readonly {
|
if readonly {
|
||||||
symname := fmt.Sprintf(stringSymPattern, size, sum)
|
symname := fmt.Sprintf(stringSymPattern, size, sum)
|
||||||
symdata = Ctxt.Lookup(stringSymPrefix + symname)
|
symdata = base.Ctxt.Lookup(stringSymPrefix + symname)
|
||||||
if !symdata.OnList() {
|
if !symdata.OnList() {
|
||||||
info := symdata.NewFileInfo()
|
info := symdata.NewFileInfo()
|
||||||
info.Name = file
|
info.Name = file
|
||||||
|
|
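fileStringSym above keys read-only file data by its size and a sha256 sum, so identical contents collapse to one symbol. Here is a minimal sketch of that content-addressed naming; the exact name pattern is invented for illustration and is not the linker's real format.

```go
package main

import (
	"crypto/sha256"
	"fmt"
)

// symName derives a deduplicating symbol name from the content itself:
// identical data always yields the same name, so the object writer can
// mark the symbol dup-ok and let the linker keep just one copy.
func symName(data []byte) string {
	sum := sha256.Sum256(data)
	return fmt.Sprintf("go.string.%d.%x", len(data), sum[:16])
}

func main() {
	a := symName([]byte("hello"))
	b := symName([]byte("hello"))
	c := symName([]byte("world"))
	fmt.Println(a == b, a == c) // true false
}
```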
@ -474,7 +462,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
|
||||||
} else {
|
} else {
|
||||||
// Emit a zero-length data symbol
|
// Emit a zero-length data symbol
|
||||||
// and then fix up length and content to use file.
|
// and then fix up length and content to use file.
|
||||||
symdata = slicedata(pos, "").Sym.Linksym()
|
symdata = slicedata(pos, "").Sym().Linksym()
|
||||||
symdata.Size = size
|
symdata.Size = size
|
||||||
symdata.Type = objabi.SNOPTRDATA
|
symdata.Type = objabi.SNOPTRDATA
|
||||||
info := symdata.NewFileInfo()
|
info := symdata.NewFileInfo()
|
||||||
|
|
@ -487,12 +475,12 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
|
||||||
|
|
||||||
var slicedataGen int
|
var slicedataGen int
|
||||||
|
|
||||||
func slicedata(pos src.XPos, s string) *Node {
|
func slicedata(pos src.XPos, s string) ir.Node {
|
||||||
slicedataGen++
|
slicedataGen++
|
||||||
symname := fmt.Sprintf(".gobytes.%d", slicedataGen)
|
symname := fmt.Sprintf(".gobytes.%d", slicedataGen)
|
||||||
sym := localpkg.Lookup(symname)
|
sym := ir.LocalPkg.Lookup(symname)
|
||||||
symnode := newname(sym)
|
symnode := NewName(sym)
|
||||||
sym.Def = asTypesNode(symnode)
|
sym.Def = symnode
|
||||||
|
|
||||||
lsym := sym.Linksym()
|
lsym := sym.Linksym()
|
||||||
off := dstringdata(lsym, 0, s, pos, "slice")
|
off := dstringdata(lsym, 0, s, pos, "slice")
|
||||||
|
|
@ -501,11 +489,11 @@ func slicedata(pos src.XPos, s string) *Node {
|
||||||
return symnode
|
return symnode
|
||||||
}
|
}
|
||||||
|
|
||||||
func slicebytes(nam *Node, s string) {
|
func slicebytes(nam ir.Node, s string) {
|
||||||
if nam.Op != ONAME {
|
if nam.Op() != ir.ONAME {
|
||||||
Fatalf("slicebytes %v", nam)
|
base.Fatalf("slicebytes %v", nam)
|
||||||
}
|
}
|
||||||
slicesym(nam, slicedata(nam.Pos, s), int64(len(s)))
|
slicesym(nam, slicedata(nam.Pos(), s), int64(len(s)))
|
||||||
}
|
}
|
||||||
|
|
||||||
func dstringdata(s *obj.LSym, off int, t string, pos src.XPos, what string) int {
|
func dstringdata(s *obj.LSym, off int, t string, pos src.XPos, what string) int {
|
||||||
|
|
@ -513,126 +501,133 @@ func dstringdata(s *obj.LSym, off int, t string, pos src.XPos, what string) int
|
||||||
// causing a cryptic error message by the linker. Check for oversize objects here
|
// causing a cryptic error message by the linker. Check for oversize objects here
|
||||||
// and provide a useful error message instead.
|
// and provide a useful error message instead.
|
||||||
if int64(len(t)) > 2e9 {
|
if int64(len(t)) > 2e9 {
|
||||||
yyerrorl(pos, "%v with length %v is too big", what, len(t))
|
base.ErrorfAt(pos, "%v with length %v is too big", what, len(t))
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
s.WriteString(Ctxt, int64(off), len(t), t)
|
s.WriteString(base.Ctxt, int64(off), len(t), t)
|
||||||
return off + len(t)
|
return off + len(t)
|
||||||
}
|
}
|
||||||
|
|
||||||
func dsymptr(s *obj.LSym, off int, x *obj.LSym, xoff int) int {
|
func dsymptr(s *obj.LSym, off int, x *obj.LSym, xoff int) int {
|
||||||
off = int(Rnd(int64(off), int64(Widthptr)))
|
off = int(Rnd(int64(off), int64(Widthptr)))
|
||||||
s.WriteAddr(Ctxt, int64(off), Widthptr, x, int64(xoff))
|
s.WriteAddr(base.Ctxt, int64(off), Widthptr, x, int64(xoff))
|
||||||
off += Widthptr
|
off += Widthptr
|
||||||
return off
|
return off
|
||||||
}
|
}
|
||||||
|
|
||||||
func dsymptrOff(s *obj.LSym, off int, x *obj.LSym) int {
|
func dsymptrOff(s *obj.LSym, off int, x *obj.LSym) int {
|
||||||
s.WriteOff(Ctxt, int64(off), x, 0)
|
s.WriteOff(base.Ctxt, int64(off), x, 0)
|
||||||
off += 4
|
off += 4
|
||||||
return off
|
return off
|
||||||
}
|
}
|
||||||
|
|
||||||
func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int {
|
func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int {
|
||||||
s.WriteWeakOff(Ctxt, int64(off), x, 0)
|
s.WriteWeakOff(base.Ctxt, int64(off), x, 0)
|
||||||
off += 4
|
off += 4
|
||||||
return off
|
return off
|
||||||
}
|
}
|
||||||
|
|
||||||
// slicesym writes a static slice symbol {&arr, lencap, lencap} to n.
|
// slicesym writes a static slice symbol {&arr, lencap, lencap} to n.
|
||||||
// arr must be an ONAME. slicesym does not modify n.
|
// arr must be an ONAME. slicesym does not modify n.
|
||||||
func slicesym(n, arr *Node, lencap int64) {
|
func slicesym(n, arr ir.Node, lencap int64) {
|
||||||
s := n.Sym.Linksym()
|
s := n.Sym().Linksym()
|
||||||
base := n.Xoffset
|
off := n.Offset()
|
||||||
if arr.Op != ONAME {
|
if arr.Op() != ir.ONAME {
|
||||||
Fatalf("slicesym non-name arr %v", arr)
|
base.Fatalf("slicesym non-name arr %v", arr)
|
||||||
}
|
}
|
||||||
s.WriteAddr(Ctxt, base, Widthptr, arr.Sym.Linksym(), arr.Xoffset)
|
s.WriteAddr(base.Ctxt, off, Widthptr, arr.Sym().Linksym(), arr.Offset())
|
||||||
s.WriteInt(Ctxt, base+sliceLenOffset, Widthptr, lencap)
|
s.WriteInt(base.Ctxt, off+sliceLenOffset, Widthptr, lencap)
|
||||||
s.WriteInt(Ctxt, base+sliceCapOffset, Widthptr, lencap)
|
s.WriteInt(base.Ctxt, off+sliceCapOffset, Widthptr, lencap)
|
||||||
}
|
}
|
||||||
|
|
||||||
// addrsym writes the static address of a to n. a must be an ONAME.
|
// addrsym writes the static address of a to n. a must be an ONAME.
|
||||||
// Neither n nor a is modified.
|
// Neither n nor a is modified.
|
||||||
func addrsym(n, a *Node) {
|
func addrsym(n, a ir.Node) {
|
||||||
if n.Op != ONAME {
|
if n.Op() != ir.ONAME {
|
||||||
Fatalf("addrsym n op %v", n.Op)
|
base.Fatalf("addrsym n op %v", n.Op())
|
||||||
}
|
}
|
||||||
if n.Sym == nil {
|
if n.Sym() == nil {
|
||||||
Fatalf("addrsym nil n sym")
|
base.Fatalf("addrsym nil n sym")
|
||||||
}
|
}
|
||||||
if a.Op != ONAME {
|
if a.Op() != ir.ONAME {
|
||||||
Fatalf("addrsym a op %v", a.Op)
|
base.Fatalf("addrsym a op %v", a.Op())
|
||||||
}
|
}
|
||||||
s := n.Sym.Linksym()
|
s := n.Sym().Linksym()
|
||||||
s.WriteAddr(Ctxt, n.Xoffset, Widthptr, a.Sym.Linksym(), a.Xoffset)
|
s.WriteAddr(base.Ctxt, n.Offset(), Widthptr, a.Sym().Linksym(), a.Offset())
|
||||||
}
|
}
|
||||||
|
|
||||||
// pfuncsym writes the static address of f to n. f must be a global function.
|
// pfuncsym writes the static address of f to n. f must be a global function.
|
||||||
// Neither n nor f is modified.
|
// Neither n nor f is modified.
|
||||||
func pfuncsym(n, f *Node) {
|
func pfuncsym(n, f ir.Node) {
|
||||||
if n.Op != ONAME {
|
if n.Op() != ir.ONAME {
|
||||||
Fatalf("pfuncsym n op %v", n.Op)
|
base.Fatalf("pfuncsym n op %v", n.Op())
|
||||||
}
|
}
|
||||||
if n.Sym == nil {
|
if n.Sym() == nil {
|
||||||
Fatalf("pfuncsym nil n sym")
|
base.Fatalf("pfuncsym nil n sym")
|
||||||
}
|
}
|
||||||
if f.Class() != PFUNC {
|
if f.Class() != ir.PFUNC {
|
||||||
Fatalf("pfuncsym class not PFUNC %d", f.Class())
|
base.Fatalf("pfuncsym class not PFUNC %d", f.Class())
|
||||||
}
|
}
|
||||||
s := n.Sym.Linksym()
|
s := n.Sym().Linksym()
|
||||||
s.WriteAddr(Ctxt, n.Xoffset, Widthptr, funcsym(f.Sym).Linksym(), f.Xoffset)
|
s.WriteAddr(base.Ctxt, n.Offset(), Widthptr, funcsym(f.Sym()).Linksym(), f.Offset())
|
||||||
}
|
}
|
||||||
|
|
||||||
// litsym writes the static literal c to n.
|
// litsym writes the static literal c to n.
|
||||||
// Neither n nor c is modified.
|
// Neither n nor c is modified.
|
||||||
func litsym(n, c *Node, wid int) {
|
func litsym(n, c ir.Node, wid int) {
|
||||||
if n.Op != ONAME {
|
if n.Op() != ir.ONAME {
|
||||||
Fatalf("litsym n op %v", n.Op)
|
base.Fatalf("litsym n op %v", n.Op())
|
||||||
}
|
}
|
||||||
if c.Op != OLITERAL {
|
if n.Sym() == nil {
|
||||||
Fatalf("litsym c op %v", c.Op)
|
base.Fatalf("litsym nil n sym")
|
||||||
}
|
}
|
||||||
if n.Sym == nil {
|
if !types.Identical(n.Type(), c.Type()) {
|
||||||
Fatalf("litsym nil n sym")
|
base.Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type(), c, c.Type())
|
||||||
}
|
}
|
||||||
s := n.Sym.Linksym()
|
if c.Op() == ir.ONIL {
|
||||||
switch u := c.Val().U.(type) {
|
return
|
||||||
case bool:
|
}
|
||||||
i := int64(obj.Bool2int(u))
|
if c.Op() != ir.OLITERAL {
|
||||||
s.WriteInt(Ctxt, n.Xoffset, wid, i)
|
base.Fatalf("litsym c op %v", c.Op())
|
||||||
|
}
|
||||||
|
s := n.Sym().Linksym()
|
||||||
|
switch u := c.Val(); u.Kind() {
|
||||||
|
case constant.Bool:
|
||||||
|
i := int64(obj.Bool2int(constant.BoolVal(u)))
|
||||||
|
s.WriteInt(base.Ctxt, n.Offset(), wid, i)
|
||||||
|
|
||||||
case *Mpint:
|
case constant.Int:
|
||||||
s.WriteInt(Ctxt, n.Xoffset, wid, u.Int64())
|
s.WriteInt(base.Ctxt, n.Offset(), wid, ir.Int64Val(n.Type(), u))
|
||||||
|
|
||||||
case *Mpflt:
|
case constant.Float:
|
||||||
f := u.Float64()
|
f, _ := constant.Float64Val(u)
|
||||||
switch n.Type.Etype {
|
switch n.Type().Etype {
|
||||||
case TFLOAT32:
|
case types.TFLOAT32:
|
||||||
s.WriteFloat32(Ctxt, n.Xoffset, float32(f))
|
s.WriteFloat32(base.Ctxt, n.Offset(), float32(f))
|
||||||
case TFLOAT64:
|
case types.TFLOAT64:
|
||||||
s.WriteFloat64(Ctxt, n.Xoffset, f)
|
s.WriteFloat64(base.Ctxt, n.Offset(), f)
|
||||||
}
|
}
|
||||||
|
|
||||||
case *Mpcplx:
|
case constant.Complex:
|
||||||
r := u.Real.Float64()
|
re, _ := constant.Float64Val(constant.Real(u))
|
||||||
i := u.Imag.Float64()
|
im, _ := constant.Float64Val(constant.Imag(u))
|
||||||
switch n.Type.Etype {
|
switch n.Type().Etype {
|
||||||
case TCOMPLEX64:
|
case types.TCOMPLEX64:
|
||||||
s.WriteFloat32(Ctxt, n.Xoffset, float32(r))
|
s.WriteFloat32(base.Ctxt, n.Offset(), float32(re))
|
||||||
s.WriteFloat32(Ctxt, n.Xoffset+4, float32(i))
|
s.WriteFloat32(base.Ctxt, n.Offset()+4, float32(im))
|
||||||
case TCOMPLEX128:
|
case types.TCOMPLEX128:
|
||||||
s.WriteFloat64(Ctxt, n.Xoffset, r)
|
s.WriteFloat64(base.Ctxt, n.Offset(), re)
|
||||||
s.WriteFloat64(Ctxt, n.Xoffset+8, i)
|
s.WriteFloat64(base.Ctxt, n.Offset()+8, im)
|
||||||
}
|
}
|
||||||
|
|
||||||
case string:
|
case constant.String:
|
||||||
symdata := stringsym(n.Pos, u)
|
i := constant.StringVal(u)
|
||||||
s.WriteAddr(Ctxt, n.Xoffset, Widthptr, symdata, 0)
|
symdata := stringsym(n.Pos(), i)
|
||||||
s.WriteInt(Ctxt, n.Xoffset+int64(Widthptr), Widthptr, int64(len(u)))
|
s.WriteAddr(base.Ctxt, n.Offset(), Widthptr, symdata, 0)
|
||||||
|
s.WriteInt(base.Ctxt, n.Offset()+int64(Widthptr), Widthptr, int64(len(i)))
|
||||||
|
|
||||||
default:
|
default:
|
||||||
Fatalf("litsym unhandled OLITERAL %v", c)
|
base.Fatalf("litsym unhandled OLITERAL %v", c)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
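The rewritten litsym switches on the constant.Value's kind and pulls out native values with BoolVal, Int64Val, Float64Val, Real/Imag, and StringVal before writing them to the symbol. The sketch below exercises the same go/constant switch on its own, just printing the extracted values instead of writing object data.

```go
package main

import (
	"fmt"
	"go/constant"
)

// describe switches on a constant.Value's kind and extracts a native Go
// value, the same shape as the rewritten litsym switch in the diff.
func describe(v constant.Value) string {
	switch v.Kind() {
	case constant.Bool:
		return fmt.Sprintf("bool %v", constant.BoolVal(v))
	case constant.Int:
		i, exact := constant.Int64Val(v)
		return fmt.Sprintf("int %d (exact=%v)", i, exact)
	case constant.Float:
		f, _ := constant.Float64Val(v)
		return fmt.Sprintf("float %g", f)
	case constant.Complex:
		re, _ := constant.Float64Val(constant.Real(v))
		im, _ := constant.Float64Val(constant.Imag(v))
		return fmt.Sprintf("complex %g%+gi", re, im)
	case constant.String:
		return fmt.Sprintf("string %q", constant.StringVal(v))
	}
	return "unknown"
}

func main() {
	fmt.Println(describe(constant.MakeInt64(7)))
	fmt.Println(describe(constant.MakeString("go")))
	fmt.Println(describe(constant.MakeFloat64(2.5)))
}
```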
@ -1,175 +0,0 @@
|
||||||
// Code generated by "stringer -type=Op -trimprefix=O"; DO NOT EDIT.
|
|
||||||
|
|
||||||
package gc
|
|
||||||
|
|
||||||
import "strconv"
|
|
||||||
|
|
||||||
func _() {
|
|
||||||
// An "invalid array index" compiler error signifies that the constant values have changed.
|
|
||||||
// Re-run the stringer command to generate them again.
|
|
||||||
var x [1]struct{}
|
|
||||||
_ = x[OXXX-0]
|
|
||||||
_ = x[ONAME-1]
|
|
||||||
_ = x[ONONAME-2]
|
|
||||||
_ = x[OTYPE-3]
|
|
||||||
_ = x[OPACK-4]
|
|
||||||
_ = x[OLITERAL-5]
|
|
||||||
_ = x[OADD-6]
|
|
||||||
_ = x[OSUB-7]
|
|
||||||
_ = x[OOR-8]
|
|
||||||
_ = x[OXOR-9]
|
|
||||||
_ = x[OADDSTR-10]
|
|
||||||
_ = x[OADDR-11]
|
|
||||||
_ = x[OANDAND-12]
|
|
||||||
_ = x[OAPPEND-13]
|
|
||||||
_ = x[OBYTES2STR-14]
|
|
||||||
_ = x[OBYTES2STRTMP-15]
|
|
||||||
_ = x[ORUNES2STR-16]
|
|
||||||
_ = x[OSTR2BYTES-17]
|
|
||||||
_ = x[OSTR2BYTESTMP-18]
|
|
||||||
_ = x[OSTR2RUNES-19]
|
|
||||||
_ = x[OAS-20]
|
|
||||||
_ = x[OAS2-21]
|
|
||||||
_ = x[OAS2DOTTYPE-22]
|
|
||||||
_ = x[OAS2FUNC-23]
|
|
||||||
_ = x[OAS2MAPR-24]
|
|
||||||
_ = x[OAS2RECV-25]
|
|
||||||
_ = x[OASOP-26]
|
|
||||||
_ = x[OCALL-27]
|
|
||||||
_ = x[OCALLFUNC-28]
|
|
||||||
_ = x[OCALLMETH-29]
|
|
||||||
_ = x[OCALLINTER-30]
|
|
||||||
_ = x[OCALLPART-31]
|
|
||||||
_ = x[OCAP-32]
|
|
||||||
_ = x[OCLOSE-33]
|
|
||||||
_ = x[OCLOSURE-34]
|
|
||||||
_ = x[OCOMPLIT-35]
|
|
||||||
_ = x[OMAPLIT-36]
|
|
||||||
_ = x[OSTRUCTLIT-37]
|
|
||||||
_ = x[OARRAYLIT-38]
|
|
||||||
_ = x[OSLICELIT-39]
|
|
||||||
_ = x[OPTRLIT-40]
|
|
||||||
_ = x[OCONV-41]
|
|
||||||
_ = x[OCONVIFACE-42]
|
|
||||||
_ = x[OCONVNOP-43]
|
|
||||||
_ = x[OCOPY-44]
|
|
||||||
_ = x[ODCL-45]
|
|
||||||
_ = x[ODCLFUNC-46]
|
|
||||||
_ = x[ODCLFIELD-47]
|
|
||||||
_ = x[ODCLCONST-48]
|
|
||||||
_ = x[ODCLTYPE-49]
|
|
||||||
_ = x[ODELETE-50]
|
|
||||||
_ = x[ODOT-51]
|
|
||||||
_ = x[ODOTPTR-52]
|
|
||||||
_ = x[ODOTMETH-53]
|
|
||||||
_ = x[ODOTINTER-54]
|
|
||||||
_ = x[OXDOT-55]
|
|
||||||
_ = x[ODOTTYPE-56]
|
|
||||||
_ = x[ODOTTYPE2-57]
|
|
||||||
_ = x[OEQ-58]
|
|
||||||
_ = x[ONE-59]
|
|
||||||
_ = x[OLT-60]
|
|
||||||
_ = x[OLE-61]
|
|
||||||
_ = x[OGE-62]
|
|
||||||
_ = x[OGT-63]
|
|
||||||
_ = x[ODEREF-64]
|
|
||||||
_ = x[OINDEX-65]
|
|
||||||
_ = x[OINDEXMAP-66]
|
|
||||||
_ = x[OKEY-67]
|
|
||||||
_ = x[OSTRUCTKEY-68]
|
|
||||||
_ = x[OLEN-69]
|
|
||||||
_ = x[OMAKE-70]
|
|
||||||
_ = x[OMAKECHAN-71]
|
|
||||||
_ = x[OMAKEMAP-72]
|
|
||||||
_ = x[OMAKESLICE-73]
|
|
||||||
_ = x[OMAKESLICECOPY-74]
|
|
||||||
_ = x[OMUL-75]
|
|
||||||
_ = x[ODIV-76]
|
|
||||||
_ = x[OMOD-77]
|
|
||||||
_ = x[OLSH-78]
|
|
||||||
_ = x[ORSH-79]
|
|
||||||
_ = x[OAND-80]
|
|
||||||
_ = x[OANDNOT-81]
|
|
||||||
_ = x[ONEW-82]
|
|
||||||
_ = x[ONEWOBJ-83]
|
|
||||||
_ = x[ONOT-84]
|
|
||||||
_ = x[OBITNOT-85]
|
|
||||||
_ = x[OPLUS-86]
|
|
||||||
_ = x[ONEG-87]
|
|
||||||
_ = x[OOROR-88]
|
|
||||||
_ = x[OPANIC-89]
|
|
||||||
_ = x[OPRINT-90]
|
|
||||||
_ = x[OPRINTN-91]
|
|
||||||
_ = x[OPAREN-92]
|
|
||||||
_ = x[OSEND-93]
|
|
||||||
_ = x[OSLICE-94]
|
|
||||||
_ = x[OSLICEARR-95]
|
|
||||||
_ = x[OSLICESTR-96]
|
|
||||||
_ = x[OSLICE3-97]
|
|
||||||
_ = x[OSLICE3ARR-98]
|
|
||||||
_ = x[OSLICEHEADER-99]
|
|
||||||
_ = x[ORECOVER-100]
|
|
||||||
_ = x[ORECV-101]
|
|
||||||
_ = x[ORUNESTR-102]
|
|
||||||
_ = x[OSELRECV-103]
|
|
||||||
_ = x[OSELRECV2-104]
|
|
||||||
_ = x[OIOTA-105]
|
|
||||||
_ = x[OREAL-106]
|
|
||||||
_ = x[OIMAG-107]
|
|
||||||
_ = x[OCOMPLEX-108]
|
|
||||||
_ = x[OALIGNOF-109]
|
|
||||||
_ = x[OOFFSETOF-110]
|
|
||||||
_ = x[OSIZEOF-111]
|
|
||||||
_ = x[OBLOCK-112]
|
|
||||||
_ = x[OBREAK-113]
|
|
||||||
_ = x[OCASE-114]
|
|
||||||
_ = x[OCONTINUE-115]
|
|
||||||
_ = x[ODEFER-116]
|
|
||||||
_ = x[OEMPTY-117]
|
|
||||||
_ = x[OFALL-118]
|
|
||||||
_ = x[OFOR-119]
|
|
||||||
_ = x[OFORUNTIL-120]
|
|
||||||
_ = x[OGOTO-121]
|
|
||||||
_ = x[OIF-122]
|
|
||||||
_ = x[OLABEL-123]
|
|
||||||
_ = x[OGO-124]
|
|
||||||
_ = x[ORANGE-125]
|
|
||||||
_ = x[ORETURN-126]
|
|
||||||
_ = x[OSELECT-127]
|
|
||||||
_ = x[OSWITCH-128]
|
|
||||||
_ = x[OTYPESW-129]
|
|
||||||
_ = x[OTCHAN-130]
|
|
||||||
_ = x[OTMAP-131]
|
|
||||||
_ = x[OTSTRUCT-132]
|
|
||||||
_ = x[OTINTER-133]
|
|
||||||
_ = x[OTFUNC-134]
|
|
||||||
_ = x[OTARRAY-135]
|
|
||||||
_ = x[ODDD-136]
|
|
||||||
_ = x[OINLCALL-137]
|
|
||||||
_ = x[OEFACE-138]
|
|
||||||
_ = x[OITAB-139]
|
|
||||||
_ = x[OIDATA-140]
|
|
||||||
_ = x[OSPTR-141]
|
|
||||||
_ = x[OCLOSUREVAR-142]
|
|
||||||
_ = x[OCFUNC-143]
|
|
||||||
_ = x[OCHECKNIL-144]
|
|
||||||
_ = x[OVARDEF-145]
|
|
||||||
_ = x[OVARKILL-146]
|
|
||||||
_ = x[OVARLIVE-147]
|
|
||||||
_ = x[ORESULT-148]
|
|
||||||
_ = x[OINLMARK-149]
|
|
||||||
_ = x[ORETJMP-150]
|
|
||||||
_ = x[OGETG-151]
|
|
||||||
_ = x[OEND-152]
|
|
||||||
}
|
|
||||||
|
|
||||||
const _Op_name = "XXXNAMENONAMETYPEPACKLITERALADDSUBORXORADDSTRADDRANDANDAPPENDBYTES2STRBYTES2STRTMPRUNES2STRSTR2BYTESSTR2BYTESTMPSTR2RUNESASAS2AS2DOTTYPEAS2FUNCAS2MAPRAS2RECVASOPCALLCALLFUNCCALLMETHCALLINTERCALLPARTCAPCLOSECLOSURECOMPLITMAPLITSTRUCTLITARRAYLITSLICELITPTRLITCONVCONVIFACECONVNOPCOPYDCLDCLFUNCDCLFIELDDCLCONSTDCLTYPEDELETEDOTDOTPTRDOTMETHDOTINTERXDOTDOTTYPEDOTTYPE2EQNELTLEGEGTDEREFINDEXINDEXMAPKEYSTRUCTKEYLENMAKEMAKECHANMAKEMAPMAKESLICEMAKESLICECOPYMULDIVMODLSHRSHANDANDNOTNEWNEWOBJNOTBITNOTPLUSNEGORORPANICPRINTPRINTNPARENSENDSLICESLICEARRSLICESTRSLICE3SLICE3ARRSLICEHEADERRECOVERRECVRUNESTRSELRECVSELRECV2IOTAREALIMAGCOMPLEXALIGNOFOFFSETOFSIZEOFBLOCKBREAKCASECONTINUEDEFEREMPTYFALLFORFORUNTILGOTOIFLABELGORANGERETURNSELECTSWITCHTYPESWTCHANTMAPTSTRUCTTINTERTFUNCTARRAYDDDINLCALLEFACEITABIDATASPTRCLOSUREVARCFUNCCHECKNILVARDEFVARKILLVARLIVERESULTINLMARKRETJMPGETGEND"
|
|
||||||
|
|
||||||
var _Op_index = [...]uint16{0, 3, 7, 13, 17, 21, 28, 31, 34, 36, 39, 45, 49, 55, 61, 70, 82, 91, 100, 112, 121, 123, 126, 136, 143, 150, 157, 161, 165, 173, 181, 190, 198, 201, 206, 213, 220, 226, 235, 243, 251, 257, 261, 270, 277, 281, 284, 291, 299, 307, 314, 320, 323, 329, 336, 344, 348, 355, 363, 365, 367, 369, 371, 373, 375, 380, 385, 393, 396, 405, 408, 412, 420, 427, 436, 449, 452, 455, 458, 461, 464, 467, 473, 476, 482, 485, 491, 495, 498, 502, 507, 512, 518, 523, 527, 532, 540, 548, 554, 563, 574, 581, 585, 592, 599, 607, 611, 615, 619, 626, 633, 641, 647, 652, 657, 661, 669, 674, 679, 683, 686, 694, 698, 700, 705, 707, 712, 718, 724, 730, 736, 741, 745, 752, 758, 763, 769, 772, 779, 784, 788, 793, 797, 807, 812, 820, 826, 833, 840, 846, 853, 859, 863, 866}
|
|
||||||
|
|
||||||
func (i Op) String() string {
|
|
||||||
if i >= Op(len(_Op_index)-1) {
|
|
||||||
return "Op(" + strconv.FormatInt(int64(i), 10) + ")"
|
|
||||||
}
|
|
||||||
return _Op_name[_Op_index[i]:_Op_index[i+1]]
|
|
||||||
}
|
|
||||||
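The deleted opnames.go is standard stringer output: every name packed into one string plus an index array of slice offsets, with String() doing a bounds check and a slice. Below is a hand-written miniature of that layout for a small hypothetical enum.

```go
package main

import (
	"fmt"
	"strconv"
)

// Color mimics the layout that `stringer -type=Op -trimprefix=O`
// generated for the deleted opnames.go: one concatenated name string
// and a parallel index array giving each value's slice bounds.
type Color int

const (
	Red Color = iota
	Green
	Blue
)

const _Color_name = "RedGreenBlue"

var _Color_index = [...]uint8{0, 3, 8, 12}

func (c Color) String() string {
	if c < 0 || int(c) >= len(_Color_index)-1 {
		return "Color(" + strconv.FormatInt(int64(c), 10) + ")"
	}
	return _Color_name[_Color_index[c]:_Color_index[c+1]]
}

func main() {
	fmt.Println(Red, Green, Blue, Color(7)) // Red Green Blue Color(7)
}
```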
File diff suppressed because it is too large
|
|
@ -5,6 +5,8 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/ssa"
|
"cmd/compile/internal/ssa"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/dwarf"
|
"cmd/internal/dwarf"
|
||||||
|
|
@ -22,35 +24,34 @@ import (
|
||||||
// "Portable" code generation.
|
// "Portable" code generation.
|
||||||
|
|
||||||
var (
|
var (
|
||||||
nBackendWorkers int // number of concurrent backend workers, set by a compiler flag
|
compilequeue []ir.Node // functions waiting to be compiled
|
||||||
compilequeue []*Node // functions waiting to be compiled
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func emitptrargsmap(fn *Node) {
|
func emitptrargsmap(fn ir.Node) {
|
||||||
if fn.funcname() == "_" || fn.Func.Nname.Sym.Linkname != "" {
|
if ir.FuncName(fn) == "_" || fn.Func().Nname.Sym().Linkname != "" {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
lsym := Ctxt.Lookup(fn.Func.lsym.Name + ".args_stackmap")
|
lsym := base.Ctxt.Lookup(fn.Func().LSym.Name + ".args_stackmap")
|
||||||
|
|
||||||
nptr := int(fn.Type.ArgWidth() / int64(Widthptr))
|
nptr := int(fn.Type().ArgWidth() / int64(Widthptr))
|
||||||
bv := bvalloc(int32(nptr) * 2)
|
bv := bvalloc(int32(nptr) * 2)
|
||||||
nbitmap := 1
|
nbitmap := 1
|
||||||
if fn.Type.NumResults() > 0 {
|
if fn.Type().NumResults() > 0 {
|
||||||
nbitmap = 2
|
nbitmap = 2
|
||||||
}
|
}
|
||||||
off := duint32(lsym, 0, uint32(nbitmap))
|
off := duint32(lsym, 0, uint32(nbitmap))
|
||||||
off = duint32(lsym, off, uint32(bv.n))
|
off = duint32(lsym, off, uint32(bv.n))
|
||||||
|
|
||||||
if fn.IsMethod() {
|
if ir.IsMethod(fn) {
|
||||||
onebitwalktype1(fn.Type.Recvs(), 0, bv)
|
onebitwalktype1(fn.Type().Recvs(), 0, bv)
|
||||||
}
|
}
|
||||||
if fn.Type.NumParams() > 0 {
|
if fn.Type().NumParams() > 0 {
|
||||||
onebitwalktype1(fn.Type.Params(), 0, bv)
|
onebitwalktype1(fn.Type().Params(), 0, bv)
|
||||||
}
|
}
|
||||||
off = dbvec(lsym, off, bv)
|
off = dbvec(lsym, off, bv)
|
||||||
|
|
||||||
if fn.Type.NumResults() > 0 {
|
if fn.Type().NumResults() > 0 {
|
||||||
onebitwalktype1(fn.Type.Results(), 0, bv)
|
onebitwalktype1(fn.Type().Results(), 0, bv)
|
||||||
off = dbvec(lsym, off, bv)
|
off = dbvec(lsym, off, bv)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -67,40 +68,40 @@ func emitptrargsmap(fn *Node) {
|
||||||
// really means, in memory, things with pointers needing zeroing at
|
// really means, in memory, things with pointers needing zeroing at
|
||||||
// the top of the stack and increasing in size.
|
// the top of the stack and increasing in size.
|
||||||
// Non-autos sort on offset.
|
// Non-autos sort on offset.
|
||||||
func cmpstackvarlt(a, b *Node) bool {
|
func cmpstackvarlt(a, b ir.Node) bool {
|
||||||
if (a.Class() == PAUTO) != (b.Class() == PAUTO) {
|
if (a.Class() == ir.PAUTO) != (b.Class() == ir.PAUTO) {
|
||||||
return b.Class() == PAUTO
|
return b.Class() == ir.PAUTO
|
||||||
}
|
}
|
||||||
|
|
||||||
if a.Class() != PAUTO {
|
if a.Class() != ir.PAUTO {
|
||||||
return a.Xoffset < b.Xoffset
|
return a.Offset() < b.Offset()
|
||||||
}
|
}
|
||||||
|
|
||||||
if a.Name.Used() != b.Name.Used() {
|
if a.Name().Used() != b.Name().Used() {
|
||||||
return a.Name.Used()
|
return a.Name().Used()
|
||||||
}
|
}
|
||||||
|
|
||||||
ap := a.Type.HasPointers()
|
ap := a.Type().HasPointers()
|
||||||
bp := b.Type.HasPointers()
|
bp := b.Type().HasPointers()
|
||||||
if ap != bp {
|
if ap != bp {
|
||||||
return ap
|
return ap
|
||||||
}
|
}
|
||||||
|
|
||||||
ap = a.Name.Needzero()
|
ap = a.Name().Needzero()
|
||||||
bp = b.Name.Needzero()
|
bp = b.Name().Needzero()
|
||||||
if ap != bp {
|
if ap != bp {
|
||||||
return ap
|
return ap
|
||||||
}
|
}
|
||||||
|
|
||||||
if a.Type.Width != b.Type.Width {
|
if a.Type().Width != b.Type().Width {
|
||||||
return a.Type.Width > b.Type.Width
|
return a.Type().Width > b.Type().Width
|
||||||
}
|
}
|
||||||
|
|
||||||
return a.Sym.Name < b.Sym.Name
|
return a.Sym().Name < b.Sym().Name
|
||||||
}
|
}
|
||||||
|
|
||||||
// byStackvar implements sort.Interface for []*Node using cmpstackvarlt.
|
// byStackvar implements sort.Interface for []*Node using cmpstackvarlt.
|
||||||
type byStackVar []*Node
|
type byStackVar []ir.Node
|
||||||
|
|
||||||
func (s byStackVar) Len() int { return len(s) }
|
func (s byStackVar) Len() int { return len(s) }
|
||||||
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
|
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
|
||||||
|
|
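byStackVar above is the usual sort.Interface adapter: a named slice type whose Less delegates to cmpstackvarlt so that used, pointer-containing, larger variables sort first. A hedged, self-contained sketch of the same pattern with a simplified ordering (frameVar and its fields are illustrative stand-ins, not the compiler's node type):

package main

import (
	"fmt"
	"sort"
)

// frameVar is a stand-in for a stack-variable node.
type frameVar struct {
	name string
	size int64
	used bool
}

// Used variables first, then larger sizes, then name.
func frameVarLess(a, b frameVar) bool {
	if a.used != b.used {
		return a.used
	}
	if a.size != b.size {
		return a.size > b.size
	}
	return a.name < b.name
}

// byFrameVar adapts the comparison to sort.Interface, like byStackVar.
type byFrameVar []frameVar

func (s byFrameVar) Len() int           { return len(s) }
func (s byFrameVar) Less(i, j int) bool { return frameVarLess(s[i], s[j]) }
func (s byFrameVar) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

func main() {
	vars := []frameVar{{"x", 8, false}, {"y", 16, true}, {"z", 16, true}}
	sort.Sort(byFrameVar(vars))
	fmt.Println(vars) // [{y 16 true} {z 16 true} {x 8 false}]
}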
@ -109,33 +110,33 @@ func (s byStackVar) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
func (s *ssafn) AllocFrame(f *ssa.Func) {
|
func (s *ssafn) AllocFrame(f *ssa.Func) {
|
||||||
s.stksize = 0
|
s.stksize = 0
|
||||||
s.stkptrsize = 0
|
s.stkptrsize = 0
|
||||||
fn := s.curfn.Func
|
fn := s.curfn.Func()
|
||||||
|
|
||||||
// Mark the PAUTO's unused.
|
// Mark the PAUTO's unused.
|
||||||
for _, ln := range fn.Dcl {
|
for _, ln := range fn.Dcl {
|
||||||
if ln.Class() == PAUTO {
|
if ln.Class() == ir.PAUTO {
|
||||||
ln.Name.SetUsed(false)
|
ln.Name().SetUsed(false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, l := range f.RegAlloc {
|
for _, l := range f.RegAlloc {
|
||||||
if ls, ok := l.(ssa.LocalSlot); ok {
|
if ls, ok := l.(ssa.LocalSlot); ok {
|
||||||
ls.N.(*Node).Name.SetUsed(true)
|
ls.N.Name().SetUsed(true)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
scratchUsed := false
|
scratchUsed := false
|
||||||
for _, b := range f.Blocks {
|
for _, b := range f.Blocks {
|
||||||
for _, v := range b.Values {
|
for _, v := range b.Values {
|
||||||
if n, ok := v.Aux.(*Node); ok {
|
if n, ok := v.Aux.(ir.Node); ok {
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
// Don't modify nodfp; it is a global.
|
// Don't modify nodfp; it is a global.
|
||||||
if n != nodfp {
|
if n != nodfp {
|
||||||
n.Name.SetUsed(true)
|
n.Name().SetUsed(true)
|
||||||
}
|
}
|
||||||
case PAUTO:
|
case ir.PAUTO:
|
||||||
n.Name.SetUsed(true)
|
n.Name().SetUsed(true)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !scratchUsed {
|
if !scratchUsed {
|
||||||
|
|
@ -146,7 +147,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
|
||||||
}
|
}
|
||||||
|
|
||||||
if f.Config.NeedsFpScratch && scratchUsed {
|
if f.Config.NeedsFpScratch && scratchUsed {
|
||||||
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[TUINT64])
|
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[types.TUINT64])
|
||||||
}
|
}
|
||||||
|
|
||||||
sort.Sort(byStackVar(fn.Dcl))
|
sort.Sort(byStackVar(fn.Dcl))
|
||||||
|
|
@ -154,18 +155,18 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
|
||||||
// Reassign stack offsets of the locals that are used.
|
// Reassign stack offsets of the locals that are used.
|
||||||
lastHasPtr := false
|
lastHasPtr := false
|
||||||
for i, n := range fn.Dcl {
|
for i, n := range fn.Dcl {
|
||||||
if n.Op != ONAME || n.Class() != PAUTO {
|
if n.Op() != ir.ONAME || n.Class() != ir.PAUTO {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if !n.Name.Used() {
|
if !n.Name().Used() {
|
||||||
fn.Dcl = fn.Dcl[:i]
|
fn.Dcl = fn.Dcl[:i]
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
dowidth(n.Type)
|
dowidth(n.Type())
|
||||||
w := n.Type.Width
|
w := n.Type().Width
|
||||||
if w >= thearch.MAXWIDTH || w < 0 {
|
if w >= thearch.MAXWIDTH || w < 0 {
|
||||||
Fatalf("bad width")
|
base.Fatalf("bad width")
|
||||||
}
|
}
|
||||||
if w == 0 && lastHasPtr {
|
if w == 0 && lastHasPtr {
|
||||||
// Pad between a pointer-containing object and a zero-sized object.
|
// Pad between a pointer-containing object and a zero-sized object.
|
||||||
|
|
@ -175,8 +176,8 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
|
||||||
w = 1
|
w = 1
|
||||||
}
|
}
|
||||||
s.stksize += w
|
s.stksize += w
|
||||||
s.stksize = Rnd(s.stksize, int64(n.Type.Align))
|
s.stksize = Rnd(s.stksize, int64(n.Type().Align))
|
||||||
if n.Type.HasPointers() {
|
if n.Type().HasPointers() {
|
||||||
s.stkptrsize = s.stksize
|
s.stkptrsize = s.stksize
|
||||||
lastHasPtr = true
|
lastHasPtr = true
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -185,59 +186,58 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
|
||||||
if thearch.LinkArch.InFamily(sys.MIPS, sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) {
|
if thearch.LinkArch.InFamily(sys.MIPS, sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) {
|
||||||
s.stksize = Rnd(s.stksize, int64(Widthptr))
|
s.stksize = Rnd(s.stksize, int64(Widthptr))
|
||||||
}
|
}
|
||||||
n.Xoffset = -s.stksize
|
n.SetOffset(-s.stksize)
|
||||||
}
|
}
|
||||||
|
|
||||||
s.stksize = Rnd(s.stksize, int64(Widthreg))
|
s.stksize = Rnd(s.stksize, int64(Widthreg))
|
||||||
s.stkptrsize = Rnd(s.stkptrsize, int64(Widthreg))
|
s.stkptrsize = Rnd(s.stkptrsize, int64(Widthreg))
|
||||||
}
|
}
|
||||||
|
|
||||||
func funccompile(fn *Node) {
|
func funccompile(fn ir.Node) {
|
||||||
if Curfn != nil {
|
if Curfn != nil {
|
||||||
Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym)
|
base.Fatalf("funccompile %v inside %v", fn.Func().Nname.Sym(), Curfn.Func().Nname.Sym())
|
||||||
}
|
}
|
||||||
|
|
||||||
if fn.Type == nil {
|
if fn.Type() == nil {
|
||||||
if nerrors == 0 {
|
if base.Errors() == 0 {
|
||||||
Fatalf("funccompile missing type")
|
base.Fatalf("funccompile missing type")
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// assign parameter offsets
|
// assign parameter offsets
|
||||||
dowidth(fn.Type)
|
dowidth(fn.Type())
|
||||||
|
|
||||||
if fn.Nbody.Len() == 0 {
|
if fn.Body().Len() == 0 {
|
||||||
// Initialize ABI wrappers if necessary.
|
// Initialize ABI wrappers if necessary.
|
||||||
fn.Func.initLSym(false)
|
initLSym(fn.Func(), false)
|
||||||
emitptrargsmap(fn)
|
emitptrargsmap(fn)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
dclcontext = PAUTO
|
dclcontext = ir.PAUTO
|
||||||
Curfn = fn
|
Curfn = fn
|
||||||
|
|
||||||
compile(fn)
|
compile(fn)
|
||||||
|
|
||||||
Curfn = nil
|
Curfn = nil
|
||||||
dclcontext = PEXTERN
|
dclcontext = ir.PEXTERN
|
||||||
}
|
}
|
||||||
|
|
||||||
func compile(fn *Node) {
|
func compile(fn ir.Node) {
|
||||||
saveerrors()
|
errorsBefore := base.Errors()
|
||||||
|
|
||||||
order(fn)
|
order(fn)
|
||||||
if nerrors != 0 {
|
if base.Errors() > errorsBefore {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set up the function's LSym early to avoid data races with the assemblers.
|
// Set up the function's LSym early to avoid data races with the assemblers.
|
||||||
// Do this before walk, as walk needs the LSym to set attributes/relocations
|
// Do this before walk, as walk needs the LSym to set attributes/relocations
|
||||||
// (e.g. in markTypeUsedInInterface).
|
// (e.g. in markTypeUsedInInterface).
|
||||||
fn.Func.initLSym(true)
|
initLSym(fn.Func(), true)
|
||||||
|
|
||||||
walk(fn)
|
walk(fn)
|
||||||
if nerrors != 0 {
|
if base.Errors() > errorsBefore {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if instrumenting {
|
if instrumenting {
|
||||||
|
|
@ -247,7 +247,7 @@ func compile(fn *Node) {
|
||||||
// From this point, there should be no uses of Curfn. Enforce that.
|
// From this point, there should be no uses of Curfn. Enforce that.
|
||||||
Curfn = nil
|
Curfn = nil
|
||||||
|
|
||||||
if fn.funcname() == "_" {
|
if ir.FuncName(fn) == "_" {
|
||||||
// We don't need to generate code for this function, just report errors in its body.
|
// We don't need to generate code for this function, just report errors in its body.
|
||||||
// At this point we've generated any errors needed.
|
// At this point we've generated any errors needed.
|
||||||
// (Beyond here we generate only non-spec errors, like "stack frame too large".)
|
// (Beyond here we generate only non-spec errors, like "stack frame too large".)
|
||||||
|
|
@ -259,15 +259,15 @@ func compile(fn *Node) {
|
||||||
// be types of stack objects. We need to do this here
|
// be types of stack objects. We need to do this here
|
||||||
// because symbols must be allocated before the parallel
|
// because symbols must be allocated before the parallel
|
||||||
// phase of the compiler.
|
// phase of the compiler.
|
||||||
for _, n := range fn.Func.Dcl {
|
for _, n := range fn.Func().Dcl {
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PPARAM, PPARAMOUT, PAUTO:
|
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
|
||||||
if livenessShouldTrack(n) && n.Name.Addrtaken() {
|
if livenessShouldTrack(n) && n.Name().Addrtaken() {
|
||||||
dtypesym(n.Type)
|
dtypesym(n.Type())
|
||||||
// Also make sure we allocate a linker symbol
|
// Also make sure we allocate a linker symbol
|
||||||
// for the stack object data, for the same reason.
|
// for the stack object data, for the same reason.
|
||||||
if fn.Func.lsym.Func().StackObjects == nil {
|
if fn.Func().LSym.Func().StackObjects == nil {
|
||||||
fn.Func.lsym.Func().StackObjects = Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj")
|
fn.Func().LSym.Func().StackObjects = base.Ctxt.Lookup(fn.Func().LSym.Name + ".stkobj")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -284,29 +284,29 @@ func compile(fn *Node) {
|
||||||
// If functions are not compiled immediately,
|
// If functions are not compiled immediately,
|
||||||
// they are enqueued in compilequeue,
|
// they are enqueued in compilequeue,
|
||||||
// which is drained by compileFunctions.
|
// which is drained by compileFunctions.
|
||||||
func compilenow(fn *Node) bool {
|
func compilenow(fn ir.Node) bool {
|
||||||
// Issue 38068: if this function is a method AND an inline
|
// Issue 38068: if this function is a method AND an inline
|
||||||
// candidate AND was not inlined (yet), put it onto the compile
|
// candidate AND was not inlined (yet), put it onto the compile
|
||||||
// queue instead of compiling it immediately. This is in case we
|
// queue instead of compiling it immediately. This is in case we
|
||||||
// wind up inlining it into a method wrapper that is generated by
|
// wind up inlining it into a method wrapper that is generated by
|
||||||
// compiling a function later on in the xtop list.
|
// compiling a function later on in the xtop list.
|
||||||
if fn.IsMethod() && isInlinableButNotInlined(fn) {
|
if ir.IsMethod(fn) && isInlinableButNotInlined(fn) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
return nBackendWorkers == 1 && Debug_compilelater == 0
|
return base.Flag.LowerC == 1 && base.Debug.CompileLater == 0
|
||||||
}
|
}
|
||||||
|
|
||||||
// isInlinableButNotInlined returns true if 'fn' was marked as an
|
// isInlinableButNotInlined returns true if 'fn' was marked as an
|
||||||
// inline candidate but then never inlined (presumably because we
|
// inline candidate but then never inlined (presumably because we
|
||||||
// found no call sites).
|
// found no call sites).
|
||||||
func isInlinableButNotInlined(fn *Node) bool {
|
func isInlinableButNotInlined(fn ir.Node) bool {
|
||||||
if fn.Func.Nname.Func.Inl == nil {
|
if fn.Func().Nname.Func().Inl == nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if fn.Sym == nil {
|
if fn.Sym() == nil {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return !fn.Sym.Linksym().WasInlined()
|
return !fn.Sym().Linksym().WasInlined()
|
||||||
}
|
}
|
||||||
|
|
||||||
const maxStackSize = 1 << 30
|
const maxStackSize = 1 << 30
|
||||||
|
|
@ -315,12 +315,12 @@ const maxStackSize = 1 << 30
|
||||||
// uses it to generate a plist,
|
// uses it to generate a plist,
|
||||||
// and flushes that plist to machine code.
|
// and flushes that plist to machine code.
|
||||||
// worker indicates which of the backend workers is doing the processing.
|
// worker indicates which of the backend workers is doing the processing.
|
||||||
func compileSSA(fn *Node, worker int) {
|
func compileSSA(fn ir.Node, worker int) {
|
||||||
f := buildssa(fn, worker)
|
f := buildssa(fn, worker)
|
||||||
// Note: check arg size to fix issue 25507.
|
// Note: check arg size to fix issue 25507.
|
||||||
if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type.ArgWidth() >= maxStackSize {
|
if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type().ArgWidth() >= maxStackSize {
|
||||||
largeStackFramesMu.Lock()
|
largeStackFramesMu.Lock()
|
||||||
largeStackFrames = append(largeStackFrames, largeStack{locals: f.Frontend().(*ssafn).stksize, args: fn.Type.ArgWidth(), pos: fn.Pos})
|
largeStackFrames = append(largeStackFrames, largeStack{locals: f.Frontend().(*ssafn).stksize, args: fn.Type().ArgWidth(), pos: fn.Pos()})
|
||||||
largeStackFramesMu.Unlock()
|
largeStackFramesMu.Unlock()
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
@ -336,14 +336,14 @@ func compileSSA(fn *Node, worker int) {
|
||||||
if pp.Text.To.Offset >= maxStackSize {
|
if pp.Text.To.Offset >= maxStackSize {
|
||||||
largeStackFramesMu.Lock()
|
largeStackFramesMu.Lock()
|
||||||
locals := f.Frontend().(*ssafn).stksize
|
locals := f.Frontend().(*ssafn).stksize
|
||||||
largeStackFrames = append(largeStackFrames, largeStack{locals: locals, args: fn.Type.ArgWidth(), callee: pp.Text.To.Offset - locals, pos: fn.Pos})
|
largeStackFrames = append(largeStackFrames, largeStack{locals: locals, args: fn.Type().ArgWidth(), callee: pp.Text.To.Offset - locals, pos: fn.Pos()})
|
||||||
largeStackFramesMu.Unlock()
|
largeStackFramesMu.Unlock()
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
pp.Flush() // assemble, fill in boilerplate, etc.
|
pp.Flush() // assemble, fill in boilerplate, etc.
|
||||||
// fieldtrack must be called after pp.Flush. See issue 20014.
|
// fieldtrack must be called after pp.Flush. See issue 20014.
|
||||||
fieldtrack(pp.Text.From.Sym, fn.Func.FieldTrack)
|
fieldtrack(pp.Text.From.Sym, fn.Func().FieldTrack)
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
@ -360,7 +360,7 @@ func compileFunctions() {
|
||||||
sizeCalculationDisabled = true // not safe to calculate sizes concurrently
|
sizeCalculationDisabled = true // not safe to calculate sizes concurrently
|
||||||
if race.Enabled {
|
if race.Enabled {
|
||||||
// Randomize compilation order to try to shake out races.
|
// Randomize compilation order to try to shake out races.
|
||||||
tmp := make([]*Node, len(compilequeue))
|
tmp := make([]ir.Node, len(compilequeue))
|
||||||
perm := rand.Perm(len(compilequeue))
|
perm := rand.Perm(len(compilequeue))
|
||||||
for i, v := range perm {
|
for i, v := range perm {
|
||||||
tmp[v] = compilequeue[i]
|
tmp[v] = compilequeue[i]
|
||||||
|
|
@ -371,13 +371,13 @@ func compileFunctions() {
|
||||||
// since they're most likely to be the slowest.
|
// since they're most likely to be the slowest.
|
||||||
// This helps avoid stragglers.
|
// This helps avoid stragglers.
|
||||||
sort.Slice(compilequeue, func(i, j int) bool {
|
sort.Slice(compilequeue, func(i, j int) bool {
|
||||||
return compilequeue[i].Nbody.Len() > compilequeue[j].Nbody.Len()
|
return compilequeue[i].Body().Len() > compilequeue[j].Body().Len()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
var wg sync.WaitGroup
|
var wg sync.WaitGroup
|
||||||
Ctxt.InParallel = true
|
base.Ctxt.InParallel = true
|
||||||
c := make(chan *Node, nBackendWorkers)
|
c := make(chan ir.Node, base.Flag.LowerC)
|
||||||
for i := 0; i < nBackendWorkers; i++ {
|
for i := 0; i < base.Flag.LowerC; i++ {
|
||||||
wg.Add(1)
|
wg.Add(1)
|
||||||
go func(worker int) {
|
go func(worker int) {
|
||||||
for fn := range c {
|
for fn := range c {
|
||||||
|
|
@ -392,46 +392,75 @@ func compileFunctions() {
|
||||||
close(c)
|
close(c)
|
||||||
compilequeue = nil
|
compilequeue = nil
|
||||||
wg.Wait()
|
wg.Wait()
|
||||||
Ctxt.InParallel = false
|
base.Ctxt.InParallel = false
|
||||||
sizeCalculationDisabled = false
|
sizeCalculationDisabled = false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
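compileFunctions above drains compilequeue with a bounded worker pool: a buffered channel sized by the backend-worker flag plus a sync.WaitGroup. A minimal sketch of that pattern, with placeholder work items standing in for the compiler's ir.Node values:

package main

import (
	"fmt"
	"sync"
)

// compileOne stands in for compileSSA(fn, worker).
func compileOne(worker int, fn string) {
	fmt.Printf("worker %d compiled %s\n", worker, fn)
}

func main() {
	queue := []string{"f1", "f2", "f3", "f4", "f5"}
	const nWorkers = 2 // plays the role of the -c backend-worker count

	var wg sync.WaitGroup
	c := make(chan string, nWorkers)
	for i := 0; i < nWorkers; i++ {
		wg.Add(1)
		go func(worker int) {
			defer wg.Done()
			for fn := range c {
				compileOne(worker, fn)
			}
		}(i)
	}
	for _, fn := range queue {
		c <- fn
	}
	close(c)
	wg.Wait()
}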
func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, dwarf.InlCalls) {
|
func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, dwarf.InlCalls) {
|
||||||
fn := curfn.(*Node)
|
fn := curfn.(ir.Node)
|
||||||
if fn.Func.Nname != nil {
|
if fn.Func().Nname != nil {
|
||||||
if expect := fn.Func.Nname.Sym.Linksym(); fnsym != expect {
|
if expect := fn.Func().Nname.Sym().Linksym(); fnsym != expect {
|
||||||
Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
|
base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var apdecls []*Node
|
// Back when there were two different *Funcs for a function, this code
|
||||||
|
// was not consistent about whether a particular *Node being processed
|
||||||
|
// was an ODCLFUNC or ONAME node. Partly this is because inlined function
|
||||||
|
// bodies have no ODCLFUNC node, which was its own inconsistency.
|
||||||
|
// In any event, the handling of the two different nodes for DWARF purposes
|
||||||
|
// was subtly different, likely in unintended ways. CL 272253 merged the
|
||||||
|
// two nodes' Func fields, so that code sees the same *Func whether it is
|
||||||
|
// holding the ODCLFUNC or the ONAME. This resulted in changes in the
|
||||||
|
// DWARF output. To preserve the existing DWARF output and leave an
|
||||||
|
// intentional change for a future CL, this code does the following when
|
||||||
|
// fn.Op == ONAME:
|
||||||
|
//
|
||||||
|
// 1. Disallow use of createComplexVars in createDwarfVars.
|
||||||
|
// It was not possible to reach that code for an ONAME before,
|
||||||
|
// because the DebugInfo was set only on the ODCLFUNC Func.
|
||||||
|
// Calling into it in the ONAME case causes an index out of bounds panic.
|
||||||
|
//
|
||||||
|
// 2. Do not populate apdecls. fn.Func.Dcl was in the ODCLFUNC Func,
|
||||||
|
// not the ONAME Func. Populating apdecls for the ONAME case results
|
||||||
|
// in selected being populated after createSimpleVars is called in
|
||||||
|
// createDwarfVars, and then that causes the loop to skip all the entries
|
||||||
|
// in dcl, meaning that the RecordAutoType calls don't happen.
|
||||||
|
//
|
||||||
|
// These two adjustments keep toolstash -cmp working for now.
|
||||||
|
// Deciding the right answer is, as they say, future work.
|
||||||
|
isODCLFUNC := fn.Op() == ir.ODCLFUNC
|
||||||
|
|
||||||
|
var apdecls []ir.Node
|
||||||
// Populate decls for fn.
|
// Populate decls for fn.
|
||||||
for _, n := range fn.Func.Dcl {
|
if isODCLFUNC {
|
||||||
if n.Op != ONAME { // might be OTYPE or OLITERAL
|
for _, n := range fn.Func().Dcl {
|
||||||
|
if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PAUTO:
|
case ir.PAUTO:
|
||||||
if !n.Name.Used() {
|
if !n.Name().Used() {
|
||||||
// Text == nil -> generating abstract function
|
// Text == nil -> generating abstract function
|
||||||
if fnsym.Func().Text != nil {
|
if fnsym.Func().Text != nil {
|
||||||
Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)")
|
base.Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)")
|
||||||
}
|
}
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
default:
|
default:
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
apdecls = append(apdecls, n)
|
apdecls = append(apdecls, n)
|
||||||
fnsym.Func().RecordAutoType(ngotype(n).Linksym())
|
fnsym.Func().RecordAutoType(ngotype(n).Linksym())
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
decls, dwarfVars := createDwarfVars(fnsym, fn.Func, apdecls)
|
decls, dwarfVars := createDwarfVars(fnsym, isODCLFUNC, fn.Func(), apdecls)
|
||||||
|
|
||||||
// For each type referenced by the function's auto vars but not
|
// For each type referenced by the function's auto vars but not
|
||||||
// already referenced by a dwarf var, attach a dummy relocation to
|
// already referenced by a dwarf var, attach an R_USETYPE relocation to
|
||||||
// the function symbol to insure that the type included in DWARF
|
// the function symbol to insure that the type included in DWARF
|
||||||
// processing during linking.
|
// processing during linking.
|
||||||
typesyms := []*obj.LSym{}
|
typesyms := []*obj.LSym{}
|
||||||
|
|
@ -446,22 +475,22 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
|
||||||
}
|
}
|
||||||
fnsym.Func().Autot = nil
|
fnsym.Func().Autot = nil
|
||||||
|
|
||||||
var varScopes []ScopeID
|
var varScopes []ir.ScopeID
|
||||||
for _, decl := range decls {
|
for _, decl := range decls {
|
||||||
pos := declPos(decl)
|
pos := declPos(decl)
|
||||||
varScopes = append(varScopes, findScope(fn.Func.Marks, pos))
|
varScopes = append(varScopes, findScope(fn.Func().Marks, pos))
|
||||||
}
|
}
|
||||||
|
|
||||||
scopes := assembleScopes(fnsym, fn, dwarfVars, varScopes)
|
scopes := assembleScopes(fnsym, fn, dwarfVars, varScopes)
|
||||||
var inlcalls dwarf.InlCalls
|
var inlcalls dwarf.InlCalls
|
||||||
if genDwarfInline > 0 {
|
if base.Flag.GenDwarfInl > 0 {
|
||||||
inlcalls = assembleInlines(fnsym, dwarfVars)
|
inlcalls = assembleInlines(fnsym, dwarfVars)
|
||||||
}
|
}
|
||||||
return scopes, inlcalls
|
return scopes, inlcalls
|
||||||
}
|
}
|
||||||
|
|
||||||
func declPos(decl *Node) src.XPos {
|
func declPos(decl ir.Node) src.XPos {
|
||||||
if decl.Name.Defn != nil && (decl.Name.Captured() || decl.Name.Byval()) {
|
if decl.Name().Defn != nil && (decl.Name().Captured() || decl.Name().Byval()) {
|
||||||
// It's not clear which position is correct for captured variables here:
|
// It's not clear which position is correct for captured variables here:
|
||||||
// * decl.Pos is the wrong position for captured variables, in the inner
|
// * decl.Pos is the wrong position for captured variables, in the inner
|
||||||
// function, but it is the right position in the outer function.
|
// function, but it is the right position in the outer function.
|
||||||
|
|
@ -476,19 +505,19 @@ func declPos(decl *Node) src.XPos {
|
||||||
// case statement.
|
// case statement.
|
||||||
// This code is probably wrong for type switch variables that are also
|
// This code is probably wrong for type switch variables that are also
|
||||||
// captured.
|
// captured.
|
||||||
return decl.Name.Defn.Pos
|
return decl.Name().Defn.Pos()
|
||||||
}
|
}
|
||||||
return decl.Pos
|
return decl.Pos()
|
||||||
}
|
}
|
||||||
|
|
||||||
// createSimpleVars creates a DWARF entry for every variable declared in the
|
// createSimpleVars creates a DWARF entry for every variable declared in the
|
||||||
// function, claiming that they are permanently on the stack.
|
// function, claiming that they are permanently on the stack.
|
||||||
func createSimpleVars(fnsym *obj.LSym, apDecls []*Node) ([]*Node, []*dwarf.Var, map[*Node]bool) {
|
func createSimpleVars(fnsym *obj.LSym, apDecls []ir.Node) ([]ir.Node, []*dwarf.Var, map[ir.Node]bool) {
|
||||||
var vars []*dwarf.Var
|
var vars []*dwarf.Var
|
||||||
var decls []*Node
|
var decls []ir.Node
|
||||||
selected := make(map[*Node]bool)
|
selected := make(map[ir.Node]bool)
|
||||||
for _, n := range apDecls {
|
for _, n := range apDecls {
|
||||||
if n.IsAutoTmp() {
|
if ir.IsAutoTmp(n) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -499,14 +528,14 @@ func createSimpleVars(fnsym *obj.LSym, apDecls []*Node) ([]*Node, []*dwarf.Var,
|
||||||
return decls, vars, selected
|
return decls, vars, selected
|
||||||
}
|
}
|
||||||
|
|
||||||
func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
|
func createSimpleVar(fnsym *obj.LSym, n ir.Node) *dwarf.Var {
|
||||||
var abbrev int
|
var abbrev int
|
||||||
offs := n.Xoffset
|
offs := n.Offset()
|
||||||
|
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PAUTO:
|
case ir.PAUTO:
|
||||||
abbrev = dwarf.DW_ABRV_AUTO
|
abbrev = dwarf.DW_ABRV_AUTO
|
||||||
if Ctxt.FixedFrameSize() == 0 {
|
if base.Ctxt.FixedFrameSize() == 0 {
|
||||||
offs -= int64(Widthptr)
|
offs -= int64(Widthptr)
|
||||||
}
|
}
|
||||||
if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" {
|
if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" {
|
||||||
|
|
@ -514,32 +543,32 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
|
||||||
offs -= int64(Widthptr)
|
offs -= int64(Widthptr)
|
||||||
}
|
}
|
||||||
|
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
abbrev = dwarf.DW_ABRV_PARAM
|
abbrev = dwarf.DW_ABRV_PARAM
|
||||||
offs += Ctxt.FixedFrameSize()
|
offs += base.Ctxt.FixedFrameSize()
|
||||||
default:
|
default:
|
||||||
Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
|
base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
|
||||||
}
|
}
|
||||||
|
|
||||||
typename := dwarf.InfoPrefix + typesymname(n.Type)
|
typename := dwarf.InfoPrefix + typesymname(n.Type())
|
||||||
delete(fnsym.Func().Autot, ngotype(n).Linksym())
|
delete(fnsym.Func().Autot, ngotype(n).Linksym())
|
||||||
inlIndex := 0
|
inlIndex := 0
|
||||||
if genDwarfInline > 1 {
|
if base.Flag.GenDwarfInl > 1 {
|
||||||
if n.Name.InlFormal() || n.Name.InlLocal() {
|
if n.Name().InlFormal() || n.Name().InlLocal() {
|
||||||
inlIndex = posInlIndex(n.Pos) + 1
|
inlIndex = posInlIndex(n.Pos()) + 1
|
||||||
if n.Name.InlFormal() {
|
if n.Name().InlFormal() {
|
||||||
abbrev = dwarf.DW_ABRV_PARAM
|
abbrev = dwarf.DW_ABRV_PARAM
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
declpos := Ctxt.InnermostPos(declPos(n))
|
declpos := base.Ctxt.InnermostPos(declPos(n))
|
||||||
return &dwarf.Var{
|
return &dwarf.Var{
|
||||||
Name: n.Sym.Name,
|
Name: n.Sym().Name,
|
||||||
IsReturnValue: n.Class() == PPARAMOUT,
|
IsReturnValue: n.Class() == ir.PPARAMOUT,
|
||||||
IsInlFormal: n.Name.InlFormal(),
|
IsInlFormal: n.Name().InlFormal(),
|
||||||
Abbrev: abbrev,
|
Abbrev: abbrev,
|
||||||
StackOffset: int32(offs),
|
StackOffset: int32(offs),
|
||||||
Type: Ctxt.Lookup(typename),
|
Type: base.Ctxt.Lookup(typename),
|
||||||
DeclFile: declpos.RelFilename(),
|
DeclFile: declpos.RelFilename(),
|
||||||
DeclLine: declpos.RelLine(),
|
DeclLine: declpos.RelLine(),
|
||||||
DeclCol: declpos.Col(),
|
DeclCol: declpos.Col(),
|
||||||
|
|
@ -550,19 +579,19 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
|
||||||
|
|
||||||
// createComplexVars creates recomposed DWARF vars with location lists,
|
// createComplexVars creates recomposed DWARF vars with location lists,
|
||||||
// suitable for describing optimized code.
|
// suitable for describing optimized code.
|
||||||
func createComplexVars(fnsym *obj.LSym, fn *Func) ([]*Node, []*dwarf.Var, map[*Node]bool) {
|
func createComplexVars(fnsym *obj.LSym, fn *ir.Func) ([]ir.Node, []*dwarf.Var, map[ir.Node]bool) {
|
||||||
debugInfo := fn.DebugInfo
|
debugInfo := fn.DebugInfo.(*ssa.FuncDebug)
|
||||||
|
|
||||||
// Produce a DWARF variable entry for each user variable.
|
// Produce a DWARF variable entry for each user variable.
|
||||||
var decls []*Node
|
var decls []ir.Node
|
||||||
var vars []*dwarf.Var
|
var vars []*dwarf.Var
|
||||||
ssaVars := make(map[*Node]bool)
|
ssaVars := make(map[ir.Node]bool)
|
||||||
|
|
||||||
for varID, dvar := range debugInfo.Vars {
|
for varID, dvar := range debugInfo.Vars {
|
||||||
n := dvar.(*Node)
|
n := dvar
|
||||||
ssaVars[n] = true
|
ssaVars[n] = true
|
||||||
for _, slot := range debugInfo.VarSlots[varID] {
|
for _, slot := range debugInfo.VarSlots[varID] {
|
||||||
ssaVars[debugInfo.Slots[slot].N.(*Node)] = true
|
ssaVars[debugInfo.Slots[slot].N] = true
|
||||||
}
|
}
|
||||||
|
|
||||||
if dvar := createComplexVar(fnsym, fn, ssa.VarID(varID)); dvar != nil {
|
if dvar := createComplexVar(fnsym, fn, ssa.VarID(varID)); dvar != nil {
|
||||||
|
|
@ -576,12 +605,12 @@ func createComplexVars(fnsym *obj.LSym, fn *Func) ([]*Node, []*dwarf.Var, map[*N
|
||||||
|
|
||||||
// createDwarfVars processes fn, returning a list of DWARF variables and the
|
// createDwarfVars processes fn, returning a list of DWARF variables and the
|
||||||
// Nodes they represent.
|
// Nodes they represent.
|
||||||
func createDwarfVars(fnsym *obj.LSym, fn *Func, apDecls []*Node) ([]*Node, []*dwarf.Var) {
|
func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []ir.Node) ([]ir.Node, []*dwarf.Var) {
|
||||||
// Collect a raw list of DWARF vars.
|
// Collect a raw list of DWARF vars.
|
||||||
var vars []*dwarf.Var
|
var vars []*dwarf.Var
|
||||||
var decls []*Node
|
var decls []ir.Node
|
||||||
var selected map[*Node]bool
|
var selected map[ir.Node]bool
|
||||||
if Ctxt.Flag_locationlists && Ctxt.Flag_optimize && fn.DebugInfo != nil {
|
if base.Ctxt.Flag_locationlists && base.Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK {
|
||||||
decls, vars, selected = createComplexVars(fnsym, fn)
|
decls, vars, selected = createComplexVars(fnsym, fn)
|
||||||
} else {
|
} else {
|
||||||
decls, vars, selected = createSimpleVars(fnsym, apDecls)
|
decls, vars, selected = createSimpleVars(fnsym, apDecls)
|
||||||
|
|
@ -608,11 +637,11 @@ func createDwarfVars(fnsym *obj.LSym, fn *Func, apDecls []*Node) ([]*Node, []*dw
|
||||||
if _, found := selected[n]; found {
|
if _, found := selected[n]; found {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
c := n.Sym.Name[0]
|
c := n.Sym().Name[0]
|
||||||
if c == '.' || n.Type.IsUntyped() {
|
if c == '.' || n.Type().IsUntyped() {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if n.Class() == PPARAM && !canSSAType(n.Type) {
|
if n.Class() == ir.PPARAM && !canSSAType(n.Type()) {
|
||||||
// SSA-able args get location lists, and may move in and
|
// SSA-able args get location lists, and may move in and
|
||||||
// out of registers, so those are handled elsewhere.
|
// out of registers, so those are handled elsewhere.
|
||||||
// Autos and named output params seem to get handled
|
// Autos and named output params seem to get handled
|
||||||
|
|
@ -624,13 +653,13 @@ func createDwarfVars(fnsym *obj.LSym, fn *Func, apDecls []*Node) ([]*Node, []*dw
|
||||||
decls = append(decls, n)
|
decls = append(decls, n)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
typename := dwarf.InfoPrefix + typesymname(n.Type)
|
typename := dwarf.InfoPrefix + typesymname(n.Type())
|
||||||
decls = append(decls, n)
|
decls = append(decls, n)
|
||||||
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
|
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
|
||||||
isReturnValue := (n.Class() == PPARAMOUT)
|
isReturnValue := (n.Class() == ir.PPARAMOUT)
|
||||||
if n.Class() == PPARAM || n.Class() == PPARAMOUT {
|
if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
|
||||||
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
||||||
} else if n.Class() == PAUTOHEAP {
|
} else if n.Class() == ir.PAUTOHEAP {
|
||||||
// If dcl in question has been promoted to heap, do a bit
|
// If dcl in question has been promoted to heap, do a bit
|
||||||
// of extra work to recover original class (auto or param);
|
// of extra work to recover original class (auto or param);
|
||||||
// see issue 30908. This ensures that we get the proper
|
// see issue 30908. This ensures that we get the proper
|
||||||
|
|
@ -638,28 +667,28 @@ func createDwarfVars(fnsym *obj.LSym, fn *Func, apDecls []*Node) ([]*Node, []*dw
|
||||||
// misleading location for the param (we want pointer-to-heap
|
// misleading location for the param (we want pointer-to-heap
|
||||||
// and not stack).
|
// and not stack).
|
||||||
// TODO(thanm): generate a better location expression
|
// TODO(thanm): generate a better location expression
|
||||||
stackcopy := n.Name.Param.Stackcopy
|
stackcopy := n.Name().Param.Stackcopy
|
||||||
if stackcopy != nil && (stackcopy.Class() == PPARAM || stackcopy.Class() == PPARAMOUT) {
|
if stackcopy != nil && (stackcopy.Class() == ir.PPARAM || stackcopy.Class() == ir.PPARAMOUT) {
|
||||||
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
||||||
isReturnValue = (stackcopy.Class() == PPARAMOUT)
|
isReturnValue = (stackcopy.Class() == ir.PPARAMOUT)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
inlIndex := 0
|
inlIndex := 0
|
||||||
if genDwarfInline > 1 {
|
if base.Flag.GenDwarfInl > 1 {
|
||||||
if n.Name.InlFormal() || n.Name.InlLocal() {
|
if n.Name().InlFormal() || n.Name().InlLocal() {
|
||||||
inlIndex = posInlIndex(n.Pos) + 1
|
inlIndex = posInlIndex(n.Pos()) + 1
|
||||||
if n.Name.InlFormal() {
|
if n.Name().InlFormal() {
|
||||||
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
declpos := Ctxt.InnermostPos(n.Pos)
|
declpos := base.Ctxt.InnermostPos(n.Pos())
|
||||||
vars = append(vars, &dwarf.Var{
|
vars = append(vars, &dwarf.Var{
|
||||||
Name: n.Sym.Name,
|
Name: n.Sym().Name,
|
||||||
IsReturnValue: isReturnValue,
|
IsReturnValue: isReturnValue,
|
||||||
Abbrev: abbrev,
|
Abbrev: abbrev,
|
||||||
StackOffset: int32(n.Xoffset),
|
StackOffset: int32(n.Offset()),
|
||||||
Type: Ctxt.Lookup(typename),
|
Type: base.Ctxt.Lookup(typename),
|
||||||
DeclFile: declpos.RelFilename(),
|
DeclFile: declpos.RelFilename(),
|
||||||
DeclLine: declpos.RelLine(),
|
DeclLine: declpos.RelLine(),
|
||||||
DeclCol: declpos.Col(),
|
DeclCol: declpos.Col(),
|
||||||
|
|
@ -679,14 +708,14 @@ func createDwarfVars(fnsym *obj.LSym, fn *Func, apDecls []*Node) ([]*Node, []*dw
|
||||||
// function that is not local to the package being compiled, then the
|
// function that is not local to the package being compiled, then the
|
||||||
// names of the variables may have been "versioned" to avoid conflicts
|
// names of the variables may have been "versioned" to avoid conflicts
|
||||||
// with local vars; disregard this versioning when sorting.
|
// with local vars; disregard this versioning when sorting.
|
||||||
func preInliningDcls(fnsym *obj.LSym) []*Node {
|
func preInliningDcls(fnsym *obj.LSym) []ir.Node {
|
||||||
fn := Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*Node)
|
fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(ir.Node)
|
||||||
var rdcl []*Node
|
var rdcl []ir.Node
|
||||||
for _, n := range fn.Func.Inl.Dcl {
|
for _, n := range fn.Func().Inl.Dcl {
|
||||||
c := n.Sym.Name[0]
|
c := n.Sym().Name[0]
|
||||||
// Avoid reporting "_" parameters, since if there are more than
|
// Avoid reporting "_" parameters, since if there are more than
|
||||||
// one, it can result in a collision later on, as in #23179.
|
// one, it can result in a collision later on, as in #23179.
|
||||||
if unversion(n.Sym.Name) == "_" || c == '.' || n.Type.IsUntyped() {
|
if unversion(n.Sym().Name) == "_" || c == '.' || n.Type().IsUntyped() {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
rdcl = append(rdcl, n)
|
rdcl = append(rdcl, n)
|
||||||
|
|
@ -698,33 +727,33 @@ func preInliningDcls(fnsym *obj.LSym) []*Node {
|
||||||
// stack pointer, suitable for use in a DWARF location entry. This has nothing
|
// stack pointer, suitable for use in a DWARF location entry. This has nothing
|
||||||
// to do with its offset in the user variable.
|
// to do with its offset in the user variable.
|
||||||
func stackOffset(slot ssa.LocalSlot) int32 {
|
func stackOffset(slot ssa.LocalSlot) int32 {
|
||||||
n := slot.N.(*Node)
|
n := slot.N
|
||||||
var base int64
|
var off int64
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PAUTO:
|
case ir.PAUTO:
|
||||||
if Ctxt.FixedFrameSize() == 0 {
|
if base.Ctxt.FixedFrameSize() == 0 {
|
||||||
base -= int64(Widthptr)
|
off -= int64(Widthptr)
|
||||||
}
|
}
|
||||||
if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" {
|
if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" {
|
||||||
// There is a word space for FP on ARM64 even if the frame pointer is disabled
|
// There is a word space for FP on ARM64 even if the frame pointer is disabled
|
||||||
base -= int64(Widthptr)
|
off -= int64(Widthptr)
|
||||||
}
|
}
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
base += Ctxt.FixedFrameSize()
|
off += base.Ctxt.FixedFrameSize()
|
||||||
}
|
}
|
||||||
return int32(base + n.Xoffset + slot.Off)
|
return int32(off + n.Offset() + slot.Off)
|
||||||
}
|
}
|
||||||
|
|
||||||
// createComplexVar builds a single DWARF variable entry and location list.
|
// createComplexVar builds a single DWARF variable entry and location list.
|
||||||
func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var {
|
func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var {
|
||||||
debug := fn.DebugInfo
|
debug := fn.DebugInfo.(*ssa.FuncDebug)
|
||||||
n := debug.Vars[varID].(*Node)
|
n := debug.Vars[varID]
|
||||||
|
|
||||||
var abbrev int
|
var abbrev int
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PAUTO:
|
case ir.PAUTO:
|
||||||
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
|
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
||||||
default:
|
default:
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -734,21 +763,21 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var {
|
||||||
delete(fnsym.Func().Autot, gotype)
|
delete(fnsym.Func().Autot, gotype)
|
||||||
typename := dwarf.InfoPrefix + gotype.Name[len("type."):]
|
typename := dwarf.InfoPrefix + gotype.Name[len("type."):]
|
||||||
inlIndex := 0
|
inlIndex := 0
|
||||||
if genDwarfInline > 1 {
|
if base.Flag.GenDwarfInl > 1 {
|
||||||
if n.Name.InlFormal() || n.Name.InlLocal() {
|
if n.Name().InlFormal() || n.Name().InlLocal() {
|
||||||
inlIndex = posInlIndex(n.Pos) + 1
|
inlIndex = posInlIndex(n.Pos()) + 1
|
||||||
if n.Name.InlFormal() {
|
if n.Name().InlFormal() {
|
||||||
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
declpos := Ctxt.InnermostPos(n.Pos)
|
declpos := base.Ctxt.InnermostPos(n.Pos())
|
||||||
dvar := &dwarf.Var{
|
dvar := &dwarf.Var{
|
||||||
Name: n.Sym.Name,
|
Name: n.Sym().Name,
|
||||||
IsReturnValue: n.Class() == PPARAMOUT,
|
IsReturnValue: n.Class() == ir.PPARAMOUT,
|
||||||
IsInlFormal: n.Name.InlFormal(),
|
IsInlFormal: n.Name().InlFormal(),
|
||||||
Abbrev: abbrev,
|
Abbrev: abbrev,
|
||||||
Type: Ctxt.Lookup(typename),
|
Type: base.Ctxt.Lookup(typename),
|
||||||
// The stack offset is used as a sorting key, so for decomposed
|
// The stack offset is used as a sorting key, so for decomposed
|
||||||
// variables just give it the first one. It's not used otherwise.
|
// variables just give it the first one. It's not used otherwise.
|
||||||
// This won't work well if the first slot hasn't been assigned a stack
|
// This won't work well if the first slot hasn't been assigned a stack
|
||||||
|
|
@ -763,7 +792,7 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var {
|
||||||
list := debug.LocationLists[varID]
|
list := debug.LocationLists[varID]
|
||||||
if len(list) != 0 {
|
if len(list) != 0 {
|
||||||
dvar.PutLocationList = func(listSym, startPC dwarf.Sym) {
|
dvar.PutLocationList = func(listSym, startPC dwarf.Sym) {
|
||||||
debug.PutLocationList(list, Ctxt, listSym.(*obj.LSym), startPC.(*obj.LSym))
|
debug.PutLocationList(list, base.Ctxt, listSym.(*obj.LSym), startPC.(*obj.LSym))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return dvar
|
return dvar
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"reflect"
|
"reflect"
|
||||||
"sort"
|
"sort"
|
||||||
|
|
@ -12,129 +13,133 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
func typeWithoutPointers() *types.Type {
|
func typeWithoutPointers() *types.Type {
|
||||||
t := types.New(TSTRUCT)
|
t := types.New(types.TSTRUCT)
|
||||||
f := &types.Field{Type: types.New(TINT)}
|
f := &types.Field{Type: types.New(types.TINT)}
|
||||||
t.SetFields([]*types.Field{f})
|
t.SetFields([]*types.Field{f})
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
|
|
||||||
func typeWithPointers() *types.Type {
|
func typeWithPointers() *types.Type {
|
||||||
t := types.New(TSTRUCT)
|
t := types.New(types.TSTRUCT)
|
||||||
f := &types.Field{Type: types.NewPtr(types.New(TINT))}
|
f := &types.Field{Type: types.NewPtr(types.New(types.TINT))}
|
||||||
t.SetFields([]*types.Field{f})
|
t.SetFields([]*types.Field{f})
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
|
|
||||||
func markUsed(n *Node) *Node {
|
func markUsed(n ir.Node) ir.Node {
|
||||||
n.Name.SetUsed(true)
|
n.Name().SetUsed(true)
|
||||||
return n
|
return n
|
||||||
}
|
}
|
||||||
|
|
||||||
func markNeedZero(n *Node) *Node {
|
func markNeedZero(n ir.Node) ir.Node {
|
||||||
n.Name.SetNeedzero(true)
|
n.Name().SetNeedzero(true)
|
||||||
return n
|
return n
|
||||||
}
|
}
|
||||||
|
|
||||||
func nodeWithClass(n Node, c Class) *Node {
|
|
||||||
n.SetClass(c)
|
|
||||||
n.Name = new(Name)
|
|
||||||
return &n
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test all code paths for cmpstackvarlt.
|
// Test all code paths for cmpstackvarlt.
|
||||||
func TestCmpstackvar(t *testing.T) {
|
func TestCmpstackvar(t *testing.T) {
|
||||||
|
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) ir.Node {
|
||||||
|
if s == nil {
|
||||||
|
s = &types.Sym{Name: "."}
|
||||||
|
}
|
||||||
|
n := NewName(s)
|
||||||
|
n.SetType(t)
|
||||||
|
n.SetOffset(xoffset)
|
||||||
|
n.SetClass(cl)
|
||||||
|
return n
|
||||||
|
}
|
||||||
testdata := []struct {
|
testdata := []struct {
|
||||||
a, b *Node
|
a, b ir.Node
|
||||||
lt bool
|
lt bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{}, PAUTO),
|
nod(0, nil, nil, ir.PAUTO),
|
||||||
nodeWithClass(Node{}, PFUNC),
|
nod(0, nil, nil, ir.PFUNC),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{}, PFUNC),
|
nod(0, nil, nil, ir.PFUNC),
|
||||||
nodeWithClass(Node{}, PAUTO),
|
nod(0, nil, nil, ir.PAUTO),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Xoffset: 0}, PFUNC),
|
nod(0, nil, nil, ir.PFUNC),
|
||||||
nodeWithClass(Node{Xoffset: 10}, PFUNC),
|
nod(10, nil, nil, ir.PFUNC),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Xoffset: 20}, PFUNC),
|
nod(20, nil, nil, ir.PFUNC),
|
||||||
nodeWithClass(Node{Xoffset: 10}, PFUNC),
|
nod(10, nil, nil, ir.PFUNC),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Xoffset: 10}, PFUNC),
|
nod(10, nil, nil, ir.PFUNC),
|
||||||
nodeWithClass(Node{Xoffset: 10}, PFUNC),
|
nod(10, nil, nil, ir.PFUNC),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Xoffset: 10}, PPARAM),
|
nod(10, nil, nil, ir.PPARAM),
|
||||||
nodeWithClass(Node{Xoffset: 20}, PPARAMOUT),
|
nod(20, nil, nil, ir.PPARAMOUT),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Xoffset: 10}, PPARAMOUT),
|
nod(10, nil, nil, ir.PPARAMOUT),
|
||||||
nodeWithClass(Node{Xoffset: 20}, PPARAM),
|
nod(20, nil, nil, ir.PPARAM),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
markUsed(nodeWithClass(Node{}, PAUTO)),
|
markUsed(nod(0, nil, nil, ir.PAUTO)),
|
||||||
nodeWithClass(Node{}, PAUTO),
|
nod(0, nil, nil, ir.PAUTO),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{}, PAUTO),
|
nod(0, nil, nil, ir.PAUTO),
|
||||||
markUsed(nodeWithClass(Node{}, PAUTO)),
|
markUsed(nod(0, nil, nil, ir.PAUTO)),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: typeWithoutPointers()}, PAUTO),
|
nod(0, typeWithoutPointers(), nil, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: typeWithPointers()}, PAUTO),
|
nod(0, typeWithPointers(), nil, ir.PAUTO),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: typeWithPointers()}, PAUTO),
|
nod(0, typeWithPointers(), nil, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: typeWithoutPointers()}, PAUTO),
|
nod(0, typeWithoutPointers(), nil, ir.PAUTO),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
markNeedZero(nodeWithClass(Node{Type: &types.Type{}}, PAUTO)),
|
markNeedZero(nod(0, &types.Type{}, nil, ir.PAUTO)),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Name: &Name{}}, PAUTO),
|
nod(0, &types.Type{}, nil, ir.PAUTO),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Name: &Name{}}, PAUTO),
|
nod(0, &types.Type{}, nil, ir.PAUTO),
|
||||||
markNeedZero(nodeWithClass(Node{Type: &types.Type{}}, PAUTO)),
|
markNeedZero(nod(0, &types.Type{}, nil, ir.PAUTO)),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 1}, Name: &Name{}}, PAUTO),
|
nod(0, &types.Type{Width: 1}, nil, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 2}, Name: &Name{}}, PAUTO),
|
nod(0, &types.Type{Width: 2}, nil, ir.PAUTO),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 2}, Name: &Name{}}, PAUTO),
|
nod(0, &types.Type{Width: 2}, nil, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 1}, Name: &Name{}}, PAUTO),
|
nod(0, &types.Type{Width: 1}, nil, ir.PAUTO),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "abc"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "xyz"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "abc"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "abc"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "xyz"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "abc"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -151,35 +156,42 @@ func TestCmpstackvar(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestStackvarSort(t *testing.T) {
|
func TestStackvarSort(t *testing.T) {
|
||||||
inp := []*Node{
|
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) ir.Node {
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
n := NewName(s)
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO),
|
n.SetType(t)
|
||||||
nodeWithClass(Node{Xoffset: 0, Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
n.SetOffset(xoffset)
|
||||||
nodeWithClass(Node{Xoffset: 10, Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
n.SetClass(cl)
|
||||||
nodeWithClass(Node{Xoffset: 20, Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
return n
|
||||||
markUsed(nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO)),
|
|
||||||
nodeWithClass(Node{Type: typeWithoutPointers(), Sym: &types.Sym{}}, PAUTO),
|
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO),
|
|
||||||
markNeedZero(nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO)),
|
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 1}, Sym: &types.Sym{}}, PAUTO),
|
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 2}, Sym: &types.Sym{}}, PAUTO),
|
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "abc"}}, PAUTO),
|
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "xyz"}}, PAUTO),
|
|
||||||
}
|
}
|
||||||
want := []*Node{
|
inp := []ir.Node{
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
nodeWithClass(Node{Xoffset: 0, Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Xoffset: 10, Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
nodeWithClass(Node{Xoffset: 20, Type: &types.Type{}, Sym: &types.Sym{}}, PFUNC),
|
nod(10, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
markUsed(nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO)),
|
nod(20, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
markNeedZero(nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO)),
|
markUsed(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 2}, Sym: &types.Sym{}}, PAUTO),
|
nod(0, typeWithoutPointers(), &types.Sym{}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{Width: 1}, Sym: &types.Sym{}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO),
|
markNeedZero(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{}}, PAUTO),
|
nod(0, &types.Type{Width: 1}, &types.Sym{}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "abc"}}, PAUTO),
|
nod(0, &types.Type{Width: 2}, &types.Sym{}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: &types.Type{}, Sym: &types.Sym{Name: "xyz"}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
|
||||||
nodeWithClass(Node{Type: typeWithoutPointers(), Sym: &types.Sym{}}, PAUTO),
|
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
|
||||||
|
}
|
||||||
|
want := []ir.Node{
|
||||||
|
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
|
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
|
nod(10, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
|
nod(20, &types.Type{}, &types.Sym{}, ir.PFUNC),
|
||||||
|
markUsed(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
|
||||||
|
markNeedZero(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
|
||||||
|
nod(0, &types.Type{Width: 2}, &types.Sym{}, ir.PAUTO),
|
||||||
|
nod(0, &types.Type{Width: 1}, &types.Sym{}, ir.PAUTO),
|
||||||
|
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
|
||||||
|
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
|
||||||
|
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
|
||||||
|
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
|
||||||
|
nod(0, typeWithoutPointers(), &types.Sym{}, ir.PAUTO),
|
||||||
}
|
}
|
||||||
sort.Sort(byStackVar(inp))
|
sort.Sort(byStackVar(inp))
|
||||||
if !reflect.DeepEqual(want, inp) {
|
if !reflect.DeepEqual(want, inp) {
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/ssa"
|
"cmd/compile/internal/ssa"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
|
|
@ -42,9 +43,9 @@ func (s *state) insertPhis() {
|
||||||
type phiState struct {
|
type phiState struct {
|
||||||
s *state // SSA state
|
s *state // SSA state
|
||||||
f *ssa.Func // function to work on
|
f *ssa.Func // function to work on
|
||||||
defvars []map[*Node]*ssa.Value // defined variables at end of each block
|
defvars []map[ir.Node]*ssa.Value // defined variables at end of each block
|
||||||
|
|
||||||
varnum map[*Node]int32 // variable numbering
|
varnum map[ir.Node]int32 // variable numbering
|
||||||
|
|
||||||
// properties of the dominator tree
|
// properties of the dominator tree
|
||||||
idom []*ssa.Block // dominator parents
|
idom []*ssa.Block // dominator parents
|
||||||
|
|
@ -59,7 +60,7 @@ type phiState struct {
|
||||||
hasDef *sparseSet // has a write of the variable we're processing
|
hasDef *sparseSet // has a write of the variable we're processing
|
||||||
|
|
||||||
// miscellaneous
|
// miscellaneous
|
||||||
placeholder *ssa.Value // dummy value to use as a "not set yet" placeholder.
|
placeholder *ssa.Value // value to use as a "not set yet" placeholder.
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *phiState) insertPhis() {
|
func (s *phiState) insertPhis() {
|
||||||
|
|
@ -70,15 +71,15 @@ func (s *phiState) insertPhis() {
|
||||||
// Find all the variables for which we need to match up reads & writes.
|
// Find all the variables for which we need to match up reads & writes.
|
||||||
// This step prunes any basic-block-only variables from consideration.
|
// This step prunes any basic-block-only variables from consideration.
|
||||||
// Generate a numbering for these variables.
|
// Generate a numbering for these variables.
|
||||||
s.varnum = map[*Node]int32{}
|
s.varnum = map[ir.Node]int32{}
|
||||||
var vars []*Node
|
var vars []ir.Node
|
||||||
var vartypes []*types.Type
|
var vartypes []*types.Type
|
||||||
for _, b := range s.f.Blocks {
|
for _, b := range s.f.Blocks {
|
||||||
for _, v := range b.Values {
|
for _, v := range b.Values {
|
||||||
if v.Op != ssa.OpFwdRef {
|
if v.Op != ssa.OpFwdRef {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
var_ := v.Aux.(*Node)
|
var_ := v.Aux.(ir.Node)
|
||||||
|
|
||||||
// Optimization: look back 1 block for the definition.
|
// Optimization: look back 1 block for the definition.
|
||||||
if len(b.Preds) == 1 {
|
if len(b.Preds) == 1 {
|
||||||
|
|
@ -183,7 +184,7 @@ levels:
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *phiState) insertVarPhis(n int, var_ *Node, defs []*ssa.Block, typ *types.Type) {
|
func (s *phiState) insertVarPhis(n int, var_ ir.Node, defs []*ssa.Block, typ *types.Type) {
|
||||||
priq := &s.priq
|
priq := &s.priq
|
||||||
q := s.q
|
q := s.q
|
||||||
queued := s.queued
|
queued := s.queued
|
||||||
|
|
@ -318,7 +319,7 @@ func (s *phiState) resolveFwdRefs() {
|
||||||
if v.Op != ssa.OpFwdRef {
|
if v.Op != ssa.OpFwdRef {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
n := s.varnum[v.Aux.(*Node)]
|
n := s.varnum[v.Aux.(ir.Node)]
|
||||||
v.Op = ssa.OpCopy
|
v.Op = ssa.OpCopy
|
||||||
v.Aux = nil
|
v.Aux = nil
|
||||||
v.AddArg(values[n])
|
v.AddArg(values[n])
|
||||||
|
|
@ -435,7 +436,7 @@ type simplePhiState struct {
|
||||||
s *state // SSA state
|
s *state // SSA state
|
||||||
f *ssa.Func // function to work on
|
f *ssa.Func // function to work on
|
||||||
fwdrefs []*ssa.Value // list of FwdRefs to be processed
|
fwdrefs []*ssa.Value // list of FwdRefs to be processed
|
||||||
defvars []map[*Node]*ssa.Value // defined variables at end of each block
|
defvars []map[ir.Node]*ssa.Value // defined variables at end of each block
|
||||||
reachable []bool // which blocks are reachable
|
reachable []bool // which blocks are reachable
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -449,7 +450,7 @@ func (s *simplePhiState) insertPhis() {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
s.fwdrefs = append(s.fwdrefs, v)
|
s.fwdrefs = append(s.fwdrefs, v)
|
||||||
var_ := v.Aux.(*Node)
|
var_ := v.Aux.(ir.Node)
|
||||||
if _, ok := s.defvars[b.ID][var_]; !ok {
|
if _, ok := s.defvars[b.ID][var_]; !ok {
|
||||||
s.defvars[b.ID][var_] = v // treat FwdDefs as definitions.
|
s.defvars[b.ID][var_] = v // treat FwdDefs as definitions.
|
||||||
}
|
}
|
||||||
|
|
@ -463,7 +464,7 @@ loop:
|
||||||
v := s.fwdrefs[len(s.fwdrefs)-1]
|
v := s.fwdrefs[len(s.fwdrefs)-1]
|
||||||
s.fwdrefs = s.fwdrefs[:len(s.fwdrefs)-1]
|
s.fwdrefs = s.fwdrefs[:len(s.fwdrefs)-1]
|
||||||
b := v.Block
|
b := v.Block
|
||||||
var_ := v.Aux.(*Node)
|
var_ := v.Aux.(ir.Node)
|
||||||
if b == s.f.Entry {
|
if b == s.f.Entry {
|
||||||
// No variable should be live at entry.
|
// No variable should be live at entry.
|
||||||
s.s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, var_, v)
|
s.s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, var_, v)
|
||||||
|
|
@ -511,7 +512,7 @@ loop:
|
||||||
}
|
}
|
||||||
|
|
||||||
// lookupVarOutgoing finds the variable's value at the end of block b.
|
// lookupVarOutgoing finds the variable's value at the end of block b.
|
||||||
func (s *simplePhiState) lookupVarOutgoing(b *ssa.Block, t *types.Type, var_ *Node, line src.XPos) *ssa.Value {
|
func (s *simplePhiState) lookupVarOutgoing(b *ssa.Block, t *types.Type, var_ ir.Node, line src.XPos) *ssa.Value {
|
||||||
for {
|
for {
|
||||||
if v := s.defvars[b.ID][var_]; v != nil {
|
if v := s.defvars[b.ID][var_]; v != nil {
|
||||||
return v
|
return v
|
||||||
|
|
|
||||||
|
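The phi-placement changes above swap the concrete *Node stored in ssa.Value.Aux for the ir.Node interface, so FwdRef lookups become interface type assertions and the varnum table is keyed by ir.Node. A minimal standalone sketch of that pattern, using toy stand-in types (node, variable, value) rather than the real cmd/compile ones:

```go
package main

import "fmt"

// node is a toy stand-in for the ir.Node interface.
type node interface {
	Name() string
}

// variable is a toy stand-in for a concrete local-variable node.
type variable struct{ name string }

func (v *variable) Name() string { return v.name }

// value is a toy stand-in for *ssa.Value: FwdRef values carry the
// source-level variable they refer to in Aux.
type value struct {
	Op  string
	Aux interface{}
}

func main() {
	x := &variable{name: "x"}
	fwd := &value{Op: "FwdRef", Aux: x}

	// As in insertPhis: number each distinct variable, keyed by the
	// interface value instead of a concrete pointer type.
	varnum := make(map[node]int32)
	if fwd.Op == "FwdRef" {
		v := fwd.Aux.(node) // previously fwd.Aux.(*Node)
		if _, ok := varnum[v]; !ok {
			varnum[v] = int32(len(varnum))
		}
	}
	fmt.Println(varnum[x]) // prints 0
}
```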
|
@ -15,6 +15,8 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/ssa"
|
"cmd/compile/internal/ssa"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/obj"
|
"cmd/internal/obj"
|
||||||
|
|
@ -99,10 +101,10 @@ type BlockEffects struct {
|
||||||
|
|
||||||
// A collection of global state used by liveness analysis.
|
// A collection of global state used by liveness analysis.
|
||||||
type Liveness struct {
|
type Liveness struct {
|
||||||
fn *Node
|
fn ir.Node
|
||||||
f *ssa.Func
|
f *ssa.Func
|
||||||
vars []*Node
|
vars []ir.Node
|
||||||
idx map[*Node]int32
|
idx map[ir.Node]int32
|
||||||
stkptrsize int64
|
stkptrsize int64
|
||||||
|
|
||||||
be []BlockEffects
|
be []BlockEffects
|
||||||
|
|
@ -204,20 +206,20 @@ type progeffectscache struct {
|
||||||
// nor do we care about non-local variables,
|
// nor do we care about non-local variables,
|
||||||
// nor do we care about empty structs (handled by the pointer check),
|
// nor do we care about empty structs (handled by the pointer check),
|
||||||
// nor do we care about the fake PAUTOHEAP variables.
|
// nor do we care about the fake PAUTOHEAP variables.
|
||||||
func livenessShouldTrack(n *Node) bool {
|
func livenessShouldTrack(n ir.Node) bool {
|
||||||
return n.Op == ONAME && (n.Class() == PAUTO || n.Class() == PPARAM || n.Class() == PPARAMOUT) && n.Type.HasPointers()
|
return n.Op() == ir.ONAME && (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type().HasPointers()
|
||||||
}
|
}
|
||||||
|
|
||||||
// getvariables returns the list of on-stack variables that we need to track
|
// getvariables returns the list of on-stack variables that we need to track
|
||||||
// and a map for looking up indices by *Node.
|
// and a map for looking up indices by *Node.
|
||||||
func getvariables(fn *Node) ([]*Node, map[*Node]int32) {
|
func getvariables(fn ir.Node) ([]ir.Node, map[ir.Node]int32) {
|
||||||
var vars []*Node
|
var vars []ir.Node
|
||||||
for _, n := range fn.Func.Dcl {
|
for _, n := range fn.Func().Dcl {
|
||||||
if livenessShouldTrack(n) {
|
if livenessShouldTrack(n) {
|
||||||
vars = append(vars, n)
|
vars = append(vars, n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
idx := make(map[*Node]int32, len(vars))
|
idx := make(map[ir.Node]int32, len(vars))
|
||||||
for i, n := range vars {
|
for i, n := range vars {
|
||||||
idx[n] = int32(i)
|
idx[n] = int32(i)
|
||||||
}
|
}
|
||||||
|
|
@ -226,14 +228,14 @@ func getvariables(fn *Node) ([]*Node, map[*Node]int32) {
|
||||||
|
|
||||||
func (lv *Liveness) initcache() {
|
func (lv *Liveness) initcache() {
|
||||||
if lv.cache.initialized {
|
if lv.cache.initialized {
|
||||||
Fatalf("liveness cache initialized twice")
|
base.Fatalf("liveness cache initialized twice")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
lv.cache.initialized = true
|
lv.cache.initialized = true
|
||||||
|
|
||||||
for i, node := range lv.vars {
|
for i, node := range lv.vars {
|
||||||
switch node.Class() {
|
switch node.Class() {
|
||||||
case PPARAM:
|
case ir.PPARAM:
|
||||||
// A return instruction with a p.to is a tail return, which brings
|
// A return instruction with a p.to is a tail return, which brings
|
||||||
// the stack pointer back up (if it ever went down) and then jumps
|
// the stack pointer back up (if it ever went down) and then jumps
|
||||||
// to a new function entirely. That form of instruction must read
|
// to a new function entirely. That form of instruction must read
|
||||||
|
|
@ -242,7 +244,7 @@ func (lv *Liveness) initcache() {
|
||||||
// function runs.
|
// function runs.
|
||||||
lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))
|
lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))
|
||||||
|
|
||||||
case PPARAMOUT:
|
case ir.PPARAMOUT:
|
||||||
// All results are live at every return point.
|
// All results are live at every return point.
|
||||||
// Note that this point is after escaping return values
|
// Note that this point is after escaping return values
|
||||||
// are copied back to the stack using their PAUTOHEAP references.
|
// are copied back to the stack using their PAUTOHEAP references.
|
||||||
|
|
@ -270,7 +272,7 @@ const (
|
||||||
// If v does not affect any tracked variables, it returns -1, 0.
|
// If v does not affect any tracked variables, it returns -1, 0.
|
||||||
func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
|
func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
|
||||||
n, e := affectedNode(v)
|
n, e := affectedNode(v)
|
||||||
if e == 0 || n == nil || n.Op != ONAME { // cheapest checks first
|
if e == 0 || n == nil || n.Op() != ir.ONAME { // cheapest checks first
|
||||||
return -1, 0
|
return -1, 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -280,7 +282,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
|
||||||
// variable" ICEs (issue 19632).
|
// variable" ICEs (issue 19632).
|
||||||
switch v.Op {
|
switch v.Op {
|
||||||
case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
|
case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
|
||||||
if !n.Name.Used() {
|
if !n.Name().Used() {
|
||||||
return -1, 0
|
return -1, 0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -295,7 +297,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
|
||||||
if e&(ssa.SymRead|ssa.SymAddr) != 0 {
|
if e&(ssa.SymRead|ssa.SymAddr) != 0 {
|
||||||
effect |= uevar
|
effect |= uevar
|
||||||
}
|
}
|
||||||
if e&ssa.SymWrite != 0 && (!isfat(n.Type) || v.Op == ssa.OpVarDef) {
|
if e&ssa.SymWrite != 0 && (!isfat(n.Type()) || v.Op == ssa.OpVarDef) {
|
||||||
effect |= varkill
|
effect |= varkill
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -310,7 +312,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// affectedNode returns the *Node affected by v
|
// affectedNode returns the *Node affected by v
|
||||||
func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
|
func affectedNode(v *ssa.Value) (ir.Node, ssa.SymEffect) {
|
||||||
// Special cases.
|
// Special cases.
|
||||||
switch v.Op {
|
switch v.Op {
|
||||||
case ssa.OpLoadReg:
|
case ssa.OpLoadReg:
|
||||||
|
|
@ -321,9 +323,9 @@ func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
|
||||||
return n, ssa.SymWrite
|
return n, ssa.SymWrite
|
||||||
|
|
||||||
case ssa.OpVarLive:
|
case ssa.OpVarLive:
|
||||||
return v.Aux.(*Node), ssa.SymRead
|
return v.Aux.(ir.Node), ssa.SymRead
|
||||||
case ssa.OpVarDef, ssa.OpVarKill:
|
case ssa.OpVarDef, ssa.OpVarKill:
|
||||||
return v.Aux.(*Node), ssa.SymWrite
|
return v.Aux.(ir.Node), ssa.SymWrite
|
||||||
case ssa.OpKeepAlive:
|
case ssa.OpKeepAlive:
|
||||||
n, _ := AutoVar(v.Args[0])
|
n, _ := AutoVar(v.Args[0])
|
||||||
return n, ssa.SymRead
|
return n, ssa.SymRead
|
||||||
|
|
@ -338,10 +340,10 @@ func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
|
||||||
case nil, *obj.LSym:
|
case nil, *obj.LSym:
|
||||||
// ok, but no node
|
// ok, but no node
|
||||||
return nil, e
|
return nil, e
|
||||||
case *Node:
|
case ir.Node:
|
||||||
return a, e
|
return a, e
|
||||||
default:
|
default:
|
||||||
Fatalf("weird aux: %s", v.LongString())
|
base.Fatalf("weird aux: %s", v.LongString())
|
||||||
return nil, e
|
return nil, e
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -354,7 +356,7 @@ type livenessFuncCache struct {
|
||||||
// Constructs a new liveness structure used to hold the global state of the
|
// Constructs a new liveness structure used to hold the global state of the
|
||||||
// liveness computation. The cfg argument is a slice of *BasicBlocks and the
|
// liveness computation. The cfg argument is a slice of *BasicBlocks and the
|
||||||
// vars argument is a slice of *Nodes.
|
// vars argument is a slice of *Nodes.
|
||||||
func newliveness(fn *Node, f *ssa.Func, vars []*Node, idx map[*Node]int32, stkptrsize int64) *Liveness {
|
func newliveness(fn ir.Node, f *ssa.Func, vars []ir.Node, idx map[ir.Node]int32, stkptrsize int64) *Liveness {
|
||||||
lv := &Liveness{
|
lv := &Liveness{
|
||||||
fn: fn,
|
fn: fn,
|
||||||
f: f,
|
f: f,
|
||||||
|
|
@ -406,7 +408,7 @@ func (lv *Liveness) blockEffects(b *ssa.Block) *BlockEffects {
|
||||||
// on future calls with the same type t.
|
// on future calls with the same type t.
|
||||||
func onebitwalktype1(t *types.Type, off int64, bv bvec) {
|
func onebitwalktype1(t *types.Type, off int64, bv bvec) {
|
||||||
if t.Align > 0 && off&int64(t.Align-1) != 0 {
|
if t.Align > 0 && off&int64(t.Align-1) != 0 {
|
||||||
Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
|
base.Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
|
||||||
}
|
}
|
||||||
if !t.HasPointers() {
|
if !t.HasPointers() {
|
||||||
// Note: this case ensures that pointers to go:notinheap types
|
// Note: this case ensures that pointers to go:notinheap types
|
||||||
|
|
@ -415,25 +417,25 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
|
||||||
}
|
}
|
||||||
|
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
case TPTR, TUNSAFEPTR, TFUNC, TCHAN, TMAP:
|
case types.TPTR, types.TUNSAFEPTR, types.TFUNC, types.TCHAN, types.TMAP:
|
||||||
if off&int64(Widthptr-1) != 0 {
|
if off&int64(Widthptr-1) != 0 {
|
||||||
Fatalf("onebitwalktype1: invalid alignment, %v", t)
|
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
|
||||||
}
|
}
|
||||||
bv.Set(int32(off / int64(Widthptr))) // pointer
|
bv.Set(int32(off / int64(Widthptr))) // pointer
|
||||||
|
|
||||||
case TSTRING:
|
case types.TSTRING:
|
||||||
// struct { byte *str; intgo len; }
|
// struct { byte *str; intgo len; }
|
||||||
if off&int64(Widthptr-1) != 0 {
|
if off&int64(Widthptr-1) != 0 {
|
||||||
Fatalf("onebitwalktype1: invalid alignment, %v", t)
|
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
|
||||||
}
|
}
|
||||||
bv.Set(int32(off / int64(Widthptr))) //pointer in first slot
|
bv.Set(int32(off / int64(Widthptr))) //pointer in first slot
|
||||||
|
|
||||||
case TINTER:
|
case types.TINTER:
|
||||||
// struct { Itab *tab; void *data; }
|
// struct { Itab *tab; void *data; }
|
||||||
// or, when isnilinter(t)==true:
|
// or, when isnilinter(t)==true:
|
||||||
// struct { Type *type; void *data; }
|
// struct { Type *type; void *data; }
|
||||||
if off&int64(Widthptr-1) != 0 {
|
if off&int64(Widthptr-1) != 0 {
|
||||||
Fatalf("onebitwalktype1: invalid alignment, %v", t)
|
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
|
||||||
}
|
}
|
||||||
// The first word of an interface is a pointer, but we don't
|
// The first word of an interface is a pointer, but we don't
|
||||||
// treat it as such.
|
// treat it as such.
|
||||||
|
|
@ -449,14 +451,14 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
|
||||||
// well as scan itabs to update their itab._type fields).
|
// well as scan itabs to update their itab._type fields).
|
||||||
bv.Set(int32(off/int64(Widthptr) + 1)) // pointer in second slot
|
bv.Set(int32(off/int64(Widthptr) + 1)) // pointer in second slot
|
||||||
|
|
||||||
case TSLICE:
|
case types.TSLICE:
|
||||||
// struct { byte *array; uintgo len; uintgo cap; }
|
// struct { byte *array; uintgo len; uintgo cap; }
|
||||||
if off&int64(Widthptr-1) != 0 {
|
if off&int64(Widthptr-1) != 0 {
|
||||||
Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
|
base.Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
|
||||||
}
|
}
|
||||||
bv.Set(int32(off / int64(Widthptr))) // pointer in first slot (BitsPointer)
|
bv.Set(int32(off / int64(Widthptr))) // pointer in first slot (BitsPointer)
|
||||||
|
|
||||||
case TARRAY:
|
case types.TARRAY:
|
||||||
elt := t.Elem()
|
elt := t.Elem()
|
||||||
if elt.Width == 0 {
|
if elt.Width == 0 {
|
||||||
// Short-circuit for #20739.
|
// Short-circuit for #20739.
|
||||||
|
|
@ -467,20 +469,20 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
|
||||||
off += elt.Width
|
off += elt.Width
|
||||||
}
|
}
|
||||||
|
|
||||||
case TSTRUCT:
|
case types.TSTRUCT:
|
||||||
for _, f := range t.Fields().Slice() {
|
for _, f := range t.Fields().Slice() {
|
||||||
onebitwalktype1(f.Type, off+f.Offset, bv)
|
onebitwalktype1(f.Type, off+f.Offset, bv)
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
Fatalf("onebitwalktype1: unexpected type, %v", t)
|
base.Fatalf("onebitwalktype1: unexpected type, %v", t)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generates live pointer value maps for arguments and local variables. The
|
// Generates live pointer value maps for arguments and local variables. The
|
||||||
// this argument and the in arguments are always assumed live. The vars
|
// this argument and the in arguments are always assumed live. The vars
|
||||||
// argument is a slice of *Nodes.
|
// argument is a slice of *Nodes.
|
||||||
func (lv *Liveness) pointerMap(liveout bvec, vars []*Node, args, locals bvec) {
|
func (lv *Liveness) pointerMap(liveout bvec, vars []ir.Node, args, locals bvec) {
|
||||||
for i := int32(0); ; i++ {
|
for i := int32(0); ; i++ {
|
||||||
i = liveout.Next(i)
|
i = liveout.Next(i)
|
||||||
if i < 0 {
|
if i < 0 {
|
||||||
|
|
@ -488,11 +490,11 @@ func (lv *Liveness) pointerMap(liveout bvec, vars []*Node, args, locals bvec) {
|
||||||
}
|
}
|
||||||
node := vars[i]
|
node := vars[i]
|
||||||
switch node.Class() {
|
switch node.Class() {
|
||||||
case PAUTO:
|
case ir.PAUTO:
|
||||||
onebitwalktype1(node.Type, node.Xoffset+lv.stkptrsize, locals)
|
onebitwalktype1(node.Type(), node.Offset()+lv.stkptrsize, locals)
|
||||||
|
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
onebitwalktype1(node.Type, node.Xoffset, args)
|
onebitwalktype1(node.Type(), node.Offset(), args)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -509,7 +511,7 @@ func allUnsafe(f *ssa.Func) bool {
|
||||||
// go:nosplit functions are similar. Since safe points used to
|
// go:nosplit functions are similar. Since safe points used to
|
||||||
// be coupled with stack checks, go:nosplit often actually
|
// be coupled with stack checks, go:nosplit often actually
|
||||||
// means "no safe points in this function".
|
// means "no safe points in this function".
|
||||||
return compiling_runtime || f.NoSplit
|
return base.Flag.CompilingRuntime || f.NoSplit
|
||||||
}
|
}
|
||||||
|
|
||||||
// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
|
// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
|
||||||
|
|
@ -786,14 +788,14 @@ func (lv *Liveness) epilogue() {
|
||||||
// pointers to copy values back to the stack).
|
// pointers to copy values back to the stack).
|
||||||
// TODO: if the output parameter is heap-allocated, then we
|
// TODO: if the output parameter is heap-allocated, then we
|
||||||
// don't need to keep the stack copy live?
|
// don't need to keep the stack copy live?
|
||||||
if lv.fn.Func.HasDefer() {
|
if lv.fn.Func().HasDefer() {
|
||||||
for i, n := range lv.vars {
|
for i, n := range lv.vars {
|
||||||
if n.Class() == PPARAMOUT {
|
if n.Class() == ir.PPARAMOUT {
|
||||||
if n.Name.IsOutputParamHeapAddr() {
|
if n.Name().IsOutputParamHeapAddr() {
|
||||||
// Just to be paranoid. Heap addresses are PAUTOs.
|
// Just to be paranoid. Heap addresses are PAUTOs.
|
||||||
Fatalf("variable %v both output param and heap output param", n)
|
base.Fatalf("variable %v both output param and heap output param", n)
|
||||||
}
|
}
|
||||||
if n.Name.Param.Heapaddr != nil {
|
if n.Name().Param.Heapaddr != nil {
|
||||||
// If this variable moved to the heap, then
|
// If this variable moved to the heap, then
|
||||||
// its stack copy is not live.
|
// its stack copy is not live.
|
||||||
continue
|
continue
|
||||||
|
|
@ -801,22 +803,22 @@ func (lv *Liveness) epilogue() {
|
||||||
// Note: zeroing is handled by zeroResults in walk.go.
|
// Note: zeroing is handled by zeroResults in walk.go.
|
||||||
livedefer.Set(int32(i))
|
livedefer.Set(int32(i))
|
||||||
}
|
}
|
||||||
if n.Name.IsOutputParamHeapAddr() {
|
if n.Name().IsOutputParamHeapAddr() {
|
||||||
// This variable will be overwritten early in the function
|
// This variable will be overwritten early in the function
|
||||||
// prologue (from the result of a mallocgc) but we need to
|
// prologue (from the result of a mallocgc) but we need to
|
||||||
// zero it in case that malloc causes a stack scan.
|
// zero it in case that malloc causes a stack scan.
|
||||||
n.Name.SetNeedzero(true)
|
n.Name().SetNeedzero(true)
|
||||||
livedefer.Set(int32(i))
|
livedefer.Set(int32(i))
|
||||||
}
|
}
|
||||||
if n.Name.OpenDeferSlot() {
|
if n.Name().OpenDeferSlot() {
|
||||||
// Open-coded defer args slots must be live
|
// Open-coded defer args slots must be live
|
||||||
// everywhere in a function, since a panic can
|
// everywhere in a function, since a panic can
|
||||||
// occur (almost) anywhere. Because it is live
|
// occur (almost) anywhere. Because it is live
|
||||||
// everywhere, it must be zeroed on entry.
|
// everywhere, it must be zeroed on entry.
|
||||||
livedefer.Set(int32(i))
|
livedefer.Set(int32(i))
|
||||||
// It was already marked as Needzero when created.
|
// It was already marked as Needzero when created.
|
||||||
if !n.Name.Needzero() {
|
if !n.Name().Needzero() {
|
||||||
Fatalf("all pointer-containing defer arg slots should have Needzero set")
|
base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -878,7 +880,7 @@ func (lv *Liveness) epilogue() {
|
||||||
|
|
||||||
if b == lv.f.Entry {
|
if b == lv.f.Entry {
|
||||||
if index != 0 {
|
if index != 0 {
|
||||||
Fatalf("bad index for entry point: %v", index)
|
base.Fatalf("bad index for entry point: %v", index)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check to make sure only input variables are live.
|
// Check to make sure only input variables are live.
|
||||||
|
|
@ -886,10 +888,10 @@ func (lv *Liveness) epilogue() {
|
||||||
if !liveout.Get(int32(i)) {
|
if !liveout.Get(int32(i)) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if n.Class() == PPARAM {
|
if n.Class() == ir.PPARAM {
|
||||||
continue // ok
|
continue // ok
|
||||||
}
|
}
|
||||||
Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n)
|
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Func().Nname, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Record live variables.
|
// Record live variables.
|
||||||
|
|
@ -902,7 +904,7 @@ func (lv *Liveness) epilogue() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// If we have an open-coded deferreturn call, make a liveness map for it.
|
// If we have an open-coded deferreturn call, make a liveness map for it.
|
||||||
if lv.fn.Func.OpenCodedDeferDisallowed() {
|
if lv.fn.Func().OpenCodedDeferDisallowed() {
|
||||||
lv.livenessMap.deferreturn = LivenessDontCare
|
lv.livenessMap.deferreturn = LivenessDontCare
|
||||||
} else {
|
} else {
|
||||||
lv.livenessMap.deferreturn = LivenessIndex{
|
lv.livenessMap.deferreturn = LivenessIndex{
|
||||||
|
|
@ -919,8 +921,8 @@ func (lv *Liveness) epilogue() {
|
||||||
// the only things that can possibly be live are the
|
// the only things that can possibly be live are the
|
||||||
// input parameters.
|
// input parameters.
|
||||||
for j, n := range lv.vars {
|
for j, n := range lv.vars {
|
||||||
if n.Class() != PPARAM && lv.stackMaps[0].Get(int32(j)) {
|
if n.Class() != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
|
||||||
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Func.Nname, n)
|
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Func().Nname, n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -966,7 +968,7 @@ func (lv *Liveness) compact(b *ssa.Block) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
|
func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
|
||||||
if debuglive == 0 || lv.fn.funcname() == "init" || strings.HasPrefix(lv.fn.funcname(), ".") {
|
if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !(v == nil || v.Op.IsCall()) {
|
if !(v == nil || v.Op.IsCall()) {
|
||||||
|
|
@ -978,14 +980,14 @@ func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
pos := lv.fn.Func.Nname.Pos
|
pos := lv.fn.Func().Nname.Pos()
|
||||||
if v != nil {
|
if v != nil {
|
||||||
pos = v.Pos
|
pos = v.Pos
|
||||||
}
|
}
|
||||||
|
|
||||||
s := "live at "
|
s := "live at "
|
||||||
if v == nil {
|
if v == nil {
|
||||||
s += fmt.Sprintf("entry to %s:", lv.fn.funcname())
|
s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
|
||||||
} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
|
} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
|
||||||
fn := sym.Fn.Name
|
fn := sym.Fn.Name
|
||||||
if pos := strings.Index(fn, "."); pos >= 0 {
|
if pos := strings.Index(fn, "."); pos >= 0 {
|
||||||
|
|
@ -1002,7 +1004,7 @@ func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Warnl(pos, s)
|
base.WarnfAt(pos, s)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
|
func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
|
||||||
|
|
@ -1022,7 +1024,7 @@ func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
|
||||||
if !live.Get(int32(i)) {
|
if !live.Get(int32(i)) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
fmt.Printf("%s%s", comma, n.Sym.Name)
|
fmt.Printf("%s%s", comma, n.Sym().Name)
|
||||||
comma = ","
|
comma = ","
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
|
|
@ -1040,7 +1042,7 @@ func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bo
|
||||||
}
|
}
|
||||||
fmt.Printf("%s=", name)
|
fmt.Printf("%s=", name)
|
||||||
if x {
|
if x {
|
||||||
fmt.Printf("%s", lv.vars[pos].Sym.Name)
|
fmt.Printf("%s", lv.vars[pos].Sym().Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true
|
return true
|
||||||
|
|
@ -1050,7 +1052,7 @@ func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bo
|
||||||
// This format synthesizes the information used during the multiple passes
|
// This format synthesizes the information used during the multiple passes
|
||||||
// into a single presentation.
|
// into a single presentation.
|
||||||
func (lv *Liveness) printDebug() {
|
func (lv *Liveness) printDebug() {
|
||||||
fmt.Printf("liveness: %s\n", lv.fn.funcname())
|
fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))
|
||||||
|
|
||||||
for i, b := range lv.f.Blocks {
|
for i, b := range lv.f.Blocks {
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
|
|
@ -1088,7 +1090,7 @@ func (lv *Liveness) printDebug() {
|
||||||
|
|
||||||
if b == lv.f.Entry {
|
if b == lv.f.Entry {
|
||||||
live := lv.stackMaps[0]
|
live := lv.stackMaps[0]
|
||||||
fmt.Printf("(%s) function entry\n", linestr(lv.fn.Func.Nname.Pos))
|
fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Func().Nname.Pos()))
|
||||||
fmt.Printf("\tlive=")
|
fmt.Printf("\tlive=")
|
||||||
printed = false
|
printed = false
|
||||||
for j, n := range lv.vars {
|
for j, n := range lv.vars {
|
||||||
|
|
@ -1105,7 +1107,7 @@ func (lv *Liveness) printDebug() {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, v := range b.Values {
|
for _, v := range b.Values {
|
||||||
fmt.Printf("(%s) %v\n", linestr(v.Pos), v.LongString())
|
fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())
|
||||||
|
|
||||||
pcdata := lv.livenessMap.Get(v)
|
pcdata := lv.livenessMap.Get(v)
|
||||||
|
|
||||||
|
|
@ -1162,11 +1164,11 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
|
||||||
// Size args bitmaps to be just large enough to hold the largest pointer.
|
// Size args bitmaps to be just large enough to hold the largest pointer.
|
||||||
// First, find the largest Xoffset node we care about.
|
// First, find the largest Xoffset node we care about.
|
||||||
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
|
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
|
||||||
var maxArgNode *Node
|
var maxArgNode ir.Node
|
||||||
for _, n := range lv.vars {
|
for _, n := range lv.vars {
|
||||||
switch n.Class() {
|
switch n.Class() {
|
||||||
case PPARAM, PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
if maxArgNode == nil || n.Xoffset > maxArgNode.Xoffset {
|
if maxArgNode == nil || n.Offset() > maxArgNode.Offset() {
|
||||||
maxArgNode = n
|
maxArgNode = n
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1174,7 +1176,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
|
||||||
// Next, find the offset of the largest pointer in the largest node.
|
// Next, find the offset of the largest pointer in the largest node.
|
||||||
var maxArgs int64
|
var maxArgs int64
|
||||||
if maxArgNode != nil {
|
if maxArgNode != nil {
|
||||||
maxArgs = maxArgNode.Xoffset + typeptrdata(maxArgNode.Type)
|
maxArgs = maxArgNode.Offset() + typeptrdata(maxArgNode.Type())
|
||||||
}
|
}
|
||||||
|
|
||||||
// Size locals bitmaps to be stkptrsize sized.
|
// Size locals bitmaps to be stkptrsize sized.
|
||||||
|
|
@ -1214,7 +1216,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
|
||||||
// These symbols will be added to Ctxt.Data by addGCLocals
|
// These symbols will be added to Ctxt.Data by addGCLocals
|
||||||
// after parallel compilation is done.
|
// after parallel compilation is done.
|
||||||
makeSym := func(tmpSym *obj.LSym) *obj.LSym {
|
makeSym := func(tmpSym *obj.LSym) *obj.LSym {
|
||||||
return Ctxt.LookupInit(fmt.Sprintf("gclocals·%x", md5.Sum(tmpSym.P)), func(lsym *obj.LSym) {
|
return base.Ctxt.LookupInit(fmt.Sprintf("gclocals·%x", md5.Sum(tmpSym.P)), func(lsym *obj.LSym) {
|
||||||
lsym.P = tmpSym.P
|
lsym.P = tmpSym.P
|
||||||
lsym.Set(obj.AttrContentAddressable, true)
|
lsym.Set(obj.AttrContentAddressable, true)
|
||||||
})
|
})
|
||||||
|
|
@ -1235,7 +1237,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
|
||||||
lv.prologue()
|
lv.prologue()
|
||||||
lv.solve()
|
lv.solve()
|
||||||
lv.epilogue()
|
lv.epilogue()
|
||||||
if debuglive > 0 {
|
if base.Flag.Live > 0 {
|
||||||
lv.showlive(nil, lv.stackMaps[0])
|
lv.showlive(nil, lv.stackMaps[0])
|
||||||
for _, b := range f.Blocks {
|
for _, b := range f.Blocks {
|
||||||
for _, val := range b.Values {
|
for _, val := range b.Values {
|
||||||
|
|
@ -1245,7 +1247,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if debuglive >= 2 {
|
if base.Flag.Live >= 2 {
|
||||||
lv.printDebug()
|
lv.printDebug()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1264,7 +1266,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Emit the live pointer map data structures
|
// Emit the live pointer map data structures
|
||||||
ls := e.curfn.Func.lsym
|
ls := e.curfn.Func().LSym
|
||||||
fninfo := ls.Func()
|
fninfo := ls.Func()
|
||||||
fninfo.GCArgs, fninfo.GCLocals = lv.emit()
|
fninfo.GCArgs, fninfo.GCLocals = lv.emit()
|
||||||
|
|
||||||
|
|
@ -1299,16 +1301,16 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
|
||||||
func isfat(t *types.Type) bool {
|
func isfat(t *types.Type) bool {
|
||||||
if t != nil {
|
if t != nil {
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
case TSLICE, TSTRING,
|
case types.TSLICE, types.TSTRING,
|
||||||
TINTER: // maybe remove later
|
types.TINTER: // maybe remove later
|
||||||
return true
|
return true
|
||||||
case TARRAY:
|
case types.TARRAY:
|
||||||
// Array of 1 element, check if element is fat
|
// Array of 1 element, check if element is fat
|
||||||
if t.NumElem() == 1 {
|
if t.NumElem() == 1 {
|
||||||
return isfat(t.Elem())
|
return isfat(t.Elem())
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
case TSTRUCT:
|
case types.TSTRUCT:
|
||||||
// Struct with 1 field, check if field is fat
|
// Struct with 1 field, check if field is fat
|
||||||
if t.NumFields() == 1 {
|
if t.NumFields() == 1 {
|
||||||
return isfat(t.Field(0).Type)
|
return isfat(t.Field(0).Type)
|
||||||
|
|
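The onebitwalktype1 and pointerMap changes above keep the existing stack-map scheme: one bit per pointer-sized word, set wherever that word may hold a live pointer (the data word of a string or slice, the second word of an interface, each pointer field of a struct). A small self-contained sketch of that bit layout, using a toy bitvector and hand-written pointer offsets instead of the compiler's *types.Type; the 8-byte word size is an assumption standing in for Widthptr on a 64-bit target:

```go
package main

import "fmt"

const ptrSize = 8 // assumed 64-bit target, standing in for Widthptr

// bvec is a toy bitvector: one bit per pointer-sized stack word.
type bvec []bool

func (b bvec) set(i int64) { b[i] = true }

// ptrOffsets describes a type purely by the byte offsets of the words
// that can hold pointers (a stand-in for walking *types.Type).
type ptrOffsets []int64

// walk marks, for a value with the given layout placed at offset off,
// every pointer-holding word in the bitvector, the same idea as
// onebitwalktype1 without the real type system.
func walk(layout ptrOffsets, off int64, bv bvec) {
	for _, p := range layout {
		bv.set((off + p) / ptrSize)
	}
}

func main() {
	// struct { s string; i int; xs []byte } laid out at offset 0:
	// the string data pointer sits at byte 0, the slice data pointer
	// at byte 24, and nothing else can hold a pointer.
	layout := ptrOffsets{0, 24}

	bv := make(bvec, 6) // 48 bytes of stack = 6 words
	walk(layout, 0, bv)
	fmt.Println(bv) // [true false false true false false]
}
```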
|
||||||
|
|
@ -5,6 +5,8 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
"cmd/internal/sys"
|
"cmd/internal/sys"
|
||||||
|
|
@ -47,9 +49,9 @@ var omit_pkgs = []string{
|
||||||
var norace_inst_pkgs = []string{"sync", "sync/atomic"}
|
var norace_inst_pkgs = []string{"sync", "sync/atomic"}
|
||||||
|
|
||||||
func ispkgin(pkgs []string) bool {
|
func ispkgin(pkgs []string) bool {
|
||||||
if myimportpath != "" {
|
if base.Ctxt.Pkgpath != "" {
|
||||||
for _, p := range pkgs {
|
for _, p := range pkgs {
|
||||||
if myimportpath == p {
|
if base.Ctxt.Pkgpath == p {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -58,22 +60,22 @@ func ispkgin(pkgs []string) bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func instrument(fn *Node) {
|
func instrument(fn ir.Node) {
|
||||||
if fn.Func.Pragma&Norace != 0 {
|
if fn.Func().Pragma&ir.Norace != 0 {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if !flag_race || !ispkgin(norace_inst_pkgs) {
|
if !base.Flag.Race || !ispkgin(norace_inst_pkgs) {
|
||||||
fn.Func.SetInstrumentBody(true)
|
fn.Func().SetInstrumentBody(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
if flag_race {
|
if base.Flag.Race {
|
||||||
lno := lineno
|
lno := base.Pos
|
||||||
lineno = src.NoXPos
|
base.Pos = src.NoXPos
|
||||||
|
|
||||||
if thearch.LinkArch.Arch.Family != sys.AMD64 {
|
if thearch.LinkArch.Arch.Family != sys.AMD64 {
|
||||||
fn.Func.Enter.Prepend(mkcall("racefuncenterfp", nil, nil))
|
fn.Func().Enter.Prepend(mkcall("racefuncenterfp", nil, nil))
|
||||||
fn.Func.Exit.Append(mkcall("racefuncexit", nil, nil))
|
fn.Func().Exit.Append(mkcall("racefuncexit", nil, nil))
|
||||||
} else {
|
} else {
|
||||||
|
|
||||||
// nodpc is the PC of the caller as extracted by
|
// nodpc is the PC of the caller as extracted by
|
||||||
|
|
@ -81,13 +83,13 @@ func instrument(fn *Node) {
|
||||||
// This only works for amd64. This will not
|
// This only works for amd64. This will not
|
||||||
// work on arm or others that might support
|
// work on arm or others that might support
|
||||||
// race in the future.
|
// race in the future.
|
||||||
nodpc := nodfp.copy()
|
nodpc := ir.Copy(nodfp)
|
||||||
nodpc.Type = types.Types[TUINTPTR]
|
nodpc.SetType(types.Types[types.TUINTPTR])
|
||||||
nodpc.Xoffset = int64(-Widthptr)
|
nodpc.SetOffset(int64(-Widthptr))
|
||||||
fn.Func.Dcl = append(fn.Func.Dcl, nodpc)
|
fn.Func().Dcl = append(fn.Func().Dcl, nodpc)
|
||||||
fn.Func.Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc))
|
fn.Func().Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc))
|
||||||
fn.Func.Exit.Append(mkcall("racefuncexit", nil, nil))
|
fn.Func().Exit.Append(mkcall("racefuncexit", nil, nil))
|
||||||
}
|
}
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
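The instrument changes above keep the shape of race instrumentation: a racefuncenterfp or racefuncenter call is prepended to the function's Enter list and racefuncexit is appended to its Exit list. A rough, self-contained sketch of what an instrumented function looks like at the source level, with plain stand-in functions and a made-up caller PC instead of the real runtime entry points:

```go
package main

import "fmt"

// Stand-ins for runtime.racefuncenter and runtime.racefuncexit.
func racefuncenter(callerPC uintptr) { fmt.Printf("racefuncenter(%#x)\n", callerPC) }
func racefuncexit()                  { fmt.Println("racefuncexit()") }

// add shows roughly what a user function looks like after instrument
// has added the enter and exit calls; the defer here stands in for the
// call the compiler appends to fn.Func().Exit.
func add(a, b int) int {
	racefuncenter(0x1234) // placeholder PC; the compiler passes the real caller PC
	defer racefuncexit()

	return a + b
}

func main() {
	fmt.Println(add(1, 2))
}
```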
|
|
@ -5,13 +5,15 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/sys"
|
"cmd/internal/sys"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
)
|
)
|
||||||
|
|
||||||
// range
|
// range
|
||||||
func typecheckrange(n *Node) {
|
func typecheckrange(n ir.Node) {
|
||||||
// Typechecking order is important here:
|
// Typechecking order is important here:
|
||||||
// 0. first typecheck range expression (slice/map/chan),
|
// 0. first typecheck range expression (slice/map/chan),
|
||||||
// it is evaluated only once and so logically it is not part of the loop.
|
// it is evaluated only once and so logically it is not part of the loop.
|
||||||
|
|
@ -25,7 +27,7 @@ func typecheckrange(n *Node) {
|
||||||
|
|
||||||
// second half of dance, the first half being typecheckrangeExpr
|
// second half of dance, the first half being typecheckrangeExpr
|
||||||
n.SetTypecheck(1)
|
n.SetTypecheck(1)
|
||||||
ls := n.List.Slice()
|
ls := n.List().Slice()
|
||||||
for i1, n1 := range ls {
|
for i1, n1 := range ls {
|
||||||
if n1.Typecheck() == 0 {
|
if n1.Typecheck() == 0 {
|
||||||
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
|
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
|
||||||
|
|
@ -33,21 +35,21 @@ func typecheckrange(n *Node) {
|
||||||
}
|
}
|
||||||
|
|
||||||
decldepth++
|
decldepth++
|
||||||
typecheckslice(n.Nbody.Slice(), ctxStmt)
|
typecheckslice(n.Body().Slice(), ctxStmt)
|
||||||
decldepth--
|
decldepth--
|
||||||
}
|
}
|
||||||
|
|
||||||
func typecheckrangeExpr(n *Node) {
|
func typecheckrangeExpr(n ir.Node) {
|
||||||
n.Right = typecheck(n.Right, ctxExpr)
|
n.SetRight(typecheck(n.Right(), ctxExpr))
|
||||||
|
|
||||||
t := n.Right.Type
|
t := n.Right().Type()
|
||||||
if t == nil {
|
if t == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// delicate little dance. see typecheckas2
|
// delicate little dance. see typecheckas2
|
||||||
ls := n.List.Slice()
|
ls := n.List().Slice()
|
||||||
for i1, n1 := range ls {
|
for i1, n1 := range ls {
|
||||||
if n1.Name == nil || n1.Name.Defn != n {
|
if n1.Name() == nil || n1.Name().Defn != n {
|
||||||
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
|
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -55,80 +57,80 @@ func typecheckrangeExpr(n *Node) {
|
||||||
if t.IsPtr() && t.Elem().IsArray() {
|
if t.IsPtr() && t.Elem().IsArray() {
|
||||||
t = t.Elem()
|
t = t.Elem()
|
||||||
}
|
}
|
||||||
n.Type = t
|
n.SetType(t)
|
||||||
|
|
||||||
var t1, t2 *types.Type
|
var t1, t2 *types.Type
|
||||||
toomany := false
|
toomany := false
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
default:
|
default:
|
||||||
yyerrorl(n.Pos, "cannot range over %L", n.Right)
|
base.ErrorfAt(n.Pos(), "cannot range over %L", n.Right())
|
||||||
return
|
return
|
||||||
|
|
||||||
case TARRAY, TSLICE:
|
case types.TARRAY, types.TSLICE:
|
||||||
t1 = types.Types[TINT]
|
t1 = types.Types[types.TINT]
|
||||||
t2 = t.Elem()
|
t2 = t.Elem()
|
||||||
|
|
||||||
case TMAP:
|
case types.TMAP:
|
||||||
t1 = t.Key()
|
t1 = t.Key()
|
||||||
t2 = t.Elem()
|
t2 = t.Elem()
|
||||||
|
|
||||||
case TCHAN:
|
case types.TCHAN:
|
||||||
if !t.ChanDir().CanRecv() {
|
if !t.ChanDir().CanRecv() {
|
||||||
yyerrorl(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type)
|
base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.Right(), n.Right().Type())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
t1 = t.Elem()
|
t1 = t.Elem()
|
||||||
t2 = nil
|
t2 = nil
|
||||||
if n.List.Len() == 2 {
|
if n.List().Len() == 2 {
|
||||||
toomany = true
|
toomany = true
|
||||||
}
|
}
|
||||||
|
|
||||||
case TSTRING:
|
case types.TSTRING:
|
||||||
t1 = types.Types[TINT]
|
t1 = types.Types[types.TINT]
|
||||||
t2 = types.Runetype
|
t2 = types.Runetype
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.List.Len() > 2 || toomany {
|
if n.List().Len() > 2 || toomany {
|
||||||
yyerrorl(n.Pos, "too many variables in range")
|
base.ErrorfAt(n.Pos(), "too many variables in range")
|
||||||
}
|
}
|
||||||
|
|
||||||
var v1, v2 *Node
|
var v1, v2 ir.Node
|
||||||
if n.List.Len() != 0 {
|
if n.List().Len() != 0 {
|
||||||
v1 = n.List.First()
|
v1 = n.List().First()
|
||||||
}
|
}
|
||||||
if n.List.Len() > 1 {
|
if n.List().Len() > 1 {
|
||||||
v2 = n.List.Second()
|
v2 = n.List().Second()
|
||||||
}
|
}
|
||||||
|
|
||||||
// this is not only an optimization but also a requirement in the spec.
|
// this is not only an optimization but also a requirement in the spec.
|
||||||
// "if the second iteration variable is the blank identifier, the range
|
// "if the second iteration variable is the blank identifier, the range
|
||||||
// clause is equivalent to the same clause with only the first variable
|
// clause is equivalent to the same clause with only the first variable
|
||||||
// present."
|
// present."
|
||||||
if v2.isBlank() {
|
if ir.IsBlank(v2) {
|
||||||
if v1 != nil {
|
if v1 != nil {
|
||||||
n.List.Set1(v1)
|
n.PtrList().Set1(v1)
|
||||||
}
|
}
|
||||||
v2 = nil
|
v2 = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if v1 != nil {
|
if v1 != nil {
|
||||||
if v1.Name != nil && v1.Name.Defn == n {
|
if v1.Name() != nil && v1.Name().Defn == n {
|
||||||
v1.Type = t1
|
v1.SetType(t1)
|
||||||
} else if v1.Type != nil {
|
} else if v1.Type() != nil {
|
||||||
if op, why := assignop(t1, v1.Type); op == OXXX {
|
if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
|
||||||
yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
|
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
checkassign(n, v1)
|
checkassign(n, v1)
|
||||||
}
|
}
|
||||||
|
|
||||||
if v2 != nil {
|
if v2 != nil {
|
||||||
if v2.Name != nil && v2.Name.Defn == n {
|
if v2.Name() != nil && v2.Name().Defn == n {
|
||||||
v2.Type = t2
|
v2.SetType(t2)
|
||||||
} else if v2.Type != nil {
|
} else if v2.Type() != nil {
|
||||||
if op, why := assignop(t2, v2.Type); op == OXXX {
|
if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
|
||||||
yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
|
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
checkassign(n, v2)
|
checkassign(n, v2)
|
||||||
|
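typecheckrangeExpr above derives the iteration-variable types t1 and t2 from the kind of the ranged expression. A short runnable illustration of the pairs produced by that switch (array/slice, map, channel, string):

```go
package main

import "fmt"

func main() {
	xs := []string{"a", "b"}
	m := map[string]int{"k": 1}
	ch := make(chan float64, 1)
	ch <- 2.5
	close(ch)
	s := "héllo"

	for i, v := range xs { // TARRAY/TSLICE: t1 = int, t2 = element type
		fmt.Printf("slice:  %T %T\n", i, v)
	}
	for k, v := range m { // TMAP: t1 = key type, t2 = element type
		fmt.Printf("map:    %T %T\n", k, v)
	}
	for v := range ch { // TCHAN: t1 = element type, second variable rejected
		fmt.Printf("chan:   %T\n", v)
	}
	for i, r := range s { // TSTRING: t1 = int, t2 = rune
		fmt.Printf("string: %T %T (byte index %d)\n", i, r, i)
	}
}
```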
|
@ -155,12 +157,12 @@ func cheapComputableIndex(width int64) bool {
|
||||||
// simpler forms. The result must be assigned back to n.
|
// simpler forms. The result must be assigned back to n.
|
||||||
// Node n may also be modified in place, and may also be
|
// Node n may also be modified in place, and may also be
|
||||||
// the returned node.
|
// the returned node.
|
||||||
func walkrange(n *Node) *Node {
|
func walkrange(n ir.Node) ir.Node {
|
||||||
if isMapClear(n) {
|
if isMapClear(n) {
|
||||||
m := n.Right
|
m := n.Right()
|
||||||
lno := setlineno(m)
|
lno := setlineno(m)
|
||||||
n = mapClear(m)
|
n = mapClear(m)
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
return n
|
return n
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -171,65 +173,65 @@ func walkrange(n *Node) *Node {
|
||||||
// hb: hidden bool
|
// hb: hidden bool
|
||||||
// a, v1, v2: not hidden aggregate, val 1, 2
|
// a, v1, v2: not hidden aggregate, val 1, 2
|
||||||
|
|
||||||
t := n.Type
|
t := n.Type()
|
||||||
|
|
||||||
a := n.Right
|
a := n.Right()
|
||||||
lno := setlineno(a)
|
lno := setlineno(a)
|
||||||
n.Right = nil
|
n.SetRight(nil)
|
||||||
|
|
||||||
var v1, v2 *Node
|
var v1, v2 ir.Node
|
||||||
l := n.List.Len()
|
l := n.List().Len()
|
||||||
if l > 0 {
|
if l > 0 {
|
||||||
v1 = n.List.First()
|
v1 = n.List().First()
|
||||||
}
|
}
|
||||||
|
|
||||||
if l > 1 {
|
if l > 1 {
|
||||||
v2 = n.List.Second()
|
v2 = n.List().Second()
|
||||||
}
|
}
|
||||||
|
|
||||||
if v2.isBlank() {
|
if ir.IsBlank(v2) {
|
||||||
v2 = nil
|
v2 = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if v1.isBlank() && v2 == nil {
|
if ir.IsBlank(v1) && v2 == nil {
|
||||||
v1 = nil
|
v1 = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if v1 == nil && v2 != nil {
|
if v1 == nil && v2 != nil {
|
||||||
Fatalf("walkrange: v2 != nil while v1 == nil")
|
base.Fatalf("walkrange: v2 != nil while v1 == nil")
|
||||||
}
|
}
|
||||||
|
|
||||||
// n.List has no meaning anymore, clear it
|
// n.List has no meaning anymore, clear it
|
||||||
// to avoid erroneous processing by racewalk.
|
// to avoid erroneous processing by racewalk.
|
||||||
n.List.Set(nil)
|
n.PtrList().Set(nil)
|
||||||
|
|
||||||
var ifGuard *Node
|
var ifGuard ir.Node
|
||||||
|
|
||||||
translatedLoopOp := OFOR
|
translatedLoopOp := ir.OFOR
|
||||||
|
|
||||||
var body []*Node
|
var body []ir.Node
|
||||||
var init []*Node
|
var init []ir.Node
|
||||||
switch t.Etype {
|
switch t.Etype {
|
||||||
default:
|
default:
|
||||||
Fatalf("walkrange")
|
base.Fatalf("walkrange")
|
||||||
|
|
||||||
case TARRAY, TSLICE:
|
case types.TARRAY, types.TSLICE:
|
||||||
if arrayClear(n, v1, v2, a) {
|
if arrayClear(n, v1, v2, a) {
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
return n
|
return n
|
||||||
}
|
}
|
||||||
|
|
||||||
// order.stmt arranged for a copy of the array/slice variable if needed.
|
// order.stmt arranged for a copy of the array/slice variable if needed.
|
||||||
ha := a
|
ha := a
|
||||||
|
|
||||||
hv1 := temp(types.Types[TINT])
|
hv1 := temp(types.Types[types.TINT])
|
||||||
hn := temp(types.Types[TINT])
|
hn := temp(types.Types[types.TINT])
|
||||||
|
|
||||||
init = append(init, nod(OAS, hv1, nil))
|
init = append(init, ir.Nod(ir.OAS, hv1, nil))
|
||||||
init = append(init, nod(OAS, hn, nod(OLEN, ha, nil)))
|
init = append(init, ir.Nod(ir.OAS, hn, ir.Nod(ir.OLEN, ha, nil)))
|
||||||
|
|
||||||
n.Left = nod(OLT, hv1, hn)
|
n.SetLeft(ir.Nod(ir.OLT, hv1, hn))
|
||||||
n.Right = nod(OAS, hv1, nod(OADD, hv1, nodintconst(1)))
|
n.SetRight(ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1))))
|
||||||
|
|
||||||
// for range ha { body }
|
// for range ha { body }
|
||||||
if v1 == nil {
|
if v1 == nil {
|
||||||
|
|
@ -238,21 +240,21 @@ func walkrange(n *Node) *Node {
|
||||||
|
|
||||||
// for v1 := range ha { body }
|
// for v1 := range ha { body }
|
||||||
if v2 == nil {
|
if v2 == nil {
|
||||||
body = []*Node{nod(OAS, v1, hv1)}
|
body = []ir.Node{ir.Nod(ir.OAS, v1, hv1)}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
// for v1, v2 := range ha { body }
|
// for v1, v2 := range ha { body }
|
||||||
if cheapComputableIndex(n.Type.Elem().Width) {
|
if cheapComputableIndex(n.Type().Elem().Width) {
|
||||||
// v1, v2 = hv1, ha[hv1]
|
// v1, v2 = hv1, ha[hv1]
|
||||||
tmp := nod(OINDEX, ha, hv1)
|
tmp := ir.Nod(ir.OINDEX, ha, hv1)
|
||||||
tmp.SetBounded(true)
|
tmp.SetBounded(true)
|
||||||
// Use OAS2 to correctly handle assignments
|
// Use OAS2 to correctly handle assignments
|
||||||
// of the form "v1, a[v1] := range".
|
// of the form "v1, a[v1] := range".
|
||||||
a := nod(OAS2, nil, nil)
|
a := ir.Nod(ir.OAS2, nil, nil)
|
||||||
a.List.Set2(v1, v2)
|
a.PtrList().Set2(v1, v2)
|
||||||
a.Rlist.Set2(hv1, tmp)
|
a.PtrRlist().Set2(hv1, tmp)
|
||||||
body = []*Node{a}
|
body = []ir.Node{a}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -268,20 +270,20 @@ func walkrange(n *Node) *Node {
|
||||||
// TODO(austin): OFORUNTIL inhibits bounds-check
|
// TODO(austin): OFORUNTIL inhibits bounds-check
|
||||||
// elimination on the index variable (see #20711).
|
// elimination on the index variable (see #20711).
|
||||||
// Enhance the prove pass to understand this.
|
// Enhance the prove pass to understand this.
|
||||||
ifGuard = nod(OIF, nil, nil)
|
ifGuard = ir.Nod(ir.OIF, nil, nil)
|
||||||
ifGuard.Left = nod(OLT, hv1, hn)
|
ifGuard.SetLeft(ir.Nod(ir.OLT, hv1, hn))
|
||||||
translatedLoopOp = OFORUNTIL
|
translatedLoopOp = ir.OFORUNTIL
|
||||||
|
|
||||||
hp := temp(types.NewPtr(n.Type.Elem()))
|
hp := temp(types.NewPtr(n.Type().Elem()))
|
||||||
tmp := nod(OINDEX, ha, nodintconst(0))
|
tmp := ir.Nod(ir.OINDEX, ha, nodintconst(0))
|
||||||
tmp.SetBounded(true)
|
tmp.SetBounded(true)
|
||||||
init = append(init, nod(OAS, hp, nod(OADDR, tmp, nil)))
|
init = append(init, ir.Nod(ir.OAS, hp, ir.Nod(ir.OADDR, tmp, nil)))
|
||||||
|
|
||||||
// Use OAS2 to correctly handle assignments
|
// Use OAS2 to correctly handle assignments
|
||||||
// of the form "v1, a[v1] := range".
|
// of the form "v1, a[v1] := range".
|
||||||
a := nod(OAS2, nil, nil)
|
a := ir.Nod(ir.OAS2, nil, nil)
|
||||||
a.List.Set2(v1, v2)
|
a.PtrList().Set2(v1, v2)
|
||||||
a.Rlist.Set2(hv1, nod(ODEREF, hp, nil))
|
a.PtrRlist().Set2(hv1, ir.Nod(ir.ODEREF, hp, nil))
|
||||||
body = append(body, a)
|
body = append(body, a)
|
||||||
|
|
||||||
// Advance pointer as part of the late increment.
|
// Advance pointer as part of the late increment.
|
||||||
|
|
@ -289,76 +291,76 @@ func walkrange(n *Node) *Node {
|
||||||
// This runs *after* the condition check, so we know
|
// This runs *after* the condition check, so we know
|
||||||
// advancing the pointer is safe and won't go past the
|
// advancing the pointer is safe and won't go past the
|
||||||
// end of the allocation.
|
// end of the allocation.
|
||||||
a = nod(OAS, hp, addptr(hp, t.Elem().Width))
|
a = ir.Nod(ir.OAS, hp, addptr(hp, t.Elem().Width))
|
||||||
a = typecheck(a, ctxStmt)
|
a = typecheck(a, ctxStmt)
|
||||||
n.List.Set1(a)
|
n.PtrList().Set1(a)
|
||||||
|
|
||||||
case TMAP:
|
case types.TMAP:
|
||||||
// order.stmt allocated the iterator for us.
|
// order.stmt allocated the iterator for us.
|
||||||
// we only use a once, so no copy needed.
|
// we only use a once, so no copy needed.
|
||||||
ha := a
|
ha := a
|
||||||
|
|
||||||
hit := prealloc[n]
|
hit := prealloc[n]
|
||||||
th := hit.Type
|
th := hit.Type()
|
||||||
n.Left = nil
|
n.SetLeft(nil)
|
||||||
keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter
|
keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter
|
||||||
elemsym := th.Field(1).Sym // ditto
|
elemsym := th.Field(1).Sym // ditto
|
||||||
|
|
||||||
fn := syslook("mapiterinit")
|
fn := syslook("mapiterinit")
|
||||||
|
|
||||||
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
|
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
|
||||||
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, nod(OADDR, hit, nil)))
|
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, ir.Nod(ir.OADDR, hit, nil)))
|
||||||
n.Left = nod(ONE, nodSym(ODOT, hit, keysym), nodnil())
|
n.SetLeft(ir.Nod(ir.ONE, nodSym(ir.ODOT, hit, keysym), nodnil()))
|
||||||
|
|
||||||
fn = syslook("mapiternext")
|
fn = syslook("mapiternext")
|
||||||
fn = substArgTypes(fn, th)
|
fn = substArgTypes(fn, th)
|
||||||
n.Right = mkcall1(fn, nil, nil, nod(OADDR, hit, nil))
|
n.SetRight(mkcall1(fn, nil, nil, ir.Nod(ir.OADDR, hit, nil)))
|
||||||
|
|
||||||
key := nodSym(ODOT, hit, keysym)
|
key := nodSym(ir.ODOT, hit, keysym)
|
||||||
key = nod(ODEREF, key, nil)
|
key = ir.Nod(ir.ODEREF, key, nil)
|
||||||
if v1 == nil {
|
if v1 == nil {
|
||||||
body = nil
|
body = nil
|
||||||
} else if v2 == nil {
|
} else if v2 == nil {
|
||||||
body = []*Node{nod(OAS, v1, key)}
|
body = []ir.Node{ir.Nod(ir.OAS, v1, key)}
|
||||||
} else {
|
} else {
|
||||||
elem := nodSym(ODOT, hit, elemsym)
|
elem := nodSym(ir.ODOT, hit, elemsym)
|
||||||
elem = nod(ODEREF, elem, nil)
|
elem = ir.Nod(ir.ODEREF, elem, nil)
|
||||||
a := nod(OAS2, nil, nil)
|
a := ir.Nod(ir.OAS2, nil, nil)
|
||||||
a.List.Set2(v1, v2)
|
a.PtrList().Set2(v1, v2)
|
||||||
a.Rlist.Set2(key, elem)
|
a.PtrRlist().Set2(key, elem)
|
||||||
body = []*Node{a}
|
body = []ir.Node{a}
|
||||||
}
|
}
|
||||||
|
|
||||||
case TCHAN:
|
case types.TCHAN:
|
||||||
// order.stmt arranged for a copy of the channel variable.
|
// order.stmt arranged for a copy of the channel variable.
|
||||||
ha := a
|
ha := a
|
||||||
|
|
||||||
n.Left = nil
|
n.SetLeft(nil)
|
||||||
|
|
||||||
hv1 := temp(t.Elem())
|
hv1 := temp(t.Elem())
|
||||||
hv1.SetTypecheck(1)
|
hv1.SetTypecheck(1)
|
||||||
if t.Elem().HasPointers() {
|
if t.Elem().HasPointers() {
|
||||||
init = append(init, nod(OAS, hv1, nil))
|
init = append(init, ir.Nod(ir.OAS, hv1, nil))
|
||||||
}
|
}
|
||||||
hb := temp(types.Types[TBOOL])
|
hb := temp(types.Types[types.TBOOL])
|
||||||
|
|
||||||
n.Left = nod(ONE, hb, nodbool(false))
|
n.SetLeft(ir.Nod(ir.ONE, hb, nodbool(false)))
|
||||||
a := nod(OAS2RECV, nil, nil)
|
a := ir.Nod(ir.OAS2RECV, nil, nil)
|
||||||
a.SetTypecheck(1)
|
a.SetTypecheck(1)
|
||||||
a.List.Set2(hv1, hb)
|
a.PtrList().Set2(hv1, hb)
|
||||||
a.Right = nod(ORECV, ha, nil)
|
a.SetRight(ir.Nod(ir.ORECV, ha, nil))
|
||||||
n.Left.Ninit.Set1(a)
|
n.Left().PtrInit().Set1(a)
|
||||||
if v1 == nil {
|
if v1 == nil {
|
||||||
body = nil
|
body = nil
|
||||||
} else {
|
} else {
|
||||||
body = []*Node{nod(OAS, v1, hv1)}
|
body = []ir.Node{ir.Nod(ir.OAS, v1, hv1)}
|
||||||
}
|
}
|
||||||
// Zero hv1. This prevents hv1 from being the sole, inaccessible
|
// Zero hv1. This prevents hv1 from being the sole, inaccessible
|
||||||
// reference to an otherwise GC-able value during the next channel receive.
|
// reference to an otherwise GC-able value during the next channel receive.
|
||||||
// See issue 15281.
|
// See issue 15281.
|
||||||
body = append(body, nod(OAS, hv1, nil))
|
body = append(body, ir.Nod(ir.OAS, hv1, nil))
|
||||||
|
|
||||||
case TSTRING:
|
case types.TSTRING:
|
||||||
// Transform string range statements like "for v1, v2 = range a" into
|
// Transform string range statements like "for v1, v2 = range a" into
|
||||||
//
|
//
|
||||||
// ha := a
|
// ha := a
|
||||||
|
|
@ -377,84 +379,84 @@ func walkrange(n *Node) *Node {
|
||||||
// order.stmt arranged for a copy of the string variable.
|
// order.stmt arranged for a copy of the string variable.
|
||||||
ha := a
|
ha := a
|
||||||
|
|
||||||
hv1 := temp(types.Types[TINT])
|
hv1 := temp(types.Types[types.TINT])
|
||||||
hv1t := temp(types.Types[TINT])
|
hv1t := temp(types.Types[types.TINT])
|
||||||
hv2 := temp(types.Runetype)
|
hv2 := temp(types.Runetype)
|
||||||
|
|
||||||
// hv1 := 0
|
// hv1 := 0
|
||||||
init = append(init, nod(OAS, hv1, nil))
|
init = append(init, ir.Nod(ir.OAS, hv1, nil))
|
||||||
|
|
||||||
// hv1 < len(ha)
|
// hv1 < len(ha)
|
||||||
n.Left = nod(OLT, hv1, nod(OLEN, ha, nil))
|
n.SetLeft(ir.Nod(ir.OLT, hv1, ir.Nod(ir.OLEN, ha, nil)))
|
||||||
|
|
||||||
if v1 != nil {
|
if v1 != nil {
|
||||||
// hv1t = hv1
|
// hv1t = hv1
|
||||||
body = append(body, nod(OAS, hv1t, hv1))
|
body = append(body, ir.Nod(ir.OAS, hv1t, hv1))
|
||||||
}
|
}
|
||||||
|
|
||||||
// hv2 := rune(ha[hv1])
|
// hv2 := rune(ha[hv1])
|
||||||
nind := nod(OINDEX, ha, hv1)
|
nind := ir.Nod(ir.OINDEX, ha, hv1)
|
||||||
nind.SetBounded(true)
|
nind.SetBounded(true)
|
||||||
body = append(body, nod(OAS, hv2, conv(nind, types.Runetype)))
|
body = append(body, ir.Nod(ir.OAS, hv2, conv(nind, types.Runetype)))
|
||||||
|
|
||||||
// if hv2 < utf8.RuneSelf
|
// if hv2 < utf8.RuneSelf
|
||||||
nif := nod(OIF, nil, nil)
|
nif := ir.Nod(ir.OIF, nil, nil)
|
||||||
nif.Left = nod(OLT, hv2, nodintconst(utf8.RuneSelf))
|
nif.SetLeft(ir.Nod(ir.OLT, hv2, nodintconst(utf8.RuneSelf)))
|
||||||
|
|
||||||
// hv1++
|
// hv1++
|
||||||
nif.Nbody.Set1(nod(OAS, hv1, nod(OADD, hv1, nodintconst(1))))
|
nif.PtrBody().Set1(ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1))))
|
||||||
|
|
||||||
// } else {
|
// } else {
|
||||||
eif := nod(OAS2, nil, nil)
|
eif := ir.Nod(ir.OAS2, nil, nil)
|
||||||
nif.Rlist.Set1(eif)
|
nif.PtrRlist().Set1(eif)
|
||||||
|
|
||||||
// hv2, hv1 = decoderune(ha, hv1)
|
// hv2, hv1 = decoderune(ha, hv1)
|
||||||
eif.List.Set2(hv2, hv1)
|
eif.PtrList().Set2(hv2, hv1)
|
||||||
fn := syslook("decoderune")
|
fn := syslook("decoderune")
|
||||||
eif.Rlist.Set1(mkcall1(fn, fn.Type.Results(), nil, ha, hv1))
|
eif.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, ha, hv1))
|
||||||
|
|
||||||
body = append(body, nif)
|
body = append(body, nif)
|
||||||
|
|
||||||
if v1 != nil {
|
if v1 != nil {
|
||||||
if v2 != nil {
|
if v2 != nil {
|
||||||
// v1, v2 = hv1t, hv2
|
// v1, v2 = hv1t, hv2
|
||||||
a := nod(OAS2, nil, nil)
|
a := ir.Nod(ir.OAS2, nil, nil)
|
||||||
a.List.Set2(v1, v2)
|
a.PtrList().Set2(v1, v2)
|
||||||
a.Rlist.Set2(hv1t, hv2)
|
a.PtrRlist().Set2(hv1t, hv2)
|
||||||
body = append(body, a)
|
body = append(body, a)
|
||||||
} else {
|
} else {
|
||||||
// v1 = hv1t
|
// v1 = hv1t
|
||||||
body = append(body, nod(OAS, v1, hv1t))
|
body = append(body, ir.Nod(ir.OAS, v1, hv1t))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
n.Op = translatedLoopOp
|
n.SetOp(translatedLoopOp)
|
||||||
typecheckslice(init, ctxStmt)
|
typecheckslice(init, ctxStmt)
|
||||||
|
|
||||||
if ifGuard != nil {
|
if ifGuard != nil {
|
||||||
ifGuard.Ninit.Append(init...)
|
ifGuard.PtrInit().Append(init...)
|
||||||
ifGuard = typecheck(ifGuard, ctxStmt)
|
ifGuard = typecheck(ifGuard, ctxStmt)
|
||||||
} else {
|
} else {
|
||||||
n.Ninit.Append(init...)
|
n.PtrInit().Append(init...)
|
||||||
}
|
}
|
||||||
|
|
||||||
typecheckslice(n.Left.Ninit.Slice(), ctxStmt)
|
typecheckslice(n.Left().Init().Slice(), ctxStmt)
|
||||||
|
|
||||||
n.Left = typecheck(n.Left, ctxExpr)
|
n.SetLeft(typecheck(n.Left(), ctxExpr))
|
||||||
n.Left = defaultlit(n.Left, nil)
|
n.SetLeft(defaultlit(n.Left(), nil))
|
||||||
n.Right = typecheck(n.Right, ctxStmt)
|
n.SetRight(typecheck(n.Right(), ctxStmt))
|
||||||
typecheckslice(body, ctxStmt)
|
typecheckslice(body, ctxStmt)
|
||||||
n.Nbody.Prepend(body...)
|
n.PtrBody().Prepend(body...)
|
||||||
|
|
||||||
if ifGuard != nil {
|
if ifGuard != nil {
|
||||||
ifGuard.Nbody.Set1(n)
|
ifGuard.PtrBody().Set1(n)
|
||||||
n = ifGuard
|
n = ifGuard
|
||||||
}
|
}
|
||||||
|
|
||||||
n = walkstmt(n)
|
n = walkstmt(n)
|
||||||
|
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
return n
|
return n
|
||||||
}
|
}
|
||||||
|
|
||||||
|
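The TSTRING case of walkrange above rewrites "for v1, v2 = range a" into an explicit byte-index loop that fast-paths ASCII and calls the runtime's decoderune for multi-byte runes. A self-contained approximation of the loop it generates, with utf8.DecodeRuneInString standing in for runtime.decoderune:

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	a := "héllo"

	// Roughly the loop walkrange emits for `for v1, v2 := range a`.
	ha := a
	for hv1 := 0; hv1 < len(ha); {
		hv1t := hv1          // byte index exposed to the loop body
		hv2 := rune(ha[hv1]) // tentatively treat the byte as a rune
		if hv2 < utf8.RuneSelf {
			hv1++ // ASCII fast path: a single byte
		} else {
			// decoderune equivalent: decode the full rune and
			// advance past it.
			var size int
			hv2, size = utf8.DecodeRuneInString(ha[hv1:])
			hv1 += size
		}
		v1, v2 := hv1t, hv2
		fmt.Println(v1, string(v2))
	}
}
```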
|
@ -465,41 +467,41 @@ func walkrange(n *Node) *Node {
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// where == for keys of map m is reflexive.
|
// where == for keys of map m is reflexive.
|
||||||
func isMapClear(n *Node) bool {
|
func isMapClear(n ir.Node) bool {
|
||||||
if Debug.N != 0 || instrumenting {
|
if base.Flag.N != 0 || instrumenting {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.Op != ORANGE || n.Type.Etype != TMAP || n.List.Len() != 1 {
|
if n.Op() != ir.ORANGE || n.Type().Etype != types.TMAP || n.List().Len() != 1 {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
k := n.List.First()
|
k := n.List().First()
|
||||||
if k == nil || k.isBlank() {
|
if k == nil || ir.IsBlank(k) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// Require k to be a new variable name.
|
// Require k to be a new variable name.
|
||||||
if k.Name == nil || k.Name.Defn != n {
|
if k.Name() == nil || k.Name().Defn != n {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.Nbody.Len() != 1 {
|
if n.Body().Len() != 1 {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
stmt := n.Nbody.First() // only stmt in body
|
stmt := n.Body().First() // only stmt in body
|
||||||
if stmt == nil || stmt.Op != ODELETE {
|
if stmt == nil || stmt.Op() != ir.ODELETE {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
m := n.Right
|
m := n.Right()
|
||||||
if !samesafeexpr(stmt.List.First(), m) || !samesafeexpr(stmt.List.Second(), k) {
|
if !samesafeexpr(stmt.List().First(), m) || !samesafeexpr(stmt.List().Second(), k) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// Keys where equality is not reflexive can not be deleted from maps.
|
// Keys where equality is not reflexive can not be deleted from maps.
|
||||||
if !isreflexive(m.Type.Key()) {
|
if !isreflexive(m.Type().Key()) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -507,8 +509,8 @@ func isMapClear(n *Node) bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
// mapClear constructs a call to runtime.mapclear for the map m.
|
// mapClear constructs a call to runtime.mapclear for the map m.
|
||||||
func mapClear(m *Node) *Node {
|
func mapClear(m ir.Node) ir.Node {
|
||||||
t := m.Type
|
t := m.Type()
|
||||||
|
|
||||||
// instantiate mapclear(typ *type, hmap map[any]any)
|
// instantiate mapclear(typ *type, hmap map[any]any)
|
||||||
fn := syslook("mapclear")
|
fn := syslook("mapclear")
|
||||||
|
|
@ -532,8 +534,8 @@ func mapClear(m *Node) *Node {
|
||||||
// in which the evaluation of a is side-effect-free.
|
// in which the evaluation of a is side-effect-free.
|
||||||
//
|
//
|
||||||
// Parameters are as in walkrange: "for v1, v2 = range a".
|
// Parameters are as in walkrange: "for v1, v2 = range a".
|
||||||
func arrayClear(n, v1, v2, a *Node) bool {
|
func arrayClear(n, v1, v2, a ir.Node) bool {
|
||||||
if Debug.N != 0 || instrumenting {
|
if base.Flag.N != 0 || instrumenting {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -541,21 +543,21 @@ func arrayClear(n, v1, v2, a *Node) bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.Nbody.Len() != 1 || n.Nbody.First() == nil {
|
if n.Body().Len() != 1 || n.Body().First() == nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
stmt := n.Nbody.First() // only stmt in body
|
stmt := n.Body().First() // only stmt in body
|
||||||
if stmt.Op != OAS || stmt.Left.Op != OINDEX {
|
if stmt.Op() != ir.OAS || stmt.Left().Op() != ir.OINDEX {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if !samesafeexpr(stmt.Left.Left, a) || !samesafeexpr(stmt.Left.Right, v1) {
|
if !samesafeexpr(stmt.Left().Left(), a) || !samesafeexpr(stmt.Left().Right(), v1) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
elemsize := n.Type.Elem().Width
|
elemsize := n.Type().Elem().Width
|
||||||
if elemsize <= 0 || !isZero(stmt.Right) {
|
if elemsize <= 0 || !isZero(stmt.Right()) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -566,63 +568,63 @@ func arrayClear(n, v1, v2, a *Node) bool {
|
||||||
// memclr{NoHeap,Has}Pointers(hp, hn)
|
// memclr{NoHeap,Has}Pointers(hp, hn)
|
||||||
// i = len(a) - 1
|
// i = len(a) - 1
|
||||||
// }
|
// }
|
||||||
n.Op = OIF
|
n.SetOp(ir.OIF)
|
||||||
|
|
||||||
n.Nbody.Set(nil)
|
n.PtrBody().Set(nil)
|
||||||
n.Left = nod(ONE, nod(OLEN, a, nil), nodintconst(0))
|
n.SetLeft(ir.Nod(ir.ONE, ir.Nod(ir.OLEN, a, nil), nodintconst(0)))
|
||||||
|
|
||||||
// hp = &a[0]
|
// hp = &a[0]
|
||||||
hp := temp(types.Types[TUNSAFEPTR])
|
hp := temp(types.Types[types.TUNSAFEPTR])
|
||||||
|
|
||||||
tmp := nod(OINDEX, a, nodintconst(0))
|
tmp := ir.Nod(ir.OINDEX, a, nodintconst(0))
|
||||||
tmp.SetBounded(true)
|
tmp.SetBounded(true)
|
||||||
tmp = nod(OADDR, tmp, nil)
|
tmp = ir.Nod(ir.OADDR, tmp, nil)
|
||||||
tmp = convnop(tmp, types.Types[TUNSAFEPTR])
|
tmp = convnop(tmp, types.Types[types.TUNSAFEPTR])
|
||||||
n.Nbody.Append(nod(OAS, hp, tmp))
|
n.PtrBody().Append(ir.Nod(ir.OAS, hp, tmp))
|
||||||
|
|
||||||
// hn = len(a) * sizeof(elem(a))
|
// hn = len(a) * sizeof(elem(a))
|
||||||
hn := temp(types.Types[TUINTPTR])
|
hn := temp(types.Types[types.TUINTPTR])
|
||||||
|
|
||||||
tmp = nod(OLEN, a, nil)
|
tmp = ir.Nod(ir.OLEN, a, nil)
|
||||||
tmp = nod(OMUL, tmp, nodintconst(elemsize))
|
tmp = ir.Nod(ir.OMUL, tmp, nodintconst(elemsize))
|
||||||
tmp = conv(tmp, types.Types[TUINTPTR])
|
tmp = conv(tmp, types.Types[types.TUINTPTR])
|
||||||
n.Nbody.Append(nod(OAS, hn, tmp))
|
n.PtrBody().Append(ir.Nod(ir.OAS, hn, tmp))
|
||||||
|
|
||||||
var fn *Node
|
var fn ir.Node
|
||||||
if a.Type.Elem().HasPointers() {
|
if a.Type().Elem().HasPointers() {
|
||||||
// memclrHasPointers(hp, hn)
|
// memclrHasPointers(hp, hn)
|
||||||
Curfn.Func.setWBPos(stmt.Pos)
|
Curfn.Func().SetWBPos(stmt.Pos())
|
||||||
fn = mkcall("memclrHasPointers", nil, nil, hp, hn)
|
fn = mkcall("memclrHasPointers", nil, nil, hp, hn)
|
||||||
} else {
|
} else {
|
||||||
// memclrNoHeapPointers(hp, hn)
|
// memclrNoHeapPointers(hp, hn)
|
||||||
fn = mkcall("memclrNoHeapPointers", nil, nil, hp, hn)
|
fn = mkcall("memclrNoHeapPointers", nil, nil, hp, hn)
|
||||||
}
|
}
|
||||||
|
|
||||||
n.Nbody.Append(fn)
|
n.PtrBody().Append(fn)
|
||||||
|
|
||||||
// i = len(a) - 1
|
// i = len(a) - 1
|
||||||
v1 = nod(OAS, v1, nod(OSUB, nod(OLEN, a, nil), nodintconst(1)))
|
v1 = ir.Nod(ir.OAS, v1, ir.Nod(ir.OSUB, ir.Nod(ir.OLEN, a, nil), nodintconst(1)))
|
||||||
|
|
||||||
n.Nbody.Append(v1)
|
n.PtrBody().Append(v1)
|
||||||
|
|
||||||
n.Left = typecheck(n.Left, ctxExpr)
|
n.SetLeft(typecheck(n.Left(), ctxExpr))
|
||||||
n.Left = defaultlit(n.Left, nil)
|
n.SetLeft(defaultlit(n.Left(), nil))
|
||||||
typecheckslice(n.Nbody.Slice(), ctxStmt)
|
typecheckslice(n.Body().Slice(), ctxStmt)
|
||||||
n = walkstmt(n)
|
n = walkstmt(n)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
// addptr returns (*T)(uintptr(p) + n).
|
// addptr returns (*T)(uintptr(p) + n).
|
||||||
func addptr(p *Node, n int64) *Node {
|
func addptr(p ir.Node, n int64) ir.Node {
|
||||||
t := p.Type
|
t := p.Type()
|
||||||
|
|
||||||
p = nod(OCONVNOP, p, nil)
|
p = ir.Nod(ir.OCONVNOP, p, nil)
|
||||||
p.Type = types.Types[TUINTPTR]
|
p.SetType(types.Types[types.TUINTPTR])
|
||||||
|
|
||||||
p = nod(OADD, p, nodintconst(n))
|
p = ir.Nod(ir.OADD, p, nodintconst(n))
|
||||||
|
|
||||||
p = nod(OCONVNOP, p, nil)
|
p = ir.Nod(ir.OCONVNOP, p, nil)
|
||||||
p.Type = t
|
p.SetType(t)
|
||||||
|
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
|
|
@ -4,6 +4,8 @@
|
||||||
|
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
|
import "cmd/compile/internal/ir"
|
||||||
|
|
||||||
// Strongly connected components.
|
// Strongly connected components.
|
||||||
//
|
//
|
||||||
// Run analysis on minimal sets of mutually recursive functions
|
// Run analysis on minimal sets of mutually recursive functions
|
||||||
|
|
@ -30,10 +32,10 @@ package gc
|
||||||
// when analyzing a set of mutually recursive functions.
|
// when analyzing a set of mutually recursive functions.
|
||||||
|
|
||||||
type bottomUpVisitor struct {
|
type bottomUpVisitor struct {
|
||||||
analyze func([]*Node, bool)
|
analyze func([]ir.Node, bool)
|
||||||
visitgen uint32
|
visitgen uint32
|
||||||
nodeID map[*Node]uint32
|
nodeID map[ir.Node]uint32
|
||||||
stack []*Node
|
stack []ir.Node
|
||||||
}
|
}
|
||||||
|
|
||||||
// visitBottomUp invokes analyze on the ODCLFUNC nodes listed in list.
|
// visitBottomUp invokes analyze on the ODCLFUNC nodes listed in list.
|
||||||
|
|
@ -49,18 +51,18 @@ type bottomUpVisitor struct {
|
||||||
// If recursive is false, the list consists of only a single function and its closures.
|
// If recursive is false, the list consists of only a single function and its closures.
|
||||||
// If recursive is true, the list may still contain only a single function,
|
// If recursive is true, the list may still contain only a single function,
|
||||||
// if that function is itself recursive.
|
// if that function is itself recursive.
|
||||||
func visitBottomUp(list []*Node, analyze func(list []*Node, recursive bool)) {
|
func visitBottomUp(list []ir.Node, analyze func(list []ir.Node, recursive bool)) {
|
||||||
var v bottomUpVisitor
|
var v bottomUpVisitor
|
||||||
v.analyze = analyze
|
v.analyze = analyze
|
||||||
v.nodeID = make(map[*Node]uint32)
|
v.nodeID = make(map[ir.Node]uint32)
|
||||||
for _, n := range list {
|
for _, n := range list {
|
||||||
if n.Op == ODCLFUNC && !n.Func.IsHiddenClosure() {
|
if n.Op() == ir.ODCLFUNC && !n.Func().IsHiddenClosure() {
|
||||||
v.visit(n)
|
v.visit(n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v *bottomUpVisitor) visit(n *Node) uint32 {
|
func (v *bottomUpVisitor) visit(n ir.Node) uint32 {
|
||||||
if id := v.nodeID[n]; id > 0 {
|
if id := v.nodeID[n]; id > 0 {
|
||||||
// already visited
|
// already visited
|
||||||
return id
|
return id
|
||||||
|
|
@ -73,42 +75,46 @@ func (v *bottomUpVisitor) visit(n *Node) uint32 {
|
||||||
min := v.visitgen
|
min := v.visitgen
|
||||||
v.stack = append(v.stack, n)
|
v.stack = append(v.stack, n)
|
||||||
|
|
||||||
inspectList(n.Nbody, func(n *Node) bool {
|
ir.InspectList(n.Body(), func(n ir.Node) bool {
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
case ONAME:
|
case ir.ONAME:
|
||||||
if n.Class() == PFUNC {
|
if n.Class() == ir.PFUNC {
|
||||||
if n.isMethodExpression() {
|
if n != nil && n.Name().Defn != nil {
|
||||||
n = asNode(n.Type.Nname())
|
if m := v.visit(n.Name().Defn); m < min {
|
||||||
}
|
|
||||||
if n != nil && n.Name.Defn != nil {
|
|
||||||
if m := v.visit(n.Name.Defn); m < min {
|
|
||||||
min = m
|
min = m
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case ODOTMETH:
|
case ir.OMETHEXPR:
|
||||||
fn := asNode(n.Type.Nname())
|
fn := methodExprName(n)
|
||||||
if fn != nil && fn.Op == ONAME && fn.Class() == PFUNC && fn.Name.Defn != nil {
|
if fn != nil && fn.Name().Defn != nil {
|
||||||
if m := v.visit(fn.Name.Defn); m < min {
|
if m := v.visit(fn.Name().Defn); m < min {
|
||||||
min = m
|
min = m
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case OCALLPART:
|
case ir.ODOTMETH:
|
||||||
fn := asNode(callpartMethod(n).Type.Nname())
|
fn := methodExprName(n)
|
||||||
if fn != nil && fn.Op == ONAME && fn.Class() == PFUNC && fn.Name.Defn != nil {
|
if fn != nil && fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name().Defn != nil {
|
||||||
if m := v.visit(fn.Name.Defn); m < min {
|
if m := v.visit(fn.Name().Defn); m < min {
|
||||||
min = m
|
min = m
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case OCLOSURE:
|
case ir.OCALLPART:
|
||||||
if m := v.visit(n.Func.Closure); m < min {
|
fn := ir.AsNode(callpartMethod(n).Nname)
|
||||||
|
if fn != nil && fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name().Defn != nil {
|
||||||
|
if m := v.visit(fn.Name().Defn); m < min {
|
||||||
|
min = m
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case ir.OCLOSURE:
|
||||||
|
if m := v.visit(n.Func().Decl); m < min {
|
||||||
min = m
|
min = m
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
})
|
})
|
||||||
|
|
||||||
if (min == id || min == id+1) && !n.Func.IsHiddenClosure() {
|
if (min == id || min == id+1) && !n.Func().IsHiddenClosure() {
|
||||||
// This node is the root of a strongly connected component.
|
// This node is the root of a strongly connected component.
|
||||||
|
|
||||||
// The original min passed to visitcodelist was v.nodeID[n]+1.
|
// The original min passed to visitcodelist was v.nodeID[n]+1.
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,8 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/internal/dwarf"
|
"cmd/internal/dwarf"
|
||||||
"cmd/internal/obj"
|
"cmd/internal/obj"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
|
|
@ -13,10 +15,10 @@ import (
|
||||||
|
|
||||||
// See golang.org/issue/20390.
|
// See golang.org/issue/20390.
|
||||||
func xposBefore(p, q src.XPos) bool {
|
func xposBefore(p, q src.XPos) bool {
|
||||||
return Ctxt.PosTable.Pos(p).Before(Ctxt.PosTable.Pos(q))
|
return base.Ctxt.PosTable.Pos(p).Before(base.Ctxt.PosTable.Pos(q))
|
||||||
}
|
}
|
||||||
|
|
||||||
func findScope(marks []Mark, pos src.XPos) ScopeID {
|
func findScope(marks []ir.Mark, pos src.XPos) ir.ScopeID {
|
||||||
i := sort.Search(len(marks), func(i int) bool {
|
i := sort.Search(len(marks), func(i int) bool {
|
||||||
return xposBefore(pos, marks[i].Pos)
|
return xposBefore(pos, marks[i].Pos)
|
||||||
})
|
})
|
||||||
|
|
@ -26,20 +28,20 @@ func findScope(marks []Mark, pos src.XPos) ScopeID {
|
||||||
return marks[i-1].Scope
|
return marks[i-1].Scope
|
||||||
}
|
}
|
||||||
|
|
||||||
func assembleScopes(fnsym *obj.LSym, fn *Node, dwarfVars []*dwarf.Var, varScopes []ScopeID) []dwarf.Scope {
|
func assembleScopes(fnsym *obj.LSym, fn ir.Node, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope {
|
||||||
// Initialize the DWARF scope tree based on lexical scopes.
|
// Initialize the DWARF scope tree based on lexical scopes.
|
||||||
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func.Parents))
|
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func().Parents))
|
||||||
for i, parent := range fn.Func.Parents {
|
for i, parent := range fn.Func().Parents {
|
||||||
dwarfScopes[i+1].Parent = int32(parent)
|
dwarfScopes[i+1].Parent = int32(parent)
|
||||||
}
|
}
|
||||||
|
|
||||||
scopeVariables(dwarfVars, varScopes, dwarfScopes)
|
scopeVariables(dwarfVars, varScopes, dwarfScopes)
|
||||||
scopePCs(fnsym, fn.Func.Marks, dwarfScopes)
|
scopePCs(fnsym, fn.Func().Marks, dwarfScopes)
|
||||||
return compactScopes(dwarfScopes)
|
return compactScopes(dwarfScopes)
|
||||||
}
|
}
|
||||||
|
|
||||||
// scopeVariables assigns DWARF variable records to their scopes.
|
// scopeVariables assigns DWARF variable records to their scopes.
|
||||||
func scopeVariables(dwarfVars []*dwarf.Var, varScopes []ScopeID, dwarfScopes []dwarf.Scope) {
|
func scopeVariables(dwarfVars []*dwarf.Var, varScopes []ir.ScopeID, dwarfScopes []dwarf.Scope) {
|
||||||
sort.Stable(varsByScopeAndOffset{dwarfVars, varScopes})
|
sort.Stable(varsByScopeAndOffset{dwarfVars, varScopes})
|
||||||
|
|
||||||
i0 := 0
|
i0 := 0
|
||||||
|
|
@ -56,7 +58,7 @@ func scopeVariables(dwarfVars []*dwarf.Var, varScopes []ScopeID, dwarfScopes []d
|
||||||
}
|
}
|
||||||
|
|
||||||
// scopePCs assigns PC ranges to their scopes.
|
// scopePCs assigns PC ranges to their scopes.
|
||||||
func scopePCs(fnsym *obj.LSym, marks []Mark, dwarfScopes []dwarf.Scope) {
|
func scopePCs(fnsym *obj.LSym, marks []ir.Mark, dwarfScopes []dwarf.Scope) {
|
||||||
// If there aren't any child scopes (in particular, when scope
|
// If there aren't any child scopes (in particular, when scope
|
||||||
// tracking is disabled), we can skip a whole lot of work.
|
// tracking is disabled), we can skip a whole lot of work.
|
||||||
if len(marks) == 0 {
|
if len(marks) == 0 {
|
||||||
|
|
@ -89,7 +91,7 @@ func compactScopes(dwarfScopes []dwarf.Scope) []dwarf.Scope {
|
||||||
|
|
||||||
type varsByScopeAndOffset struct {
|
type varsByScopeAndOffset struct {
|
||||||
vars []*dwarf.Var
|
vars []*dwarf.Var
|
||||||
scopes []ScopeID
|
scopes []ir.ScopeID
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v varsByScopeAndOffset) Len() int {
|
func (v varsByScopeAndOffset) Len() int {
|
||||||
|
|
|
||||||
|
|
@ -4,152 +4,156 @@
|
||||||
|
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import "cmd/compile/internal/types"
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
|
"cmd/compile/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
// select
|
// select
|
||||||
func typecheckselect(sel *Node) {
|
func typecheckselect(sel ir.Node) {
|
||||||
var def *Node
|
var def ir.Node
|
||||||
lno := setlineno(sel)
|
lno := setlineno(sel)
|
||||||
typecheckslice(sel.Ninit.Slice(), ctxStmt)
|
typecheckslice(sel.Init().Slice(), ctxStmt)
|
||||||
for _, ncase := range sel.List.Slice() {
|
for _, ncase := range sel.List().Slice() {
|
||||||
if ncase.Op != OCASE {
|
if ncase.Op() != ir.OCASE {
|
||||||
setlineno(ncase)
|
setlineno(ncase)
|
||||||
Fatalf("typecheckselect %v", ncase.Op)
|
base.Fatalf("typecheckselect %v", ncase.Op())
|
||||||
}
|
}
|
||||||
|
|
||||||
if ncase.List.Len() == 0 {
|
if ncase.List().Len() == 0 {
|
||||||
// default
|
// default
|
||||||
if def != nil {
|
if def != nil {
|
||||||
yyerrorl(ncase.Pos, "multiple defaults in select (first at %v)", def.Line())
|
base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
|
||||||
} else {
|
} else {
|
||||||
def = ncase
|
def = ncase
|
||||||
}
|
}
|
||||||
} else if ncase.List.Len() > 1 {
|
} else if ncase.List().Len() > 1 {
|
||||||
yyerrorl(ncase.Pos, "select cases cannot be lists")
|
base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
|
||||||
} else {
|
} else {
|
||||||
ncase.List.SetFirst(typecheck(ncase.List.First(), ctxStmt))
|
ncase.List().SetFirst(typecheck(ncase.List().First(), ctxStmt))
|
||||||
n := ncase.List.First()
|
n := ncase.List().First()
|
||||||
ncase.Left = n
|
ncase.SetLeft(n)
|
||||||
ncase.List.Set(nil)
|
ncase.PtrList().Set(nil)
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
default:
|
default:
|
||||||
pos := n.Pos
|
pos := n.Pos()
|
||||||
if n.Op == ONAME {
|
if n.Op() == ir.ONAME {
|
||||||
// We don't have the right position for ONAME nodes (see #15459 and
|
// We don't have the right position for ONAME nodes (see #15459 and
|
||||||
// others). Using ncase.Pos for now as it will provide the correct
|
// others). Using ncase.Pos for now as it will provide the correct
|
||||||
// line number (assuming the expression follows the "case" keyword
|
// line number (assuming the expression follows the "case" keyword
|
||||||
// on the same line). This matches the approach before 1.10.
|
// on the same line). This matches the approach before 1.10.
|
||||||
pos = ncase.Pos
|
pos = ncase.Pos()
|
||||||
}
|
}
|
||||||
yyerrorl(pos, "select case must be receive, send or assign recv")
|
base.ErrorfAt(pos, "select case must be receive, send or assign recv")
|
||||||
|
|
||||||
// convert x = <-c into OSELRECV(x, <-c).
|
// convert x = <-c into OSELRECV(x, <-c).
|
||||||
// remove implicit conversions; the eventual assignment
|
// remove implicit conversions; the eventual assignment
|
||||||
// will reintroduce them.
|
// will reintroduce them.
|
||||||
case OAS:
|
case ir.OAS:
|
||||||
if (n.Right.Op == OCONVNOP || n.Right.Op == OCONVIFACE) && n.Right.Implicit() {
|
if (n.Right().Op() == ir.OCONVNOP || n.Right().Op() == ir.OCONVIFACE) && n.Right().Implicit() {
|
||||||
n.Right = n.Right.Left
|
n.SetRight(n.Right().Left())
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.Right.Op != ORECV {
|
if n.Right().Op() != ir.ORECV {
|
||||||
yyerrorl(n.Pos, "select assignment must have receive on right hand side")
|
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
n.Op = OSELRECV
|
n.SetOp(ir.OSELRECV)
|
||||||
|
|
||||||
// convert x, ok = <-c into OSELRECV2(x, <-c) with ntest=ok
|
// convert x, ok = <-c into OSELRECV2(x, <-c) with ntest=ok
|
||||||
case OAS2RECV:
|
case ir.OAS2RECV:
|
||||||
if n.Right.Op != ORECV {
|
if n.Right().Op() != ir.ORECV {
|
||||||
yyerrorl(n.Pos, "select assignment must have receive on right hand side")
|
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
n.Op = OSELRECV2
|
n.SetOp(ir.OSELRECV2)
|
||||||
n.Left = n.List.First()
|
n.SetLeft(n.List().First())
|
||||||
n.List.Set1(n.List.Second())
|
n.PtrList().Set1(n.List().Second())
|
||||||
|
|
||||||
// convert <-c into OSELRECV(N, <-c)
|
// convert <-c into OSELRECV(N, <-c)
|
||||||
case ORECV:
|
case ir.ORECV:
|
||||||
n = nodl(n.Pos, OSELRECV, nil, n)
|
n = ir.NodAt(n.Pos(), ir.OSELRECV, nil, n)
|
||||||
|
|
||||||
n.SetTypecheck(1)
|
n.SetTypecheck(1)
|
||||||
ncase.Left = n
|
ncase.SetLeft(n)
|
||||||
|
|
||||||
case OSEND:
|
case ir.OSEND:
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
|
typecheckslice(ncase.Body().Slice(), ctxStmt)
|
||||||
}
|
}
|
||||||
|
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
}
|
}
|
||||||
|
|
||||||
func walkselect(sel *Node) {
|
func walkselect(sel ir.Node) {
|
||||||
lno := setlineno(sel)
|
lno := setlineno(sel)
|
||||||
if sel.Nbody.Len() != 0 {
|
if sel.Body().Len() != 0 {
|
||||||
Fatalf("double walkselect")
|
base.Fatalf("double walkselect")
|
||||||
}
|
}
|
||||||
|
|
||||||
init := sel.Ninit.Slice()
|
init := sel.Init().Slice()
|
||||||
sel.Ninit.Set(nil)
|
sel.PtrInit().Set(nil)
|
||||||
|
|
||||||
init = append(init, walkselectcases(&sel.List)...)
|
init = append(init, walkselectcases(sel.PtrList())...)
|
||||||
sel.List.Set(nil)
|
sel.PtrList().Set(nil)
|
||||||
|
|
||||||
sel.Nbody.Set(init)
|
sel.PtrBody().Set(init)
|
||||||
walkstmtlist(sel.Nbody.Slice())
|
walkstmtlist(sel.Body().Slice())
|
||||||
|
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
}
|
}
|
||||||
|
|
||||||
func walkselectcases(cases *Nodes) []*Node {
|
func walkselectcases(cases *ir.Nodes) []ir.Node {
|
||||||
ncas := cases.Len()
|
ncas := cases.Len()
|
||||||
sellineno := lineno
|
sellineno := base.Pos
|
||||||
|
|
||||||
// optimization: zero-case select
|
// optimization: zero-case select
|
||||||
if ncas == 0 {
|
if ncas == 0 {
|
||||||
return []*Node{mkcall("block", nil, nil)}
|
return []ir.Node{mkcall("block", nil, nil)}
|
||||||
}
|
}
|
||||||
|
|
||||||
// optimization: one-case select: single op.
|
// optimization: one-case select: single op.
|
||||||
if ncas == 1 {
|
if ncas == 1 {
|
||||||
cas := cases.First()
|
cas := cases.First()
|
||||||
setlineno(cas)
|
setlineno(cas)
|
||||||
l := cas.Ninit.Slice()
|
l := cas.Init().Slice()
|
||||||
if cas.Left != nil { // not default:
|
if cas.Left() != nil { // not default:
|
||||||
n := cas.Left
|
n := cas.Left()
|
||||||
l = append(l, n.Ninit.Slice()...)
|
l = append(l, n.Init().Slice()...)
|
||||||
n.Ninit.Set(nil)
|
n.PtrInit().Set(nil)
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
default:
|
default:
|
||||||
Fatalf("select %v", n.Op)
|
base.Fatalf("select %v", n.Op())
|
||||||
|
|
||||||
case OSEND:
|
case ir.OSEND:
|
||||||
// already ok
|
// already ok
|
||||||
|
|
||||||
case OSELRECV, OSELRECV2:
|
case ir.OSELRECV, ir.OSELRECV2:
|
||||||
if n.Op == OSELRECV || n.List.Len() == 0 {
|
if n.Op() == ir.OSELRECV || n.List().Len() == 0 {
|
||||||
if n.Left == nil {
|
if n.Left() == nil {
|
||||||
n = n.Right
|
n = n.Right()
|
||||||
} else {
|
} else {
|
||||||
n.Op = OAS
|
n.SetOp(ir.OAS)
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.Left == nil {
|
if n.Left() == nil {
|
||||||
nblank = typecheck(nblank, ctxExpr|ctxAssign)
|
ir.BlankNode = typecheck(ir.BlankNode, ctxExpr|ctxAssign)
|
||||||
n.Left = nblank
|
n.SetLeft(ir.BlankNode)
|
||||||
}
|
}
|
||||||
|
|
||||||
n.Op = OAS2
|
n.SetOp(ir.OAS2)
|
||||||
n.List.Prepend(n.Left)
|
n.PtrList().Prepend(n.Left())
|
||||||
n.Rlist.Set1(n.Right)
|
n.PtrRlist().Set1(n.Right())
|
||||||
n.Right = nil
|
n.SetRight(nil)
|
||||||
n.Left = nil
|
n.SetLeft(nil)
|
||||||
n.SetTypecheck(0)
|
n.SetTypecheck(0)
|
||||||
n = typecheck(n, ctxStmt)
|
n = typecheck(n, ctxStmt)
|
||||||
}
|
}
|
||||||
|
|
@ -157,34 +161,34 @@ func walkselectcases(cases *Nodes) []*Node {
|
||||||
l = append(l, n)
|
l = append(l, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
l = append(l, cas.Nbody.Slice()...)
|
l = append(l, cas.Body().Slice()...)
|
||||||
l = append(l, nod(OBREAK, nil, nil))
|
l = append(l, ir.Nod(ir.OBREAK, nil, nil))
|
||||||
return l
|
return l
|
||||||
}
|
}
|
||||||
|
|
||||||
// convert case value arguments to addresses.
|
// convert case value arguments to addresses.
|
||||||
// this rewrite is used by both the general code and the next optimization.
|
// this rewrite is used by both the general code and the next optimization.
|
||||||
var dflt *Node
|
var dflt ir.Node
|
||||||
for _, cas := range cases.Slice() {
|
for _, cas := range cases.Slice() {
|
||||||
setlineno(cas)
|
setlineno(cas)
|
||||||
n := cas.Left
|
n := cas.Left()
|
||||||
if n == nil {
|
if n == nil {
|
||||||
dflt = cas
|
dflt = cas
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
case OSEND:
|
case ir.OSEND:
|
||||||
n.Right = nod(OADDR, n.Right, nil)
|
n.SetRight(ir.Nod(ir.OADDR, n.Right(), nil))
|
||||||
n.Right = typecheck(n.Right, ctxExpr)
|
n.SetRight(typecheck(n.Right(), ctxExpr))
|
||||||
|
|
||||||
case OSELRECV, OSELRECV2:
|
case ir.OSELRECV, ir.OSELRECV2:
|
||||||
if n.Op == OSELRECV2 && n.List.Len() == 0 {
|
if n.Op() == ir.OSELRECV2 && n.List().Len() == 0 {
|
||||||
n.Op = OSELRECV
|
n.SetOp(ir.OSELRECV)
|
||||||
}
|
}
|
||||||
|
|
||||||
if n.Left != nil {
|
if n.Left() != nil {
|
||||||
n.Left = nod(OADDR, n.Left, nil)
|
n.SetLeft(ir.Nod(ir.OADDR, n.Left(), nil))
|
||||||
n.Left = typecheck(n.Left, ctxExpr)
|
n.SetLeft(typecheck(n.Left(), ctxExpr))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -196,68 +200,68 @@ func walkselectcases(cases *Nodes) []*Node {
|
||||||
cas = cases.Second()
|
cas = cases.Second()
|
||||||
}
|
}
|
||||||
|
|
||||||
n := cas.Left
|
n := cas.Left()
|
||||||
setlineno(n)
|
setlineno(n)
|
||||||
r := nod(OIF, nil, nil)
|
r := ir.Nod(ir.OIF, nil, nil)
|
||||||
r.Ninit.Set(cas.Ninit.Slice())
|
r.PtrInit().Set(cas.Init().Slice())
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
default:
|
default:
|
||||||
Fatalf("select %v", n.Op)
|
base.Fatalf("select %v", n.Op())
|
||||||
|
|
||||||
case OSEND:
|
case ir.OSEND:
|
||||||
// if selectnbsend(c, v) { body } else { default body }
|
// if selectnbsend(c, v) { body } else { default body }
|
||||||
ch := n.Left
|
ch := n.Left()
|
||||||
r.Left = mkcall1(chanfn("selectnbsend", 2, ch.Type), types.Types[TBOOL], &r.Ninit, ch, n.Right)
|
r.SetLeft(mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Right()))
|
||||||
|
|
||||||
case OSELRECV:
|
case ir.OSELRECV:
|
||||||
// if selectnbrecv(&v, c) { body } else { default body }
|
// if selectnbrecv(&v, c) { body } else { default body }
|
||||||
ch := n.Right.Left
|
ch := n.Right().Left()
|
||||||
elem := n.Left
|
elem := n.Left()
|
||||||
if elem == nil {
|
if elem == nil {
|
||||||
elem = nodnil()
|
elem = nodnil()
|
||||||
}
|
}
|
||||||
r.Left = mkcall1(chanfn("selectnbrecv", 2, ch.Type), types.Types[TBOOL], &r.Ninit, elem, ch)
|
r.SetLeft(mkcall1(chanfn("selectnbrecv", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, ch))
|
||||||
|
|
||||||
case OSELRECV2:
|
case ir.OSELRECV2:
|
||||||
// if selectnbrecv2(&v, &received, c) { body } else { default body }
|
// if selectnbrecv2(&v, &received, c) { body } else { default body }
|
||||||
ch := n.Right.Left
|
ch := n.Right().Left()
|
||||||
elem := n.Left
|
elem := n.Left()
|
||||||
if elem == nil {
|
if elem == nil {
|
||||||
elem = nodnil()
|
elem = nodnil()
|
||||||
}
|
}
|
||||||
receivedp := nod(OADDR, n.List.First(), nil)
|
receivedp := ir.Nod(ir.OADDR, n.List().First(), nil)
|
||||||
receivedp = typecheck(receivedp, ctxExpr)
|
receivedp = typecheck(receivedp, ctxExpr)
|
||||||
r.Left = mkcall1(chanfn("selectnbrecv2", 2, ch.Type), types.Types[TBOOL], &r.Ninit, elem, receivedp, ch)
|
r.SetLeft(mkcall1(chanfn("selectnbrecv2", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, receivedp, ch))
|
||||||
}
|
}
|
||||||
|
|
||||||
r.Left = typecheck(r.Left, ctxExpr)
|
r.SetLeft(typecheck(r.Left(), ctxExpr))
|
||||||
r.Nbody.Set(cas.Nbody.Slice())
|
r.PtrBody().Set(cas.Body().Slice())
|
||||||
r.Rlist.Set(append(dflt.Ninit.Slice(), dflt.Nbody.Slice()...))
|
r.PtrRlist().Set(append(dflt.Init().Slice(), dflt.Body().Slice()...))
|
||||||
return []*Node{r, nod(OBREAK, nil, nil)}
|
return []ir.Node{r, ir.Nod(ir.OBREAK, nil, nil)}
|
||||||
}
|
}
|
||||||
|
|
||||||
if dflt != nil {
|
if dflt != nil {
|
||||||
ncas--
|
ncas--
|
||||||
}
|
}
|
||||||
casorder := make([]*Node, ncas)
|
casorder := make([]ir.Node, ncas)
|
||||||
nsends, nrecvs := 0, 0
|
nsends, nrecvs := 0, 0
|
||||||
|
|
||||||
var init []*Node
|
var init []ir.Node
|
||||||
|
|
||||||
// generate sel-struct
|
// generate sel-struct
|
||||||
lineno = sellineno
|
base.Pos = sellineno
|
||||||
selv := temp(types.NewArray(scasetype(), int64(ncas)))
|
selv := temp(types.NewArray(scasetype(), int64(ncas)))
|
||||||
r := nod(OAS, selv, nil)
|
r := ir.Nod(ir.OAS, selv, nil)
|
||||||
r = typecheck(r, ctxStmt)
|
r = typecheck(r, ctxStmt)
|
||||||
init = append(init, r)
|
init = append(init, r)
|
||||||
|
|
||||||
// No initialization for order; runtime.selectgo is responsible for that.
|
// No initialization for order; runtime.selectgo is responsible for that.
|
||||||
order := temp(types.NewArray(types.Types[TUINT16], 2*int64(ncas)))
|
order := temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
|
||||||
|
|
||||||
var pc0, pcs *Node
|
var pc0, pcs ir.Node
|
||||||
if flag_race {
|
if base.Flag.Race {
|
||||||
pcs = temp(types.NewArray(types.Types[TUINTPTR], int64(ncas)))
|
pcs = temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
|
||||||
pc0 = typecheck(nod(OADDR, nod(OINDEX, pcs, nodintconst(0)), nil), ctxExpr)
|
pc0 = typecheck(ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, pcs, nodintconst(0)), nil), ctxExpr)
|
||||||
} else {
|
} else {
|
||||||
pc0 = nodnil()
|
pc0 = nodnil()
|
||||||
}
|
}
|
||||||
|
|
@ -266,109 +270,109 @@ func walkselectcases(cases *Nodes) []*Node {
|
||||||
for _, cas := range cases.Slice() {
|
for _, cas := range cases.Slice() {
|
||||||
setlineno(cas)
|
setlineno(cas)
|
||||||
|
|
||||||
init = append(init, cas.Ninit.Slice()...)
|
init = append(init, cas.Init().Slice()...)
|
||||||
cas.Ninit.Set(nil)
|
cas.PtrInit().Set(nil)
|
||||||
|
|
||||||
n := cas.Left
|
n := cas.Left()
|
||||||
if n == nil { // default:
|
if n == nil { // default:
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
var i int
|
var i int
|
||||||
var c, elem *Node
|
var c, elem ir.Node
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
default:
|
default:
|
||||||
Fatalf("select %v", n.Op)
|
base.Fatalf("select %v", n.Op())
|
||||||
case OSEND:
|
case ir.OSEND:
|
||||||
i = nsends
|
i = nsends
|
||||||
nsends++
|
nsends++
|
||||||
c = n.Left
|
c = n.Left()
|
||||||
elem = n.Right
|
elem = n.Right()
|
||||||
case OSELRECV, OSELRECV2:
|
case ir.OSELRECV, ir.OSELRECV2:
|
||||||
nrecvs++
|
nrecvs++
|
||||||
i = ncas - nrecvs
|
i = ncas - nrecvs
|
||||||
c = n.Right.Left
|
c = n.Right().Left()
|
||||||
elem = n.Left
|
elem = n.Left()
|
||||||
}
|
}
|
||||||
|
|
||||||
casorder[i] = cas
|
casorder[i] = cas
|
||||||
|
|
||||||
setField := func(f string, val *Node) {
|
setField := func(f string, val ir.Node) {
|
||||||
r := nod(OAS, nodSym(ODOT, nod(OINDEX, selv, nodintconst(int64(i))), lookup(f)), val)
|
r := ir.Nod(ir.OAS, nodSym(ir.ODOT, ir.Nod(ir.OINDEX, selv, nodintconst(int64(i))), lookup(f)), val)
|
||||||
r = typecheck(r, ctxStmt)
|
r = typecheck(r, ctxStmt)
|
||||||
init = append(init, r)
|
init = append(init, r)
|
||||||
}
|
}
|
||||||
|
|
||||||
c = convnop(c, types.Types[TUNSAFEPTR])
|
c = convnop(c, types.Types[types.TUNSAFEPTR])
|
||||||
setField("c", c)
|
setField("c", c)
|
||||||
if elem != nil {
|
if elem != nil {
|
||||||
elem = convnop(elem, types.Types[TUNSAFEPTR])
|
elem = convnop(elem, types.Types[types.TUNSAFEPTR])
|
||||||
setField("elem", elem)
|
setField("elem", elem)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(mdempsky): There should be a cleaner way to
|
// TODO(mdempsky): There should be a cleaner way to
|
||||||
// handle this.
|
// handle this.
|
||||||
if flag_race {
|
if base.Flag.Race {
|
||||||
r = mkcall("selectsetpc", nil, nil, nod(OADDR, nod(OINDEX, pcs, nodintconst(int64(i))), nil))
|
r = mkcall("selectsetpc", nil, nil, ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, pcs, nodintconst(int64(i))), nil))
|
||||||
init = append(init, r)
|
init = append(init, r)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if nsends+nrecvs != ncas {
|
if nsends+nrecvs != ncas {
|
||||||
Fatalf("walkselectcases: miscount: %v + %v != %v", nsends, nrecvs, ncas)
|
base.Fatalf("walkselectcases: miscount: %v + %v != %v", nsends, nrecvs, ncas)
|
||||||
}
|
}
|
||||||
|
|
||||||
// run the select
|
// run the select
|
||||||
lineno = sellineno
|
base.Pos = sellineno
|
||||||
chosen := temp(types.Types[TINT])
|
chosen := temp(types.Types[types.TINT])
|
||||||
recvOK := temp(types.Types[TBOOL])
|
recvOK := temp(types.Types[types.TBOOL])
|
||||||
r = nod(OAS2, nil, nil)
|
r = ir.Nod(ir.OAS2, nil, nil)
|
||||||
r.List.Set2(chosen, recvOK)
|
r.PtrList().Set2(chosen, recvOK)
|
||||||
fn := syslook("selectgo")
|
fn := syslook("selectgo")
|
||||||
r.Rlist.Set1(mkcall1(fn, fn.Type.Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
|
r.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
|
||||||
r = typecheck(r, ctxStmt)
|
r = typecheck(r, ctxStmt)
|
||||||
init = append(init, r)
|
init = append(init, r)
|
||||||
|
|
||||||
// selv and order are no longer alive after selectgo.
|
// selv and order are no longer alive after selectgo.
|
||||||
init = append(init, nod(OVARKILL, selv, nil))
|
init = append(init, ir.Nod(ir.OVARKILL, selv, nil))
|
||||||
init = append(init, nod(OVARKILL, order, nil))
|
init = append(init, ir.Nod(ir.OVARKILL, order, nil))
|
||||||
if flag_race {
|
if base.Flag.Race {
|
||||||
init = append(init, nod(OVARKILL, pcs, nil))
|
init = append(init, ir.Nod(ir.OVARKILL, pcs, nil))
|
||||||
}
|
}
|
||||||
|
|
||||||
// dispatch cases
|
// dispatch cases
|
||||||
dispatch := func(cond, cas *Node) {
|
dispatch := func(cond, cas ir.Node) {
|
||||||
cond = typecheck(cond, ctxExpr)
|
cond = typecheck(cond, ctxExpr)
|
||||||
cond = defaultlit(cond, nil)
|
cond = defaultlit(cond, nil)
|
||||||
|
|
||||||
r := nod(OIF, cond, nil)
|
r := ir.Nod(ir.OIF, cond, nil)
|
||||||
|
|
||||||
if n := cas.Left; n != nil && n.Op == OSELRECV2 {
|
if n := cas.Left(); n != nil && n.Op() == ir.OSELRECV2 {
|
||||||
x := nod(OAS, n.List.First(), recvOK)
|
x := ir.Nod(ir.OAS, n.List().First(), recvOK)
|
||||||
x = typecheck(x, ctxStmt)
|
x = typecheck(x, ctxStmt)
|
||||||
r.Nbody.Append(x)
|
r.PtrBody().Append(x)
|
||||||
}
|
}
|
||||||
|
|
||||||
r.Nbody.AppendNodes(&cas.Nbody)
|
r.PtrBody().AppendNodes(cas.PtrBody())
|
||||||
r.Nbody.Append(nod(OBREAK, nil, nil))
|
r.PtrBody().Append(ir.Nod(ir.OBREAK, nil, nil))
|
||||||
init = append(init, r)
|
init = append(init, r)
|
||||||
}
|
}
|
||||||
|
|
||||||
if dflt != nil {
|
if dflt != nil {
|
||||||
setlineno(dflt)
|
setlineno(dflt)
|
||||||
dispatch(nod(OLT, chosen, nodintconst(0)), dflt)
|
dispatch(ir.Nod(ir.OLT, chosen, nodintconst(0)), dflt)
|
||||||
}
|
}
|
||||||
for i, cas := range casorder {
|
for i, cas := range casorder {
|
||||||
setlineno(cas)
|
setlineno(cas)
|
||||||
dispatch(nod(OEQ, chosen, nodintconst(int64(i))), cas)
|
dispatch(ir.Nod(ir.OEQ, chosen, nodintconst(int64(i))), cas)
|
||||||
}
|
}
|
||||||
|
|
||||||
return init
|
return init
|
||||||
}
|
}
|
||||||
|
|
||||||
// bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
|
// bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
|
||||||
func bytePtrToIndex(n *Node, i int64) *Node {
|
func bytePtrToIndex(n ir.Node, i int64) ir.Node {
|
||||||
s := nod(OADDR, nod(OINDEX, n, nodintconst(i)), nil)
|
s := ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, n, nodintconst(i)), nil)
|
||||||
t := types.NewPtr(types.Types[TUINT8])
|
t := types.NewPtr(types.Types[types.TUINT8])
|
||||||
return convnop(s, t)
|
return convnop(s, t)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -377,9 +381,9 @@ var scase *types.Type
|
||||||
// Keep in sync with src/runtime/select.go.
|
// Keep in sync with src/runtime/select.go.
|
||||||
func scasetype() *types.Type {
|
func scasetype() *types.Type {
|
||||||
if scase == nil {
|
if scase == nil {
|
||||||
scase = tostruct([]*Node{
|
scase = tostruct([]ir.Node{
|
||||||
namedfield("c", types.Types[TUNSAFEPTR]),
|
namedfield("c", types.Types[types.TUNSAFEPTR]),
|
||||||
namedfield("elem", types.Types[TUNSAFEPTR]),
|
namedfield("elem", types.Types[types.TUNSAFEPTR]),
|
||||||
})
|
})
|
||||||
scase.SetNoalg(true)
|
scase.SetNoalg(true)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -5,43 +5,47 @@
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/compile/internal/types"
|
"cmd/compile/internal/types"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
|
"go/constant"
|
||||||
|
"go/token"
|
||||||
"sort"
|
"sort"
|
||||||
)
|
)
|
||||||
|
|
||||||
// typecheckswitch typechecks a switch statement.
|
// typecheckswitch typechecks a switch statement.
|
||||||
func typecheckswitch(n *Node) {
|
func typecheckswitch(n ir.Node) {
|
||||||
typecheckslice(n.Ninit.Slice(), ctxStmt)
|
typecheckslice(n.Init().Slice(), ctxStmt)
|
||||||
if n.Left != nil && n.Left.Op == OTYPESW {
|
if n.Left() != nil && n.Left().Op() == ir.OTYPESW {
|
||||||
typecheckTypeSwitch(n)
|
typecheckTypeSwitch(n)
|
||||||
} else {
|
} else {
|
||||||
typecheckExprSwitch(n)
|
typecheckExprSwitch(n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func typecheckTypeSwitch(n *Node) {
|
func typecheckTypeSwitch(n ir.Node) {
|
||||||
n.Left.Right = typecheck(n.Left.Right, ctxExpr)
|
n.Left().SetRight(typecheck(n.Left().Right(), ctxExpr))
|
||||||
t := n.Left.Right.Type
|
t := n.Left().Right().Type()
|
||||||
if t != nil && !t.IsInterface() {
|
if t != nil && !t.IsInterface() {
|
||||||
yyerrorl(n.Pos, "cannot type switch on non-interface value %L", n.Left.Right)
|
base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", n.Left().Right())
|
||||||
t = nil
|
t = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// We don't actually declare the type switch's guarded
|
// We don't actually declare the type switch's guarded
|
||||||
// declaration itself. So if there are no cases, we won't
|
// declaration itself. So if there are no cases, we won't
|
||||||
// notice that it went unused.
|
// notice that it went unused.
|
||||||
if v := n.Left.Left; v != nil && !v.isBlank() && n.List.Len() == 0 {
|
if v := n.Left().Left(); v != nil && !ir.IsBlank(v) && n.List().Len() == 0 {
|
||||||
yyerrorl(v.Pos, "%v declared but not used", v.Sym)
|
base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
|
||||||
}
|
}
|
||||||
|
|
||||||
var defCase, nilCase *Node
|
var defCase, nilCase ir.Node
|
||||||
var ts typeSet
|
var ts typeSet
|
||||||
for _, ncase := range n.List.Slice() {
|
for _, ncase := range n.List().Slice() {
|
||||||
ls := ncase.List.Slice()
|
ls := ncase.List().Slice()
|
||||||
if len(ls) == 0 { // default:
|
if len(ls) == 0 { // default:
|
||||||
if defCase != nil {
|
if defCase != nil {
|
||||||
yyerrorl(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
|
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
|
||||||
} else {
|
} else {
|
||||||
defCase = ncase
|
defCase = ncase
|
||||||
}
|
}
|
||||||
|
|
@ -50,65 +54,65 @@ func typecheckTypeSwitch(n *Node) {
|
||||||
for i := range ls {
|
for i := range ls {
|
||||||
ls[i] = typecheck(ls[i], ctxExpr|ctxType)
|
ls[i] = typecheck(ls[i], ctxExpr|ctxType)
|
||||||
n1 := ls[i]
|
n1 := ls[i]
|
||||||
if t == nil || n1.Type == nil {
|
if t == nil || n1.Type() == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
var missing, have *types.Field
|
var missing, have *types.Field
|
||||||
var ptr int
|
var ptr int
|
||||||
switch {
|
switch {
|
||||||
case n1.isNil(): // case nil:
|
case ir.IsNil(n1): // case nil:
|
||||||
if nilCase != nil {
|
if nilCase != nil {
|
||||||
yyerrorl(ncase.Pos, "multiple nil cases in type switch (first at %v)", nilCase.Line())
|
base.ErrorfAt(ncase.Pos(), "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
|
||||||
} else {
|
} else {
|
||||||
nilCase = ncase
|
nilCase = ncase
|
||||||
}
|
}
|
||||||
case n1.Op != OTYPE:
|
case n1.Op() != ir.OTYPE:
|
||||||
yyerrorl(ncase.Pos, "%L is not a type", n1)
|
base.ErrorfAt(ncase.Pos(), "%L is not a type", n1)
|
||||||
case !n1.Type.IsInterface() && !implements(n1.Type, t, &missing, &have, &ptr) && !missing.Broke():
|
case !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke():
|
||||||
if have != nil && !have.Broke() {
|
if have != nil && !have.Broke() {
|
||||||
yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
|
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
|
||||||
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", n.Left.Right, n1.Type, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
|
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", n.Left().Right(), n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
|
||||||
} else if ptr != 0 {
|
} else if ptr != 0 {
|
||||||
yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
|
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
|
||||||
" (%v method has pointer receiver)", n.Left.Right, n1.Type, missing.Sym)
|
" (%v method has pointer receiver)", n.Left().Right(), n1.Type(), missing.Sym)
|
||||||
} else {
|
} else {
|
||||||
yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
|
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
|
||||||
" (missing %v method)", n.Left.Right, n1.Type, missing.Sym)
|
" (missing %v method)", n.Left().Right(), n1.Type(), missing.Sym)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if n1.Op == OTYPE {
|
if n1.Op() == ir.OTYPE {
|
||||||
ts.add(ncase.Pos, n1.Type)
|
ts.add(ncase.Pos(), n1.Type())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if ncase.Rlist.Len() != 0 {
|
if ncase.Rlist().Len() != 0 {
|
||||||
// Assign the clause variable's type.
|
// Assign the clause variable's type.
|
||||||
vt := t
|
vt := t
|
||||||
if len(ls) == 1 {
|
if len(ls) == 1 {
|
||||||
if ls[0].Op == OTYPE {
|
if ls[0].Op() == ir.OTYPE {
|
||||||
vt = ls[0].Type
|
vt = ls[0].Type()
|
||||||
} else if ls[0].Op != OLITERAL { // TODO(mdempsky): Should be !ls[0].isNil()
|
} else if !ir.IsNil(ls[0]) {
|
||||||
// Invalid single-type case;
|
// Invalid single-type case;
|
||||||
// mark variable as broken.
|
// mark variable as broken.
|
||||||
vt = nil
|
vt = nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(mdempsky): It should be possible to
|
nvar := ncase.Rlist().First()
|
||||||
// still typecheck the case body.
|
nvar.SetType(vt)
|
||||||
if vt == nil {
|
if vt != nil {
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
nvar := ncase.Rlist.First()
|
|
||||||
nvar.Type = vt
|
|
||||||
nvar = typecheck(nvar, ctxExpr|ctxAssign)
|
nvar = typecheck(nvar, ctxExpr|ctxAssign)
|
||||||
ncase.Rlist.SetFirst(nvar)
|
} else {
|
||||||
|
// Clause variable is broken; prevent typechecking.
|
||||||
|
nvar.SetTypecheck(1)
|
||||||
|
nvar.SetWalkdef(1)
|
||||||
|
}
|
||||||
|
ncase.Rlist().SetFirst(nvar)
|
||||||
}
|
}
|
||||||
|
|
||||||
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
|
typecheckslice(ncase.Body().Slice(), ctxStmt)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -133,19 +137,19 @@ func (s *typeSet) add(pos src.XPos, typ *types.Type) {
|
||||||
prevs := s.m[ls]
|
prevs := s.m[ls]
|
||||||
for _, prev := range prevs {
|
for _, prev := range prevs {
|
||||||
if types.Identical(typ, prev.typ) {
|
if types.Identical(typ, prev.typ) {
|
||||||
yyerrorl(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, linestr(prev.pos))
|
base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
s.m[ls] = append(prevs, typeSetEntry{pos, typ})
|
s.m[ls] = append(prevs, typeSetEntry{pos, typ})
|
||||||
}
|
}
|
||||||
|
|
||||||
func typecheckExprSwitch(n *Node) {
|
func typecheckExprSwitch(n ir.Node) {
|
||||||
t := types.Types[TBOOL]
|
t := types.Types[types.TBOOL]
|
||||||
if n.Left != nil {
|
if n.Left() != nil {
|
||||||
n.Left = typecheck(n.Left, ctxExpr)
|
n.SetLeft(typecheck(n.Left(), ctxExpr))
|
||||||
n.Left = defaultlit(n.Left, nil)
|
n.SetLeft(defaultlit(n.Left(), nil))
|
||||||
t = n.Left.Type
|
t = n.Left().Type()
|
||||||
}
|
}
|
||||||
|
|
||||||
var nilonly string
|
var nilonly string
|
||||||
|
|
@ -153,28 +157,28 @@ func typecheckExprSwitch(n *Node) {
|
||||||
switch {
|
switch {
|
||||||
case t.IsMap():
|
case t.IsMap():
|
||||||
nilonly = "map"
|
nilonly = "map"
|
||||||
case t.Etype == TFUNC:
|
case t.Etype == types.TFUNC:
|
||||||
nilonly = "func"
|
nilonly = "func"
|
||||||
case t.IsSlice():
|
case t.IsSlice():
|
||||||
nilonly = "slice"
|
nilonly = "slice"
|
||||||
|
|
||||||
case !IsComparable(t):
|
case !IsComparable(t):
|
||||||
if t.IsStruct() {
|
if t.IsStruct() {
|
||||||
yyerrorl(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, IncomparableField(t).Type)
|
base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Left(), IncomparableField(t).Type)
|
||||||
} else {
|
} else {
|
||||||
yyerrorl(n.Pos, "cannot switch on %L", n.Left)
|
base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Left())
|
||||||
}
|
}
|
||||||
t = nil
|
t = nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var defCase *Node
|
var defCase ir.Node
|
||||||
var cs constSet
|
var cs constSet
|
||||||
for _, ncase := range n.List.Slice() {
|
for _, ncase := range n.List().Slice() {
|
||||||
ls := ncase.List.Slice()
|
ls := ncase.List().Slice()
|
||||||
if len(ls) == 0 { // default:
|
if len(ls) == 0 { // default:
|
||||||
if defCase != nil {
|
if defCase != nil {
|
||||||
yyerrorl(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
|
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
|
||||||
} else {
|
} else {
|
||||||
defCase = ncase
|
defCase = ncase
|
||||||
}
|
}
|
||||||
|
|
@ -185,22 +189,22 @@ func typecheckExprSwitch(n *Node) {
|
||||||
ls[i] = typecheck(ls[i], ctxExpr)
|
ls[i] = typecheck(ls[i], ctxExpr)
|
||||||
ls[i] = defaultlit(ls[i], t)
|
ls[i] = defaultlit(ls[i], t)
|
||||||
n1 := ls[i]
|
n1 := ls[i]
|
||||||
if t == nil || n1.Type == nil {
|
if t == nil || n1.Type() == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if nilonly != "" && !n1.isNil() {
|
if nilonly != "" && !ir.IsNil(n1) {
|
||||||
yyerrorl(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
|
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left())
|
||||||
} else if t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type) {
|
} else if t.IsInterface() && !n1.Type().IsInterface() && !IsComparable(n1.Type()) {
|
||||||
yyerrorl(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
|
base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
|
||||||
} else {
|
} else {
|
||||||
op1, _ := assignop(n1.Type, t)
|
op1, _ := assignop(n1.Type(), t)
|
||||||
op2, _ := assignop(t, n1.Type)
|
op2, _ := assignop(t, n1.Type())
|
||||||
if op1 == OXXX && op2 == OXXX {
|
if op1 == ir.OXXX && op2 == ir.OXXX {
|
||||||
if n.Left != nil {
|
if n.Left() != nil {
|
||||||
yyerrorl(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
|
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left(), n1.Type(), t)
|
||||||
} else {
|
} else {
|
||||||
yyerrorl(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type)
|
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -211,23 +215,23 @@ func typecheckExprSwitch(n *Node) {
|
||||||
// case GOARCH == "arm" && GOARM == "5":
|
// case GOARCH == "arm" && GOARM == "5":
|
||||||
// case GOARCH == "arm":
|
// case GOARCH == "arm":
|
||||||
// which would both evaluate to false for non-ARM compiles.
|
// which would both evaluate to false for non-ARM compiles.
|
||||||
if !n1.Type.IsBoolean() {
|
if !n1.Type().IsBoolean() {
|
||||||
cs.add(ncase.Pos, n1, "case", "switch")
|
cs.add(ncase.Pos(), n1, "case", "switch")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
|
typecheckslice(ncase.Body().Slice(), ctxStmt)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// walkswitch walks a switch statement.
|
// walkswitch walks a switch statement.
|
||||||
func walkswitch(sw *Node) {
|
func walkswitch(sw ir.Node) {
|
||||||
// Guard against double walk, see #25776.
|
// Guard against double walk, see #25776.
|
||||||
if sw.List.Len() == 0 && sw.Nbody.Len() > 0 {
|
if sw.List().Len() == 0 && sw.Body().Len() > 0 {
|
||||||
return // Was fatal, but eliminating every possible source of double-walking is hard
|
return // Was fatal, but eliminating every possible source of double-walking is hard
|
||||||
}
|
}
|
||||||
|
|
||||||
if sw.Left != nil && sw.Left.Op == OTYPESW {
|
if sw.Left() != nil && sw.Left().Op() == ir.OTYPESW {
|
||||||
walkTypeSwitch(sw)
|
walkTypeSwitch(sw)
|
||||||
} else {
|
} else {
|
||||||
walkExprSwitch(sw)
|
walkExprSwitch(sw)
|
||||||
|
|
@ -236,11 +240,11 @@ func walkswitch(sw *Node) {
|
||||||
|
|
||||||
// walkExprSwitch generates an AST implementing sw. sw is an
|
// walkExprSwitch generates an AST implementing sw. sw is an
|
||||||
// expression switch.
|
// expression switch.
|
||||||
func walkExprSwitch(sw *Node) {
|
func walkExprSwitch(sw ir.Node) {
|
||||||
lno := setlineno(sw)
|
lno := setlineno(sw)
|
||||||
|
|
||||||
cond := sw.Left
|
cond := sw.Left()
|
||||||
sw.Left = nil
|
sw.SetLeft(nil)
|
||||||
|
|
||||||
// convert switch {...} to switch true {...}
|
// convert switch {...} to switch true {...}
|
||||||
if cond == nil {
|
if cond == nil {
|
||||||
|
|
@ -256,79 +260,79 @@ func walkExprSwitch(sw *Node) {
|
||||||
// because walkexpr will lower the string
|
// because walkexpr will lower the string
|
||||||
// conversion into a runtime call.
|
// conversion into a runtime call.
|
||||||
// See issue 24937 for more discussion.
|
// See issue 24937 for more discussion.
|
||||||
if cond.Op == OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
|
if cond.Op() == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
|
||||||
cond.Op = OBYTES2STRTMP
|
cond.SetOp(ir.OBYTES2STRTMP)
|
||||||
}
|
}
|
||||||
|
|
||||||
cond = walkexpr(cond, &sw.Ninit)
|
cond = walkexpr(cond, sw.PtrInit())
|
||||||
if cond.Op != OLITERAL {
|
if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
|
||||||
cond = copyexpr(cond, cond.Type, &sw.Nbody)
|
cond = copyexpr(cond, cond.Type(), sw.PtrBody())
|
||||||
}
|
}
|
||||||
|
|
||||||
lineno = lno
|
base.Pos = lno
|
||||||
|
|
||||||
s := exprSwitch{
|
s := exprSwitch{
|
||||||
exprname: cond,
|
exprname: cond,
|
||||||
}
|
}
|
||||||
|
|
||||||
var defaultGoto *Node
|
var defaultGoto ir.Node
|
||||||
var body Nodes
|
var body ir.Nodes
|
||||||
for _, ncase := range sw.List.Slice() {
|
for _, ncase := range sw.List().Slice() {
|
||||||
label := autolabel(".s")
|
label := autolabel(".s")
|
||||||
jmp := npos(ncase.Pos, nodSym(OGOTO, nil, label))
|
jmp := npos(ncase.Pos(), nodSym(ir.OGOTO, nil, label))
|
||||||
|
|
||||||
// Process case dispatch.
|
// Process case dispatch.
|
||||||
if ncase.List.Len() == 0 {
|
if ncase.List().Len() == 0 {
|
||||||
if defaultGoto != nil {
|
if defaultGoto != nil {
|
||||||
Fatalf("duplicate default case not detected during typechecking")
|
base.Fatalf("duplicate default case not detected during typechecking")
|
||||||
}
|
}
|
||||||
defaultGoto = jmp
|
defaultGoto = jmp
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, n1 := range ncase.List.Slice() {
|
for _, n1 := range ncase.List().Slice() {
|
||||||
s.Add(ncase.Pos, n1, jmp)
|
s.Add(ncase.Pos(), n1, jmp)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Process body.
|
// Process body.
|
||||||
body.Append(npos(ncase.Pos, nodSym(OLABEL, nil, label)))
|
body.Append(npos(ncase.Pos(), nodSym(ir.OLABEL, nil, label)))
|
||||||
body.Append(ncase.Nbody.Slice()...)
|
body.Append(ncase.Body().Slice()...)
|
||||||
if fall, pos := hasFall(ncase.Nbody.Slice()); !fall {
|
if fall, pos := hasFall(ncase.Body().Slice()); !fall {
|
||||||
br := nod(OBREAK, nil, nil)
|
br := ir.Nod(ir.OBREAK, nil, nil)
|
||||||
br.Pos = pos
|
br.SetPos(pos)
|
||||||
body.Append(br)
|
body.Append(br)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sw.List.Set(nil)
|
sw.PtrList().Set(nil)
|
||||||
|
|
||||||
if defaultGoto == nil {
|
if defaultGoto == nil {
|
||||||
br := nod(OBREAK, nil, nil)
|
br := ir.Nod(ir.OBREAK, nil, nil)
|
||||||
br.Pos = br.Pos.WithNotStmt()
|
br.SetPos(br.Pos().WithNotStmt())
|
||||||
defaultGoto = br
|
defaultGoto = br
|
||||||
}
|
}
|
||||||
|
|
||||||
s.Emit(&sw.Nbody)
|
s.Emit(sw.PtrBody())
|
||||||
sw.Nbody.Append(defaultGoto)
|
sw.PtrBody().Append(defaultGoto)
|
||||||
sw.Nbody.AppendNodes(&body)
|
sw.PtrBody().AppendNodes(&body)
|
||||||
walkstmtlist(sw.Nbody.Slice())
|
walkstmtlist(sw.Body().Slice())
|
||||||
}
|
}
|
||||||
|
|
||||||
// An exprSwitch walks an expression switch.
|
// An exprSwitch walks an expression switch.
|
||||||
type exprSwitch struct {
|
type exprSwitch struct {
|
||||||
exprname *Node // value being switched on
|
exprname ir.Node // value being switched on
|
||||||
|
|
||||||
done Nodes
|
done ir.Nodes
|
||||||
clauses []exprClause
|
clauses []exprClause
|
||||||
}
|
}
|
||||||
|
|
||||||
type exprClause struct {
|
type exprClause struct {
|
||||||
pos src.XPos
|
pos src.XPos
|
||||||
lo, hi *Node
|
lo, hi ir.Node
|
||||||
jmp *Node
|
jmp ir.Node
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *exprSwitch) Add(pos src.XPos, expr, jmp *Node) {
|
func (s *exprSwitch) Add(pos src.XPos, expr, jmp ir.Node) {
|
||||||
c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp}
|
c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp}
|
||||||
if okforcmp[s.exprname.Type.Etype] && expr.Op == OLITERAL {
|
if okforcmp[s.exprname.Type().Etype] && expr.Op() == ir.OLITERAL {
|
||||||
s.clauses = append(s.clauses, c)
|
s.clauses = append(s.clauses, c)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
@ -338,7 +342,7 @@ func (s *exprSwitch) Add(pos src.XPos, expr, jmp *Node) {
|
||||||
s.flush()
|
s.flush()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *exprSwitch) Emit(out *Nodes) {
|
func (s *exprSwitch) Emit(out *ir.Nodes) {
|
||||||
s.flush()
|
s.flush()
|
||||||
out.AppendNodes(&s.done)
|
out.AppendNodes(&s.done)
|
||||||
}
|
}
|
||||||
|
|
@ -355,7 +359,7 @@ func (s *exprSwitch) flush() {
|
||||||
// (e.g., sort.Slice doesn't need to invoke the less function
|
// (e.g., sort.Slice doesn't need to invoke the less function
|
||||||
// when there's only a single slice element).
|
// when there's only a single slice element).
|
||||||
|
|
||||||
if s.exprname.Type.IsString() && len(cc) >= 2 {
|
if s.exprname.Type().IsString() && len(cc) >= 2 {
|
||||||
// Sort strings by length and then by value. It is
|
// Sort strings by length and then by value. It is
|
||||||
// much cheaper to compare lengths than values, and
|
// much cheaper to compare lengths than values, and
|
||||||
// all we need here is consistency. We respect this
|
// all we need here is consistency. We respect this
|
||||||
|
|
@ -385,26 +389,25 @@ func (s *exprSwitch) flush() {
|
||||||
runs = append(runs, cc[start:])
|
runs = append(runs, cc[start:])
|
||||||
|
|
||||||
// Perform two-level binary search.
|
// Perform two-level binary search.
|
||||||
nlen := nod(OLEN, s.exprname, nil)
|
|
||||||
binarySearch(len(runs), &s.done,
|
binarySearch(len(runs), &s.done,
|
||||||
func(i int) *Node {
|
func(i int) ir.Node {
|
||||||
return nod(OLE, nlen, nodintconst(runLen(runs[i-1])))
|
return ir.Nod(ir.OLE, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(runs[i-1])))
|
||||||
},
|
},
|
||||||
func(i int, nif *Node) {
|
func(i int, nif ir.Node) {
|
||||||
run := runs[i]
|
run := runs[i]
|
||||||
nif.Left = nod(OEQ, nlen, nodintconst(runLen(run)))
|
nif.SetLeft(ir.Nod(ir.OEQ, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(run))))
|
||||||
s.search(run, &nif.Nbody)
|
s.search(run, nif.PtrBody())
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
sort.Slice(cc, func(i, j int) bool {
|
sort.Slice(cc, func(i, j int) bool {
|
||||||
return compareOp(cc[i].lo.Val(), OLT, cc[j].lo.Val())
|
return constant.Compare(cc[i].lo.Val(), token.LSS, cc[j].lo.Val())
|
||||||
})
|
})
|
||||||
|
|
||||||
// Merge consecutive integer cases.
|
// Merge consecutive integer cases.
|
||||||
if s.exprname.Type.IsInteger() {
|
if s.exprname.Type().IsInteger() {
|
||||||
merged := cc[:1]
|
merged := cc[:1]
|
||||||
for _, c := range cc[1:] {
|
for _, c := range cc[1:] {
|
||||||
last := &merged[len(merged)-1]
|
last := &merged[len(merged)-1]
|
||||||
|
|
@ -420,40 +423,40 @@ func (s *exprSwitch) flush() {
|
||||||
s.search(cc, &s.done)
|
s.search(cc, &s.done)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *exprSwitch) search(cc []exprClause, out *Nodes) {
|
func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
|
||||||
binarySearch(len(cc), out,
|
binarySearch(len(cc), out,
|
||||||
func(i int) *Node {
|
func(i int) ir.Node {
|
||||||
return nod(OLE, s.exprname, cc[i-1].hi)
|
return ir.Nod(ir.OLE, s.exprname, cc[i-1].hi)
|
||||||
},
|
},
|
||||||
func(i int, nif *Node) {
|
func(i int, nif ir.Node) {
|
||||||
c := &cc[i]
|
c := &cc[i]
|
||||||
nif.Left = c.test(s.exprname)
|
nif.SetLeft(c.test(s.exprname))
|
||||||
nif.Nbody.Set1(c.jmp)
|
nif.PtrBody().Set1(c.jmp)
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *exprClause) test(exprname *Node) *Node {
|
func (c *exprClause) test(exprname ir.Node) ir.Node {
|
||||||
// Integer range.
|
// Integer range.
|
||||||
if c.hi != c.lo {
|
if c.hi != c.lo {
|
||||||
low := nodl(c.pos, OGE, exprname, c.lo)
|
low := ir.NodAt(c.pos, ir.OGE, exprname, c.lo)
|
||||||
high := nodl(c.pos, OLE, exprname, c.hi)
|
high := ir.NodAt(c.pos, ir.OLE, exprname, c.hi)
|
||||||
return nodl(c.pos, OANDAND, low, high)
|
return ir.NodAt(c.pos, ir.OANDAND, low, high)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Optimize "switch true { ...}" and "switch false { ... }".
|
// Optimize "switch true { ...}" and "switch false { ... }".
|
||||||
if Isconst(exprname, CTBOOL) && !c.lo.Type.IsInterface() {
|
if ir.IsConst(exprname, constant.Bool) && !c.lo.Type().IsInterface() {
|
||||||
if exprname.BoolVal() {
|
if exprname.BoolVal() {
|
||||||
return c.lo
|
return c.lo
|
||||||
} else {
|
} else {
|
||||||
return nodl(c.pos, ONOT, c.lo, nil)
|
return ir.NodAt(c.pos, ir.ONOT, c.lo, nil)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nodl(c.pos, OEQ, exprname, c.lo)
|
return ir.NodAt(c.pos, ir.OEQ, exprname, c.lo)
|
||||||
}
|
}
|
||||||
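
Note (illustration only): for merged integer clauses, the test built above is a range check, the OGE/OLE pair joined by OANDAND. A tiny stand-alone equivalent:

package main

import "fmt"

// inRange is the runtime shape of the generated test lo <= x && x <= hi.
func inRange(x, lo, hi int) bool {
	return lo <= x && x <= hi
}

func main() {
	// A clause merged from "case 2, 3, 4:" covers the range [2, 4].
	fmt.Println(inRange(3, 2, 4)) // true
	fmt.Println(inRange(5, 2, 4)) // false
}
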
|
|
||||||
func allCaseExprsAreSideEffectFree(sw *Node) bool {
|
func allCaseExprsAreSideEffectFree(sw ir.Node) bool {
|
||||||
// In theory, we could be more aggressive, allowing any
|
// In theory, we could be more aggressive, allowing any
|
||||||
// side-effect-free expressions in cases, but it's a bit
|
// side-effect-free expressions in cases, but it's a bit
|
||||||
// tricky because some of that information is unavailable due
|
// tricky because some of that information is unavailable due
|
||||||
|
|
@ -461,12 +464,12 @@ func allCaseExprsAreSideEffectFree(sw *Node) bool {
|
||||||
// Restricting to constants is simple and probably powerful
|
// Restricting to constants is simple and probably powerful
|
||||||
// enough.
|
// enough.
|
||||||
|
|
||||||
for _, ncase := range sw.List.Slice() {
|
for _, ncase := range sw.List().Slice() {
|
||||||
if ncase.Op != OCASE {
|
if ncase.Op() != ir.OCASE {
|
||||||
Fatalf("switch string(byteslice) bad op: %v", ncase.Op)
|
base.Fatalf("switch string(byteslice) bad op: %v", ncase.Op())
|
||||||
}
|
}
|
||||||
for _, v := range ncase.List.Slice() {
|
for _, v := range ncase.List().Slice() {
|
||||||
if v.Op != OLITERAL {
|
if v.Op() != ir.OLITERAL {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -475,7 +478,7 @@ func allCaseExprsAreSideEffectFree(sw *Node) bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
// hasFall reports whether stmts ends with a "fallthrough" statement.
|
// hasFall reports whether stmts ends with a "fallthrough" statement.
|
||||||
func hasFall(stmts []*Node) (bool, src.XPos) {
|
func hasFall(stmts []ir.Node) (bool, src.XPos) {
|
||||||
// Search backwards for the index of the fallthrough
|
// Search backwards for the index of the fallthrough
|
||||||
// statement. Do not assume it'll be in the last
|
// statement. Do not assume it'll be in the last
|
||||||
// position, since in some cases (e.g. when the statement
|
// position, since in some cases (e.g. when the statement
|
||||||
|
|
@ -483,30 +486,30 @@ func hasFall(stmts []*Node) (bool, src.XPos) {
|
||||||
// nodes will be at the end of the list.
|
// nodes will be at the end of the list.
|
||||||
|
|
||||||
i := len(stmts) - 1
|
i := len(stmts) - 1
|
||||||
for i >= 0 && stmts[i].Op == OVARKILL {
|
for i >= 0 && stmts[i].Op() == ir.OVARKILL {
|
||||||
i--
|
i--
|
||||||
}
|
}
|
||||||
if i < 0 {
|
if i < 0 {
|
||||||
return false, src.NoXPos
|
return false, src.NoXPos
|
||||||
}
|
}
|
||||||
return stmts[i].Op == OFALL, stmts[i].Pos
|
return stmts[i].Op() == ir.OFALL, stmts[i].Pos()
|
||||||
}
|
}
|
||||||
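
Note (illustration only): hasFall detects exactly the source pattern below — a case body whose final statement (after skipping trailing OVARKILL markers) is fallthrough. A stand-alone example:

package main

import "fmt"

func grade(score int) string {
	msg := ""
	switch {
	case score >= 90:
		msg = "excellent, "
		fallthrough // hasFall reports true for this case body
	case score >= 60:
		msg += "pass"
	default:
		msg = "fail"
	}
	return msg
}

func main() {
	fmt.Println(grade(95)) // excellent, pass
	fmt.Println(grade(70)) // pass
	fmt.Println(grade(10)) // fail
}
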
|
|
||||||
// walkTypeSwitch generates an AST that implements sw, where sw is a
|
// walkTypeSwitch generates an AST that implements sw, where sw is a
|
||||||
// type switch.
|
// type switch.
|
||||||
func walkTypeSwitch(sw *Node) {
|
func walkTypeSwitch(sw ir.Node) {
|
||||||
var s typeSwitch
|
var s typeSwitch
|
||||||
s.facename = sw.Left.Right
|
s.facename = sw.Left().Right()
|
||||||
sw.Left = nil
|
sw.SetLeft(nil)
|
||||||
|
|
||||||
s.facename = walkexpr(s.facename, &sw.Ninit)
|
s.facename = walkexpr(s.facename, sw.PtrInit())
|
||||||
s.facename = copyexpr(s.facename, s.facename.Type, &sw.Nbody)
|
s.facename = copyexpr(s.facename, s.facename.Type(), sw.PtrBody())
|
||||||
s.okname = temp(types.Types[TBOOL])
|
s.okname = temp(types.Types[types.TBOOL])
|
||||||
|
|
||||||
// Get interface descriptor word.
|
// Get interface descriptor word.
|
||||||
// For empty interfaces this will be the type.
|
// For empty interfaces this will be the type.
|
||||||
// For non-empty interfaces this will be the itab.
|
// For non-empty interfaces this will be the itab.
|
||||||
itab := nod(OITAB, s.facename, nil)
|
itab := ir.Nod(ir.OITAB, s.facename, nil)
|
||||||
|
|
||||||
// For empty interfaces, do:
|
// For empty interfaces, do:
|
||||||
// if e._type == nil {
|
// if e._type == nil {
|
||||||
|
|
@ -514,92 +517,92 @@ func walkTypeSwitch(sw *Node) {
|
||||||
// }
|
// }
|
||||||
// h := e._type.hash
|
// h := e._type.hash
|
||||||
// Use a similar strategy for non-empty interfaces.
|
// Use a similar strategy for non-empty interfaces.
|
||||||
ifNil := nod(OIF, nil, nil)
|
ifNil := ir.Nod(ir.OIF, nil, nil)
|
||||||
ifNil.Left = nod(OEQ, itab, nodnil())
|
ifNil.SetLeft(ir.Nod(ir.OEQ, itab, nodnil()))
|
||||||
lineno = lineno.WithNotStmt() // disable statement marks after the first check.
|
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
|
||||||
ifNil.Left = typecheck(ifNil.Left, ctxExpr)
|
ifNil.SetLeft(typecheck(ifNil.Left(), ctxExpr))
|
||||||
ifNil.Left = defaultlit(ifNil.Left, nil)
|
ifNil.SetLeft(defaultlit(ifNil.Left(), nil))
|
||||||
// ifNil.Nbody assigned at end.
|
// ifNil.Nbody assigned at end.
|
||||||
sw.Nbody.Append(ifNil)
|
sw.PtrBody().Append(ifNil)
|
||||||
|
|
||||||
// Load hash from type or itab.
|
// Load hash from type or itab.
|
||||||
dotHash := nodSym(ODOTPTR, itab, nil)
|
dotHash := nodSym(ir.ODOTPTR, itab, nil)
|
||||||
dotHash.Type = types.Types[TUINT32]
|
dotHash.SetType(types.Types[types.TUINT32])
|
||||||
dotHash.SetTypecheck(1)
|
dotHash.SetTypecheck(1)
|
||||||
if s.facename.Type.IsEmptyInterface() {
|
if s.facename.Type().IsEmptyInterface() {
|
||||||
dotHash.Xoffset = int64(2 * Widthptr) // offset of hash in runtime._type
|
dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime._type
|
||||||
} else {
|
} else {
|
||||||
dotHash.Xoffset = int64(2 * Widthptr) // offset of hash in runtime.itab
|
dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime.itab
|
||||||
}
|
}
|
||||||
dotHash.SetBounded(true) // guaranteed not to fault
|
dotHash.SetBounded(true) // guaranteed not to fault
|
||||||
s.hashname = copyexpr(dotHash, dotHash.Type, &sw.Nbody)
|
s.hashname = copyexpr(dotHash, dotHash.Type(), sw.PtrBody())
|
||||||
|
|
||||||
br := nod(OBREAK, nil, nil)
|
br := ir.Nod(ir.OBREAK, nil, nil)
|
||||||
var defaultGoto, nilGoto *Node
|
var defaultGoto, nilGoto ir.Node
|
||||||
var body Nodes
|
var body ir.Nodes
|
||||||
for _, ncase := range sw.List.Slice() {
|
for _, ncase := range sw.List().Slice() {
|
||||||
var caseVar *Node
|
var caseVar ir.Node
|
||||||
if ncase.Rlist.Len() != 0 {
|
if ncase.Rlist().Len() != 0 {
|
||||||
caseVar = ncase.Rlist.First()
|
caseVar = ncase.Rlist().First()
|
||||||
}
|
}
|
||||||
|
|
||||||
// For single-type cases with an interface type,
|
// For single-type cases with an interface type,
|
||||||
// we initialize the case variable as part of the type assertion.
|
// we initialize the case variable as part of the type assertion.
|
||||||
// In other cases, we initialize it in the body.
|
// In other cases, we initialize it in the body.
|
||||||
var singleType *types.Type
|
var singleType *types.Type
|
||||||
if ncase.List.Len() == 1 && ncase.List.First().Op == OTYPE {
|
if ncase.List().Len() == 1 && ncase.List().First().Op() == ir.OTYPE {
|
||||||
singleType = ncase.List.First().Type
|
singleType = ncase.List().First().Type()
|
||||||
}
|
}
|
||||||
caseVarInitialized := false
|
caseVarInitialized := false
|
||||||
|
|
||||||
label := autolabel(".s")
|
label := autolabel(".s")
|
||||||
jmp := npos(ncase.Pos, nodSym(OGOTO, nil, label))
|
jmp := npos(ncase.Pos(), nodSym(ir.OGOTO, nil, label))
|
||||||
|
|
||||||
if ncase.List.Len() == 0 { // default:
|
if ncase.List().Len() == 0 { // default:
|
||||||
if defaultGoto != nil {
|
if defaultGoto != nil {
|
||||||
Fatalf("duplicate default case not detected during typechecking")
|
base.Fatalf("duplicate default case not detected during typechecking")
|
||||||
}
|
}
|
||||||
defaultGoto = jmp
|
defaultGoto = jmp
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, n1 := range ncase.List.Slice() {
|
for _, n1 := range ncase.List().Slice() {
|
||||||
if n1.isNil() { // case nil:
|
if ir.IsNil(n1) { // case nil:
|
||||||
if nilGoto != nil {
|
if nilGoto != nil {
|
||||||
Fatalf("duplicate nil case not detected during typechecking")
|
base.Fatalf("duplicate nil case not detected during typechecking")
|
||||||
}
|
}
|
||||||
nilGoto = jmp
|
nilGoto = jmp
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if singleType != nil && singleType.IsInterface() {
|
if singleType != nil && singleType.IsInterface() {
|
||||||
s.Add(ncase.Pos, n1.Type, caseVar, jmp)
|
s.Add(ncase.Pos(), n1.Type(), caseVar, jmp)
|
||||||
caseVarInitialized = true
|
caseVarInitialized = true
|
||||||
} else {
|
} else {
|
||||||
s.Add(ncase.Pos, n1.Type, nil, jmp)
|
s.Add(ncase.Pos(), n1.Type(), nil, jmp)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
body.Append(npos(ncase.Pos, nodSym(OLABEL, nil, label)))
|
body.Append(npos(ncase.Pos(), nodSym(ir.OLABEL, nil, label)))
|
||||||
if caseVar != nil && !caseVarInitialized {
|
if caseVar != nil && !caseVarInitialized {
|
||||||
val := s.facename
|
val := s.facename
|
||||||
if singleType != nil {
|
if singleType != nil {
|
||||||
// We have a single concrete type. Extract the data.
|
// We have a single concrete type. Extract the data.
|
||||||
if singleType.IsInterface() {
|
if singleType.IsInterface() {
|
||||||
Fatalf("singleType interface should have been handled in Add")
|
base.Fatalf("singleType interface should have been handled in Add")
|
||||||
}
|
}
|
||||||
val = ifaceData(ncase.Pos, s.facename, singleType)
|
val = ifaceData(ncase.Pos(), s.facename, singleType)
|
||||||
}
|
}
|
||||||
l := []*Node{
|
l := []ir.Node{
|
||||||
nodl(ncase.Pos, ODCL, caseVar, nil),
|
ir.NodAt(ncase.Pos(), ir.ODCL, caseVar, nil),
|
||||||
nodl(ncase.Pos, OAS, caseVar, val),
|
ir.NodAt(ncase.Pos(), ir.OAS, caseVar, val),
|
||||||
}
|
}
|
||||||
typecheckslice(l, ctxStmt)
|
typecheckslice(l, ctxStmt)
|
||||||
body.Append(l...)
|
body.Append(l...)
|
||||||
}
|
}
|
||||||
body.Append(ncase.Nbody.Slice()...)
|
body.Append(ncase.Body().Slice()...)
|
||||||
body.Append(br)
|
body.Append(br)
|
||||||
}
|
}
|
||||||
sw.List.Set(nil)
|
sw.PtrList().Set(nil)
|
||||||
|
|
||||||
if defaultGoto == nil {
|
if defaultGoto == nil {
|
||||||
defaultGoto = br
|
defaultGoto = br
|
||||||
|
|
@ -607,58 +610,58 @@ func walkTypeSwitch(sw *Node) {
|
||||||
if nilGoto == nil {
|
if nilGoto == nil {
|
||||||
nilGoto = defaultGoto
|
nilGoto = defaultGoto
|
||||||
}
|
}
|
||||||
ifNil.Nbody.Set1(nilGoto)
|
ifNil.PtrBody().Set1(nilGoto)
|
||||||
|
|
||||||
s.Emit(&sw.Nbody)
|
s.Emit(sw.PtrBody())
|
||||||
sw.Nbody.Append(defaultGoto)
|
sw.PtrBody().Append(defaultGoto)
|
||||||
sw.Nbody.AppendNodes(&body)
|
sw.PtrBody().AppendNodes(&body)
|
||||||
|
|
||||||
walkstmtlist(sw.Nbody.Slice())
|
walkstmtlist(sw.Body().Slice())
|
||||||
}
|
}
|
||||||
|
|
||||||
// A typeSwitch walks a type switch.
|
// A typeSwitch walks a type switch.
|
||||||
type typeSwitch struct {
|
type typeSwitch struct {
|
||||||
// Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
|
// Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
|
||||||
facename *Node // value being type-switched on
|
facename ir.Node // value being type-switched on
|
||||||
hashname *Node // type hash of the value being type-switched on
|
hashname ir.Node // type hash of the value being type-switched on
|
||||||
okname *Node // boolean used for comma-ok type assertions
|
okname ir.Node // boolean used for comma-ok type assertions
|
||||||
|
|
||||||
done Nodes
|
done ir.Nodes
|
||||||
clauses []typeClause
|
clauses []typeClause
|
||||||
}
|
}
|
||||||
|
|
||||||
type typeClause struct {
|
type typeClause struct {
|
||||||
hash uint32
|
hash uint32
|
||||||
body Nodes
|
body ir.Nodes
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *Node) {
|
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
|
||||||
var body Nodes
|
var body ir.Nodes
|
||||||
if caseVar != nil {
|
if caseVar != nil {
|
||||||
l := []*Node{
|
l := []ir.Node{
|
||||||
nodl(pos, ODCL, caseVar, nil),
|
ir.NodAt(pos, ir.ODCL, caseVar, nil),
|
||||||
nodl(pos, OAS, caseVar, nil),
|
ir.NodAt(pos, ir.OAS, caseVar, nil),
|
||||||
}
|
}
|
||||||
typecheckslice(l, ctxStmt)
|
typecheckslice(l, ctxStmt)
|
||||||
body.Append(l...)
|
body.Append(l...)
|
||||||
} else {
|
} else {
|
||||||
caseVar = nblank
|
caseVar = ir.BlankNode
|
||||||
}
|
}
|
||||||
|
|
||||||
// cv, ok = iface.(type)
|
// cv, ok = iface.(type)
|
||||||
as := nodl(pos, OAS2, nil, nil)
|
as := ir.NodAt(pos, ir.OAS2, nil, nil)
|
||||||
as.List.Set2(caseVar, s.okname) // cv, ok =
|
as.PtrList().Set2(caseVar, s.okname) // cv, ok =
|
||||||
dot := nodl(pos, ODOTTYPE, s.facename, nil)
|
dot := ir.NodAt(pos, ir.ODOTTYPE, s.facename, nil)
|
||||||
dot.Type = typ // iface.(type)
|
dot.SetType(typ) // iface.(type)
|
||||||
as.Rlist.Set1(dot)
|
as.PtrRlist().Set1(dot)
|
||||||
as = typecheck(as, ctxStmt)
|
as = typecheck(as, ctxStmt)
|
||||||
as = walkexpr(as, &body)
|
as = walkexpr(as, &body)
|
||||||
body.Append(as)
|
body.Append(as)
|
||||||
|
|
||||||
// if ok { goto label }
|
// if ok { goto label }
|
||||||
nif := nodl(pos, OIF, nil, nil)
|
nif := ir.NodAt(pos, ir.OIF, nil, nil)
|
||||||
nif.Left = s.okname
|
nif.SetLeft(s.okname)
|
||||||
nif.Nbody.Set1(jmp)
|
nif.PtrBody().Set1(jmp)
|
||||||
body.Append(nif)
|
body.Append(nif)
|
||||||
|
|
||||||
if !typ.IsInterface() {
|
if !typ.IsInterface() {
|
||||||
|
|
@ -673,7 +676,7 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *Node) {
|
||||||
s.done.AppendNodes(&body)
|
s.done.AppendNodes(&body)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *typeSwitch) Emit(out *Nodes) {
|
func (s *typeSwitch) Emit(out *ir.Nodes) {
|
||||||
s.flush()
|
s.flush()
|
||||||
out.AppendNodes(&s.done)
|
out.AppendNodes(&s.done)
|
||||||
}
|
}
|
||||||
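
Note (hand-written sketch, not compiler output): the Add method above expands each case of a type switch into a comma-ok type assertion into the case variable, followed by a conditional jump to that case's body ("if ok { goto label }"). The types, labels, and values below are made-up examples of that shape.

package main

import "fmt"

func describe(iface interface{}) string {
	// case int:
	cv0, ok := iface.(int)
	if ok {
		goto case0
	}
	// case string: (inner block keeps its temporaries out of scope at the label)
	{
		cv1, ok := iface.(string)
		if ok {
			return "string " + cv1
		}
	}
	return "default"
case0:
	return fmt.Sprintf("int %d", cv0)
}

func main() {
	fmt.Println(describe(7))    // int 7
	fmt.Println(describe("hi")) // string hi
}
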
|
|
@ -700,15 +703,15 @@ func (s *typeSwitch) flush() {
|
||||||
cc = merged
|
cc = merged
|
||||||
|
|
||||||
binarySearch(len(cc), &s.done,
|
binarySearch(len(cc), &s.done,
|
||||||
func(i int) *Node {
|
func(i int) ir.Node {
|
||||||
return nod(OLE, s.hashname, nodintconst(int64(cc[i-1].hash)))
|
return ir.Nod(ir.OLE, s.hashname, nodintconst(int64(cc[i-1].hash)))
|
||||||
},
|
},
|
||||||
func(i int, nif *Node) {
|
func(i int, nif ir.Node) {
|
||||||
// TODO(mdempsky): Omit hash equality check if
|
// TODO(mdempsky): Omit hash equality check if
|
||||||
// there's only one type.
|
// there's only one type.
|
||||||
c := cc[i]
|
c := cc[i]
|
||||||
nif.Left = nod(OEQ, s.hashname, nodintconst(int64(c.hash)))
|
nif.SetLeft(ir.Nod(ir.OEQ, s.hashname, nodintconst(int64(c.hash))))
|
||||||
nif.Nbody.AppendNodes(&c.body)
|
nif.PtrBody().AppendNodes(&c.body)
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
@ -720,35 +723,35 @@ func (s *typeSwitch) flush() {
|
||||||
// less(i) should return a boolean expression. If it evaluates true,
|
// less(i) should return a boolean expression. If it evaluates true,
|
||||||
// then cases before i will be tested; otherwise, cases i and later.
|
// then cases before i will be tested; otherwise, cases i and later.
|
||||||
//
|
//
|
||||||
// base(i, nif) should setup nif (an OIF node) to test case i. In
|
// leaf(i, nif) should setup nif (an OIF node) to test case i. In
|
||||||
// particular, it should set nif.Left and nif.Nbody.
|
// particular, it should set nif.Left and nif.Nbody.
|
||||||
func binarySearch(n int, out *Nodes, less func(i int) *Node, base func(i int, nif *Node)) {
|
func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i int, nif ir.Node)) {
|
||||||
const binarySearchMin = 4 // minimum number of cases for binary search
|
const binarySearchMin = 4 // minimum number of cases for binary search
|
||||||
|
|
||||||
var do func(lo, hi int, out *Nodes)
|
var do func(lo, hi int, out *ir.Nodes)
|
||||||
do = func(lo, hi int, out *Nodes) {
|
do = func(lo, hi int, out *ir.Nodes) {
|
||||||
n := hi - lo
|
n := hi - lo
|
||||||
if n < binarySearchMin {
|
if n < binarySearchMin {
|
||||||
for i := lo; i < hi; i++ {
|
for i := lo; i < hi; i++ {
|
||||||
nif := nod(OIF, nil, nil)
|
nif := ir.Nod(ir.OIF, nil, nil)
|
||||||
base(i, nif)
|
leaf(i, nif)
|
||||||
lineno = lineno.WithNotStmt()
|
base.Pos = base.Pos.WithNotStmt()
|
||||||
nif.Left = typecheck(nif.Left, ctxExpr)
|
nif.SetLeft(typecheck(nif.Left(), ctxExpr))
|
||||||
nif.Left = defaultlit(nif.Left, nil)
|
nif.SetLeft(defaultlit(nif.Left(), nil))
|
||||||
out.Append(nif)
|
out.Append(nif)
|
||||||
out = &nif.Rlist
|
out = nif.PtrRlist()
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
half := lo + n/2
|
half := lo + n/2
|
||||||
nif := nod(OIF, nil, nil)
|
nif := ir.Nod(ir.OIF, nil, nil)
|
||||||
nif.Left = less(half)
|
nif.SetLeft(less(half))
|
||||||
lineno = lineno.WithNotStmt()
|
base.Pos = base.Pos.WithNotStmt()
|
||||||
nif.Left = typecheck(nif.Left, ctxExpr)
|
nif.SetLeft(typecheck(nif.Left(), ctxExpr))
|
||||||
nif.Left = defaultlit(nif.Left, nil)
|
nif.SetLeft(defaultlit(nif.Left(), nil))
|
||||||
do(lo, half, &nif.Nbody)
|
do(lo, half, nif.PtrBody())
|
||||||
do(half, hi, &nif.Rlist)
|
do(half, hi, nif.PtrRlist())
|
||||||
out.Append(nif)
|
out.Append(nif)
|
||||||
}
|
}
|
||||||
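
Note (illustration only): binarySearch emits at compile time the same decision tree that the runtime analogue below walks at run time — a linear chain of ifs when fewer than binarySearchMin cases remain, otherwise a split at the midpoint guarded by the less(half) test. This is an assumed sketch of the idea, not the generated code.

package main

import "fmt"

const binarySearchMin = 4 // same threshold as above

// find returns the index of the first case value >= x, or len(cases).
func find(cases []int, x int) int {
	var search func(lo, hi int) int
	search = func(lo, hi int) int {
		if hi-lo < binarySearchMin {
			for i := lo; i < hi; i++ { // linear chain of ifs
				if x <= cases[i] {
					return i
				}
			}
			return hi
		}
		half := lo + (hi-lo)/2
		if x <= cases[half-1] { // the less(half) test
			return search(lo, half)
		}
		return search(half, hi)
	}
	return search(0, len(cases))
}

func main() {
	cases := []int{1, 3, 5, 7, 9, 11}
	fmt.Println(find(cases, 6)) // 3 (the first case value >= 6 is 7)
}
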
|
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,8 @@ package gc
|
||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
tracepkg "runtime/trace"
|
tracepkg "runtime/trace"
|
||||||
|
|
||||||
|
"cmd/compile/internal/base"
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
@ -18,10 +20,10 @@ func init() {
|
||||||
func traceHandlerGo17(traceprofile string) {
|
func traceHandlerGo17(traceprofile string) {
|
||||||
f, err := os.Create(traceprofile)
|
f, err := os.Create(traceprofile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
if err := tracepkg.Start(f); err != nil {
|
if err := tracepkg.Start(f); err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
atExit(tracepkg.Stop)
|
base.AtExit(tracepkg.Stop)
|
||||||
}
|
}
|
||||||
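
Note (stand-alone sketch): traceHandlerGo17 uses the standard runtime/trace calls. The same pattern in an ordinary program, where "trace.out" is a hypothetical output path and the compiler would register Stop via base.AtExit instead of defer:

package main

import (
	"log"
	"os"
	"runtime/trace"
)

func main() {
	f, err := os.Create("trace.out")
	if err != nil {
		log.Fatal(err)
	}
	if err := trace.Start(f); err != nil {
		log.Fatal(err)
	}
	defer trace.Stop()

	// ... traced work ...
}
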
|
|
|
||||||
File diff suppressed because it is too large

|
|
@ -3,56 +3,3 @@
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
|
||||||
"cmd/compile/internal/types"
|
|
||||||
)
|
|
||||||
|
|
||||||
// convenience constants
|
|
||||||
const (
|
|
||||||
Txxx = types.Txxx
|
|
||||||
|
|
||||||
TINT8 = types.TINT8
|
|
||||||
TUINT8 = types.TUINT8
|
|
||||||
TINT16 = types.TINT16
|
|
||||||
TUINT16 = types.TUINT16
|
|
||||||
TINT32 = types.TINT32
|
|
||||||
TUINT32 = types.TUINT32
|
|
||||||
TINT64 = types.TINT64
|
|
||||||
TUINT64 = types.TUINT64
|
|
||||||
TINT = types.TINT
|
|
||||||
TUINT = types.TUINT
|
|
||||||
TUINTPTR = types.TUINTPTR
|
|
||||||
|
|
||||||
TCOMPLEX64 = types.TCOMPLEX64
|
|
||||||
TCOMPLEX128 = types.TCOMPLEX128
|
|
||||||
|
|
||||||
TFLOAT32 = types.TFLOAT32
|
|
||||||
TFLOAT64 = types.TFLOAT64
|
|
||||||
|
|
||||||
TBOOL = types.TBOOL
|
|
||||||
|
|
||||||
TPTR = types.TPTR
|
|
||||||
TFUNC = types.TFUNC
|
|
||||||
TSLICE = types.TSLICE
|
|
||||||
TARRAY = types.TARRAY
|
|
||||||
TSTRUCT = types.TSTRUCT
|
|
||||||
TCHAN = types.TCHAN
|
|
||||||
TMAP = types.TMAP
|
|
||||||
TINTER = types.TINTER
|
|
||||||
TFORW = types.TFORW
|
|
||||||
TANY = types.TANY
|
|
||||||
TSTRING = types.TSTRING
|
|
||||||
TUNSAFEPTR = types.TUNSAFEPTR
|
|
||||||
|
|
||||||
// pseudo-types for literals
|
|
||||||
TIDEAL = types.TIDEAL
|
|
||||||
TNIL = types.TNIL
|
|
||||||
TBLANK = types.TBLANK
|
|
||||||
|
|
||||||
// pseudo-types for frame layout
|
|
||||||
TFUNCARGS = types.TFUNCARGS
|
|
||||||
TCHANARGS = types.TCHANARGS
|
|
||||||
|
|
||||||
NTYPE = types.NTYPE
|
|
||||||
)
|
|
||||||
|
|
|
||||||
|
|
@ -6,11 +6,3 @@
|
||||||
// TODO(gri) try to eliminate these soon
|
// TODO(gri) try to eliminate these soon
|
||||||
|
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import (
|
|
||||||
"cmd/compile/internal/types"
|
|
||||||
"unsafe"
|
|
||||||
)
|
|
||||||
|
|
||||||
func asNode(n *types.Node) *Node { return (*Node)(unsafe.Pointer(n)) }
|
|
||||||
func asTypesNode(n *Node) *types.Node { return (*types.Node)(unsafe.Pointer(n)) }
|
|
||||||
|
|
|
||||||
|
|
@ -6,29 +6,31 @@
|
||||||
|
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
import "cmd/compile/internal/types"
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
// builtinpkg is a fake package that declares the universe block.
|
"cmd/compile/internal/ir"
|
||||||
var builtinpkg *types.Pkg
|
"cmd/compile/internal/types"
|
||||||
|
"cmd/internal/src"
|
||||||
|
)
|
||||||
|
|
||||||
var basicTypes = [...]struct {
|
var basicTypes = [...]struct {
|
||||||
name string
|
name string
|
||||||
etype types.EType
|
etype types.EType
|
||||||
}{
|
}{
|
||||||
{"int8", TINT8},
|
{"int8", types.TINT8},
|
||||||
{"int16", TINT16},
|
{"int16", types.TINT16},
|
||||||
{"int32", TINT32},
|
{"int32", types.TINT32},
|
||||||
{"int64", TINT64},
|
{"int64", types.TINT64},
|
||||||
{"uint8", TUINT8},
|
{"uint8", types.TUINT8},
|
||||||
{"uint16", TUINT16},
|
{"uint16", types.TUINT16},
|
||||||
{"uint32", TUINT32},
|
{"uint32", types.TUINT32},
|
||||||
{"uint64", TUINT64},
|
{"uint64", types.TUINT64},
|
||||||
{"float32", TFLOAT32},
|
{"float32", types.TFLOAT32},
|
||||||
{"float64", TFLOAT64},
|
{"float64", types.TFLOAT64},
|
||||||
{"complex64", TCOMPLEX64},
|
{"complex64", types.TCOMPLEX64},
|
||||||
{"complex128", TCOMPLEX128},
|
{"complex128", types.TCOMPLEX128},
|
||||||
{"bool", TBOOL},
|
{"bool", types.TBOOL},
|
||||||
{"string", TSTRING},
|
{"string", types.TSTRING},
|
||||||
}
|
}
|
||||||
|
|
||||||
var typedefs = [...]struct {
|
var typedefs = [...]struct {
|
||||||
|
|
@ -37,30 +39,30 @@ var typedefs = [...]struct {
|
||||||
sameas32 types.EType
|
sameas32 types.EType
|
||||||
sameas64 types.EType
|
sameas64 types.EType
|
||||||
}{
|
}{
|
||||||
{"int", TINT, TINT32, TINT64},
|
{"int", types.TINT, types.TINT32, types.TINT64},
|
||||||
{"uint", TUINT, TUINT32, TUINT64},
|
{"uint", types.TUINT, types.TUINT32, types.TUINT64},
|
||||||
{"uintptr", TUINTPTR, TUINT32, TUINT64},
|
{"uintptr", types.TUINTPTR, types.TUINT32, types.TUINT64},
|
||||||
}
|
}
|
||||||
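
Note (illustration only): the typedefs table above records that int, uint, and uintptr are backed by their 32- or 64-bit counterparts depending on the target word size. The same fact is observable from ordinary Go code; output depends on the platform.

package main

import (
	"fmt"
	"math/bits"
	"unsafe"
)

func main() {
	// int is represented as int32 or int64 depending on the target,
	// which is what the sameas32/sameas64 columns encode.
	fmt.Println(bits.UintSize)             // 32 or 64
	fmt.Println(unsafe.Sizeof(int(0)) * 8) // the same value
}
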
|
|
||||||
var builtinFuncs = [...]struct {
|
var builtinFuncs = [...]struct {
|
||||||
name string
|
name string
|
||||||
op Op
|
op ir.Op
|
||||||
}{
|
}{
|
||||||
{"append", OAPPEND},
|
{"append", ir.OAPPEND},
|
||||||
{"cap", OCAP},
|
{"cap", ir.OCAP},
|
||||||
{"close", OCLOSE},
|
{"close", ir.OCLOSE},
|
||||||
{"complex", OCOMPLEX},
|
{"complex", ir.OCOMPLEX},
|
||||||
{"copy", OCOPY},
|
{"copy", ir.OCOPY},
|
||||||
{"delete", ODELETE},
|
{"delete", ir.ODELETE},
|
||||||
{"imag", OIMAG},
|
{"imag", ir.OIMAG},
|
||||||
{"len", OLEN},
|
{"len", ir.OLEN},
|
||||||
{"make", OMAKE},
|
{"make", ir.OMAKE},
|
||||||
{"new", ONEW},
|
{"new", ir.ONEW},
|
||||||
{"panic", OPANIC},
|
{"panic", ir.OPANIC},
|
||||||
{"print", OPRINT},
|
{"print", ir.OPRINT},
|
||||||
{"println", OPRINTN},
|
{"println", ir.OPRINTN},
|
||||||
{"real", OREAL},
|
{"real", ir.OREAL},
|
||||||
{"recover", ORECOVER},
|
{"recover", ir.ORECOVER},
|
||||||
}
|
}
|
||||||
|
|
||||||
// isBuiltinFuncName reports whether name matches a builtin function
|
// isBuiltinFuncName reports whether name matches a builtin function
|
||||||
|
|
@ -76,11 +78,11 @@ func isBuiltinFuncName(name string) bool {
|
||||||
|
|
||||||
var unsafeFuncs = [...]struct {
|
var unsafeFuncs = [...]struct {
|
||||||
name string
|
name string
|
||||||
op Op
|
op ir.Op
|
||||||
}{
|
}{
|
||||||
{"Alignof", OALIGNOF},
|
{"Alignof", ir.OALIGNOF},
|
||||||
{"Offsetof", OOFFSETOF},
|
{"Offsetof", ir.OOFFSETOF},
|
||||||
{"Sizeof", OSIZEOF},
|
{"Sizeof", ir.OSIZEOF},
|
||||||
}
|
}
|
||||||
|
|
||||||
// initUniverse initializes the universe block.
|
// initUniverse initializes the universe block.
|
||||||
|
|
@ -95,121 +97,117 @@ func lexinit() {
|
||||||
for _, s := range &basicTypes {
|
for _, s := range &basicTypes {
|
||||||
etype := s.etype
|
etype := s.etype
|
||||||
if int(etype) >= len(types.Types) {
|
if int(etype) >= len(types.Types) {
|
||||||
Fatalf("lexinit: %s bad etype", s.name)
|
base.Fatalf("lexinit: %s bad etype", s.name)
|
||||||
}
|
}
|
||||||
s2 := builtinpkg.Lookup(s.name)
|
s2 := ir.BuiltinPkg.Lookup(s.name)
|
||||||
t := types.Types[etype]
|
t := types.Types[etype]
|
||||||
if t == nil {
|
if t == nil {
|
||||||
t = types.New(etype)
|
t = types.New(etype)
|
||||||
t.Sym = s2
|
t.Sym = s2
|
||||||
if etype != TANY && etype != TSTRING {
|
if etype != types.TANY && etype != types.TSTRING {
|
||||||
dowidth(t)
|
dowidth(t)
|
||||||
}
|
}
|
||||||
types.Types[etype] = t
|
types.Types[etype] = t
|
||||||
}
|
}
|
||||||
s2.Def = asTypesNode(typenod(t))
|
s2.Def = typenod(t)
|
||||||
asNode(s2.Def).Name = new(Name)
|
ir.AsNode(s2.Def).SetName(new(ir.Name))
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, s := range &builtinFuncs {
|
for _, s := range &builtinFuncs {
|
||||||
s2 := builtinpkg.Lookup(s.name)
|
s2 := ir.BuiltinPkg.Lookup(s.name)
|
||||||
s2.Def = asTypesNode(newname(s2))
|
s2.Def = NewName(s2)
|
||||||
asNode(s2.Def).SetSubOp(s.op)
|
ir.AsNode(s2.Def).SetSubOp(s.op)
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, s := range &unsafeFuncs {
|
for _, s := range &unsafeFuncs {
|
||||||
s2 := unsafepkg.Lookup(s.name)
|
s2 := unsafepkg.Lookup(s.name)
|
||||||
s2.Def = asTypesNode(newname(s2))
|
s2.Def = NewName(s2)
|
||||||
asNode(s2.Def).SetSubOp(s.op)
|
ir.AsNode(s2.Def).SetSubOp(s.op)
|
||||||
}
|
}
|
||||||
|
|
||||||
types.UntypedString = types.New(TSTRING)
|
types.UntypedString = types.New(types.TSTRING)
|
||||||
types.UntypedBool = types.New(TBOOL)
|
types.UntypedBool = types.New(types.TBOOL)
|
||||||
types.Types[TANY] = types.New(TANY)
|
types.Types[types.TANY] = types.New(types.TANY)
|
||||||
|
|
||||||
s := builtinpkg.Lookup("true")
|
s := ir.BuiltinPkg.Lookup("true")
|
||||||
s.Def = asTypesNode(nodbool(true))
|
s.Def = nodbool(true)
|
||||||
asNode(s.Def).Sym = lookup("true")
|
ir.AsNode(s.Def).SetSym(lookup("true"))
|
||||||
asNode(s.Def).Name = new(Name)
|
ir.AsNode(s.Def).SetName(new(ir.Name))
|
||||||
asNode(s.Def).Type = types.UntypedBool
|
ir.AsNode(s.Def).SetType(types.UntypedBool)
|
||||||
|
|
||||||
s = builtinpkg.Lookup("false")
|
s = ir.BuiltinPkg.Lookup("false")
|
||||||
s.Def = asTypesNode(nodbool(false))
|
s.Def = nodbool(false)
|
||||||
asNode(s.Def).Sym = lookup("false")
|
ir.AsNode(s.Def).SetSym(lookup("false"))
|
||||||
asNode(s.Def).Name = new(Name)
|
ir.AsNode(s.Def).SetName(new(ir.Name))
|
||||||
asNode(s.Def).Type = types.UntypedBool
|
ir.AsNode(s.Def).SetType(types.UntypedBool)
|
||||||
|
|
||||||
s = lookup("_")
|
s = lookup("_")
|
||||||
s.Block = -100
|
s.Block = -100
|
||||||
s.Def = asTypesNode(newname(s))
|
s.Def = NewName(s)
|
||||||
types.Types[TBLANK] = types.New(TBLANK)
|
types.Types[types.TBLANK] = types.New(types.TBLANK)
|
||||||
asNode(s.Def).Type = types.Types[TBLANK]
|
ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
|
||||||
nblank = asNode(s.Def)
|
ir.BlankNode = ir.AsNode(s.Def)
|
||||||
|
|
||||||
s = builtinpkg.Lookup("_")
|
s = ir.BuiltinPkg.Lookup("_")
|
||||||
s.Block = -100
|
s.Block = -100
|
||||||
s.Def = asTypesNode(newname(s))
|
s.Def = NewName(s)
|
||||||
types.Types[TBLANK] = types.New(TBLANK)
|
types.Types[types.TBLANK] = types.New(types.TBLANK)
|
||||||
asNode(s.Def).Type = types.Types[TBLANK]
|
ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
|
||||||
|
|
||||||
types.Types[TNIL] = types.New(TNIL)
|
types.Types[types.TNIL] = types.New(types.TNIL)
|
||||||
s = builtinpkg.Lookup("nil")
|
s = ir.BuiltinPkg.Lookup("nil")
|
||||||
var v Val
|
s.Def = nodnil()
|
||||||
v.U = new(NilVal)
|
ir.AsNode(s.Def).SetSym(s)
|
||||||
s.Def = asTypesNode(nodlit(v))
|
ir.AsNode(s.Def).SetName(new(ir.Name))
|
||||||
asNode(s.Def).Sym = s
|
|
||||||
asNode(s.Def).Name = new(Name)
|
|
||||||
|
|
||||||
s = builtinpkg.Lookup("iota")
|
s = ir.BuiltinPkg.Lookup("iota")
|
||||||
s.Def = asTypesNode(nod(OIOTA, nil, nil))
|
s.Def = ir.Nod(ir.OIOTA, nil, nil)
|
||||||
asNode(s.Def).Sym = s
|
ir.AsNode(s.Def).SetSym(s)
|
||||||
asNode(s.Def).Name = new(Name)
|
ir.AsNode(s.Def).SetName(new(ir.Name))
|
||||||
}
|
}
|
||||||
|
|
||||||
func typeinit() {
|
func typeinit() {
|
||||||
if Widthptr == 0 {
|
if Widthptr == 0 {
|
||||||
Fatalf("typeinit before betypeinit")
|
base.Fatalf("typeinit before betypeinit")
|
||||||
}
|
}
|
||||||
|
|
||||||
for et := types.EType(0); et < NTYPE; et++ {
|
for et := types.EType(0); et < types.NTYPE; et++ {
|
||||||
simtype[et] = et
|
simtype[et] = et
|
||||||
}
|
}
|
||||||
|
|
||||||
types.Types[TPTR] = types.New(TPTR)
|
types.Types[types.TPTR] = types.New(types.TPTR)
|
||||||
dowidth(types.Types[TPTR])
|
dowidth(types.Types[types.TPTR])
|
||||||
|
|
||||||
t := types.New(TUNSAFEPTR)
|
t := types.New(types.TUNSAFEPTR)
|
||||||
types.Types[TUNSAFEPTR] = t
|
types.Types[types.TUNSAFEPTR] = t
|
||||||
t.Sym = unsafepkg.Lookup("Pointer")
|
t.Sym = unsafepkg.Lookup("Pointer")
|
||||||
t.Sym.Def = asTypesNode(typenod(t))
|
t.Sym.Def = typenod(t)
|
||||||
asNode(t.Sym.Def).Name = new(Name)
|
ir.AsNode(t.Sym.Def).SetName(new(ir.Name))
|
||||||
dowidth(types.Types[TUNSAFEPTR])
|
dowidth(types.Types[types.TUNSAFEPTR])
|
||||||
|
|
||||||
for et := TINT8; et <= TUINT64; et++ {
|
for et := types.TINT8; et <= types.TUINT64; et++ {
|
||||||
isInt[et] = true
|
isInt[et] = true
|
||||||
}
|
}
|
||||||
isInt[TINT] = true
|
isInt[types.TINT] = true
|
||||||
isInt[TUINT] = true
|
isInt[types.TUINT] = true
|
||||||
isInt[TUINTPTR] = true
|
isInt[types.TUINTPTR] = true
|
||||||
|
|
||||||
isFloat[TFLOAT32] = true
|
isFloat[types.TFLOAT32] = true
|
||||||
isFloat[TFLOAT64] = true
|
isFloat[types.TFLOAT64] = true
|
||||||
|
|
||||||
isComplex[TCOMPLEX64] = true
|
isComplex[types.TCOMPLEX64] = true
|
||||||
isComplex[TCOMPLEX128] = true
|
isComplex[types.TCOMPLEX128] = true
|
||||||
|
|
||||||
// initialize okfor
|
// initialize okfor
|
||||||
for et := types.EType(0); et < NTYPE; et++ {
|
for et := types.EType(0); et < types.NTYPE; et++ {
|
||||||
if isInt[et] || et == TIDEAL {
|
if isInt[et] || et == types.TIDEAL {
|
||||||
okforeq[et] = true
|
okforeq[et] = true
|
||||||
okforcmp[et] = true
|
okforcmp[et] = true
|
||||||
okforarith[et] = true
|
okforarith[et] = true
|
||||||
okforadd[et] = true
|
okforadd[et] = true
|
||||||
okforand[et] = true
|
okforand[et] = true
|
||||||
okforconst[et] = true
|
ir.OKForConst[et] = true
|
||||||
issimple[et] = true
|
issimple[et] = true
|
||||||
minintval[et] = new(Mpint)
|
|
||||||
maxintval[et] = new(Mpint)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if isFloat[et] {
|
if isFloat[et] {
|
||||||
|
|
@ -217,53 +215,51 @@ func typeinit() {
|
||||||
okforcmp[et] = true
|
okforcmp[et] = true
|
||||||
okforadd[et] = true
|
okforadd[et] = true
|
||||||
okforarith[et] = true
|
okforarith[et] = true
|
||||||
okforconst[et] = true
|
ir.OKForConst[et] = true
|
||||||
issimple[et] = true
|
issimple[et] = true
|
||||||
minfltval[et] = newMpflt()
|
|
||||||
maxfltval[et] = newMpflt()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if isComplex[et] {
|
if isComplex[et] {
|
||||||
okforeq[et] = true
|
okforeq[et] = true
|
||||||
okforadd[et] = true
|
okforadd[et] = true
|
||||||
okforarith[et] = true
|
okforarith[et] = true
|
||||||
okforconst[et] = true
|
ir.OKForConst[et] = true
|
||||||
issimple[et] = true
|
issimple[et] = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
issimple[TBOOL] = true
|
issimple[types.TBOOL] = true
|
||||||
|
|
||||||
okforadd[TSTRING] = true
|
okforadd[types.TSTRING] = true
|
||||||
|
|
||||||
okforbool[TBOOL] = true
|
okforbool[types.TBOOL] = true
|
||||||
|
|
||||||
okforcap[TARRAY] = true
|
okforcap[types.TARRAY] = true
|
||||||
okforcap[TCHAN] = true
|
okforcap[types.TCHAN] = true
|
||||||
okforcap[TSLICE] = true
|
okforcap[types.TSLICE] = true
|
||||||
|
|
||||||
okforconst[TBOOL] = true
|
ir.OKForConst[types.TBOOL] = true
|
||||||
okforconst[TSTRING] = true
|
ir.OKForConst[types.TSTRING] = true
|
||||||
|
|
||||||
okforlen[TARRAY] = true
|
okforlen[types.TARRAY] = true
|
||||||
okforlen[TCHAN] = true
|
okforlen[types.TCHAN] = true
|
||||||
okforlen[TMAP] = true
|
okforlen[types.TMAP] = true
|
||||||
okforlen[TSLICE] = true
|
okforlen[types.TSLICE] = true
|
||||||
okforlen[TSTRING] = true
|
okforlen[types.TSTRING] = true
|
||||||
|
|
||||||
okforeq[TPTR] = true
|
okforeq[types.TPTR] = true
|
||||||
okforeq[TUNSAFEPTR] = true
|
okforeq[types.TUNSAFEPTR] = true
|
||||||
okforeq[TINTER] = true
|
okforeq[types.TINTER] = true
|
||||||
okforeq[TCHAN] = true
|
okforeq[types.TCHAN] = true
|
||||||
okforeq[TSTRING] = true
|
okforeq[types.TSTRING] = true
|
||||||
okforeq[TBOOL] = true
|
okforeq[types.TBOOL] = true
|
||||||
okforeq[TMAP] = true // nil only; refined in typecheck
|
okforeq[types.TMAP] = true // nil only; refined in typecheck
|
||||||
okforeq[TFUNC] = true // nil only; refined in typecheck
|
okforeq[types.TFUNC] = true // nil only; refined in typecheck
|
||||||
okforeq[TSLICE] = true // nil only; refined in typecheck
|
okforeq[types.TSLICE] = true // nil only; refined in typecheck
|
||||||
okforeq[TARRAY] = true // only if element type is comparable; refined in typecheck
|
okforeq[types.TARRAY] = true // only if element type is comparable; refined in typecheck
|
||||||
okforeq[TSTRUCT] = true // only if all struct fields are comparable; refined in typecheck
|
okforeq[types.TSTRUCT] = true // only if all struct fields are comparable; refined in typecheck
|
||||||
|
|
||||||
okforcmp[TSTRING] = true
|
okforcmp[types.TSTRING] = true
|
||||||
|
|
||||||
var i int
|
var i int
|
||||||
for i = 0; i < len(okfor); i++ {
|
for i = 0; i < len(okfor); i++ {
|
||||||
|
|
@ -271,76 +267,51 @@ func typeinit() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// binary
|
// binary
|
||||||
okfor[OADD] = okforadd[:]
|
okfor[ir.OADD] = okforadd[:]
|
||||||
okfor[OAND] = okforand[:]
|
okfor[ir.OAND] = okforand[:]
|
||||||
okfor[OANDAND] = okforbool[:]
|
okfor[ir.OANDAND] = okforbool[:]
|
||||||
okfor[OANDNOT] = okforand[:]
|
okfor[ir.OANDNOT] = okforand[:]
|
||||||
okfor[ODIV] = okforarith[:]
|
okfor[ir.ODIV] = okforarith[:]
|
||||||
okfor[OEQ] = okforeq[:]
|
okfor[ir.OEQ] = okforeq[:]
|
||||||
okfor[OGE] = okforcmp[:]
|
okfor[ir.OGE] = okforcmp[:]
|
||||||
okfor[OGT] = okforcmp[:]
|
okfor[ir.OGT] = okforcmp[:]
|
||||||
okfor[OLE] = okforcmp[:]
|
okfor[ir.OLE] = okforcmp[:]
|
||||||
okfor[OLT] = okforcmp[:]
|
okfor[ir.OLT] = okforcmp[:]
|
||||||
okfor[OMOD] = okforand[:]
|
okfor[ir.OMOD] = okforand[:]
|
||||||
okfor[OMUL] = okforarith[:]
|
okfor[ir.OMUL] = okforarith[:]
|
||||||
okfor[ONE] = okforeq[:]
|
okfor[ir.ONE] = okforeq[:]
|
||||||
okfor[OOR] = okforand[:]
|
okfor[ir.OOR] = okforand[:]
|
||||||
okfor[OOROR] = okforbool[:]
|
okfor[ir.OOROR] = okforbool[:]
|
||||||
okfor[OSUB] = okforarith[:]
|
okfor[ir.OSUB] = okforarith[:]
|
||||||
okfor[OXOR] = okforand[:]
|
okfor[ir.OXOR] = okforand[:]
|
||||||
okfor[OLSH] = okforand[:]
|
okfor[ir.OLSH] = okforand[:]
|
||||||
okfor[ORSH] = okforand[:]
|
okfor[ir.ORSH] = okforand[:]
|
||||||
|
|
||||||
// unary
|
// unary
|
||||||
okfor[OBITNOT] = okforand[:]
|
okfor[ir.OBITNOT] = okforand[:]
|
||||||
okfor[ONEG] = okforarith[:]
|
okfor[ir.ONEG] = okforarith[:]
|
||||||
okfor[ONOT] = okforbool[:]
|
okfor[ir.ONOT] = okforbool[:]
|
||||||
okfor[OPLUS] = okforarith[:]
|
okfor[ir.OPLUS] = okforarith[:]
|
||||||
|
|
||||||
// special
|
// special
|
||||||
okfor[OCAP] = okforcap[:]
|
okfor[ir.OCAP] = okforcap[:]
|
||||||
okfor[OLEN] = okforlen[:]
|
okfor[ir.OLEN] = okforlen[:]
|
||||||
|
|
||||||
// comparison
|
// comparison
|
||||||
iscmp[OLT] = true
|
iscmp[ir.OLT] = true
|
||||||
iscmp[OGT] = true
|
iscmp[ir.OGT] = true
|
||||||
iscmp[OGE] = true
|
iscmp[ir.OGE] = true
|
||||||
iscmp[OLE] = true
|
iscmp[ir.OLE] = true
|
||||||
iscmp[OEQ] = true
|
iscmp[ir.OEQ] = true
|
||||||
iscmp[ONE] = true
|
iscmp[ir.ONE] = true
|
||||||
|
|
||||||
maxintval[TINT8].SetString("0x7f")
|
types.Types[types.TINTER] = types.New(types.TINTER) // empty interface
|
||||||
minintval[TINT8].SetString("-0x80")
|
|
||||||
maxintval[TINT16].SetString("0x7fff")
|
|
||||||
minintval[TINT16].SetString("-0x8000")
|
|
||||||
maxintval[TINT32].SetString("0x7fffffff")
|
|
||||||
minintval[TINT32].SetString("-0x80000000")
|
|
||||||
maxintval[TINT64].SetString("0x7fffffffffffffff")
|
|
||||||
minintval[TINT64].SetString("-0x8000000000000000")
|
|
||||||
|
|
||||||
maxintval[TUINT8].SetString("0xff")
|
|
||||||
maxintval[TUINT16].SetString("0xffff")
|
|
||||||
maxintval[TUINT32].SetString("0xffffffff")
|
|
||||||
maxintval[TUINT64].SetString("0xffffffffffffffff")
|
|
||||||
|
|
||||||
// f is valid float if min < f < max. (min and max are not themselves valid.)
|
|
||||||
maxfltval[TFLOAT32].SetString("33554431p103") // 2^24-1 p (127-23) + 1/2 ulp
|
|
||||||
minfltval[TFLOAT32].SetString("-33554431p103")
|
|
||||||
maxfltval[TFLOAT64].SetString("18014398509481983p970") // 2^53-1 p (1023-52) + 1/2 ulp
|
|
||||||
minfltval[TFLOAT64].SetString("-18014398509481983p970")
|
|
||||||
|
|
||||||
maxfltval[TCOMPLEX64] = maxfltval[TFLOAT32]
|
|
||||||
minfltval[TCOMPLEX64] = minfltval[TFLOAT32]
|
|
||||||
maxfltval[TCOMPLEX128] = maxfltval[TFLOAT64]
|
|
||||||
minfltval[TCOMPLEX128] = minfltval[TFLOAT64]
|
|
||||||
|
|
||||||
types.Types[TINTER] = types.New(TINTER) // empty interface
|
|
||||||
|
|
||||||
// simple aliases
|
// simple aliases
|
||||||
simtype[TMAP] = TPTR
|
simtype[types.TMAP] = types.TPTR
|
||||||
simtype[TCHAN] = TPTR
|
simtype[types.TCHAN] = types.TPTR
|
||||||
simtype[TFUNC] = TPTR
|
simtype[types.TFUNC] = types.TPTR
|
||||||
simtype[TUNSAFEPTR] = TPTR
|
simtype[types.TUNSAFEPTR] = types.TPTR
|
||||||
|
|
||||||
slicePtrOffset = 0
|
slicePtrOffset = 0
|
||||||
sliceLenOffset = Rnd(slicePtrOffset+int64(Widthptr), int64(Widthptr))
|
sliceLenOffset = Rnd(slicePtrOffset+int64(Widthptr), int64(Widthptr))
|
||||||
|
|
@ -350,31 +321,29 @@ func typeinit() {
|
||||||
// string is same as slice wo the cap
|
// string is same as slice wo the cap
|
||||||
sizeofString = Rnd(sliceLenOffset+int64(Widthptr), int64(Widthptr))
|
sizeofString = Rnd(sliceLenOffset+int64(Widthptr), int64(Widthptr))
|
||||||
|
|
||||||
dowidth(types.Types[TSTRING])
|
dowidth(types.Types[types.TSTRING])
|
||||||
dowidth(types.UntypedString)
|
dowidth(types.UntypedString)
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeErrorInterface() *types.Type {
|
func makeErrorInterface() *types.Type {
|
||||||
field := types.NewField()
|
sig := functypefield(fakeRecvField(), nil, []*types.Field{
|
||||||
field.Type = types.Types[TSTRING]
|
types.NewField(src.NoXPos, nil, types.Types[types.TSTRING]),
|
||||||
f := functypefield(fakeRecvField(), nil, []*types.Field{field})
|
})
|
||||||
|
|
||||||
field = types.NewField()
|
method := types.NewField(src.NoXPos, lookup("Error"), sig)
|
||||||
field.Sym = lookup("Error")
|
|
||||||
field.Type = f
|
|
||||||
|
|
||||||
t := types.New(TINTER)
|
t := types.New(types.TINTER)
|
||||||
t.SetInterface([]*types.Field{field})
|
t.SetInterface([]*types.Field{method})
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
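
Note (stand-alone example): makeErrorInterface builds, by hand, the universe "error" type — an interface with a single Error() string method. The program below, with a made-up parseError type, shows the type it corresponds to at the source level.

package main

import "fmt"

type parseError struct{ line int } // hypothetical example type

func (e *parseError) Error() string {
	return fmt.Sprintf("parse error on line %d", e.line)
}

func main() {
	var err error = &parseError{line: 3} // satisfies the interface built above
	fmt.Println(err)                     // parse error on line 3
}
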
|
|
||||||
func lexinit1() {
|
func lexinit1() {
|
||||||
// error type
|
// error type
|
||||||
s := builtinpkg.Lookup("error")
|
s := ir.BuiltinPkg.Lookup("error")
|
||||||
types.Errortype = makeErrorInterface()
|
types.Errortype = makeErrorInterface()
|
||||||
types.Errortype.Sym = s
|
types.Errortype.Sym = s
|
||||||
types.Errortype.Orig = makeErrorInterface()
|
types.Errortype.Orig = makeErrorInterface()
|
||||||
s.Def = asTypesNode(typenod(types.Errortype))
|
s.Def = typenod(types.Errortype)
|
||||||
dowidth(types.Errortype)
|
dowidth(types.Errortype)
|
||||||
|
|
||||||
// We create separate byte and rune types for better error messages
|
// We create separate byte and rune types for better error messages
|
||||||
|
|
@ -386,24 +355,24 @@ func lexinit1() {
|
||||||
// type aliases, albeit at the cost of having to deal with it everywhere).
|
// type aliases, albeit at the cost of having to deal with it everywhere).
|
||||||
|
|
||||||
// byte alias
|
// byte alias
|
||||||
s = builtinpkg.Lookup("byte")
|
s = ir.BuiltinPkg.Lookup("byte")
|
||||||
types.Bytetype = types.New(TUINT8)
|
types.Bytetype = types.New(types.TUINT8)
|
||||||
types.Bytetype.Sym = s
|
types.Bytetype.Sym = s
|
||||||
s.Def = asTypesNode(typenod(types.Bytetype))
|
s.Def = typenod(types.Bytetype)
|
||||||
asNode(s.Def).Name = new(Name)
|
ir.AsNode(s.Def).SetName(new(ir.Name))
|
||||||
dowidth(types.Bytetype)
|
dowidth(types.Bytetype)
|
||||||
|
|
||||||
// rune alias
|
// rune alias
|
||||||
s = builtinpkg.Lookup("rune")
|
s = ir.BuiltinPkg.Lookup("rune")
|
||||||
types.Runetype = types.New(TINT32)
|
types.Runetype = types.New(types.TINT32)
|
||||||
types.Runetype.Sym = s
|
types.Runetype.Sym = s
|
||||||
s.Def = asTypesNode(typenod(types.Runetype))
|
s.Def = typenod(types.Runetype)
|
||||||
asNode(s.Def).Name = new(Name)
|
ir.AsNode(s.Def).SetName(new(ir.Name))
|
||||||
dowidth(types.Runetype)
|
dowidth(types.Runetype)
|
||||||
|
|
||||||
// backend-dependent builtin types (e.g. int).
|
// backend-dependent builtin types (e.g. int).
|
||||||
for _, s := range &typedefs {
|
for _, s := range &typedefs {
|
||||||
s1 := builtinpkg.Lookup(s.name)
|
s1 := ir.BuiltinPkg.Lookup(s.name)
|
||||||
|
|
||||||
sameas := s.sameas32
|
sameas := s.sameas32
|
||||||
if Widthptr == 8 {
|
if Widthptr == 8 {
|
||||||
|
|
@ -411,17 +380,13 @@ func lexinit1() {
|
||||||
}
|
}
|
||||||
|
|
||||||
simtype[s.etype] = sameas
|
simtype[s.etype] = sameas
|
||||||
minfltval[s.etype] = minfltval[sameas]
|
|
||||||
maxfltval[s.etype] = maxfltval[sameas]
|
|
||||||
minintval[s.etype] = minintval[sameas]
|
|
||||||
maxintval[s.etype] = maxintval[sameas]
|
|
||||||
|
|
||||||
t := types.New(s.etype)
|
t := types.New(s.etype)
|
||||||
t.Sym = s1
|
t.Sym = s1
|
||||||
types.Types[s.etype] = t
|
types.Types[s.etype] = t
|
||||||
s1.Def = asTypesNode(typenod(t))
|
s1.Def = typenod(t)
|
||||||
asNode(s1.Def).Name = new(Name)
|
ir.AsNode(s1.Def).SetName(new(ir.Name))
|
||||||
s1.Origpkg = builtinpkg
|
s1.Origpkg = ir.BuiltinPkg
|
||||||
|
|
||||||
dowidth(t)
|
dowidth(t)
|
||||||
}
|
}
|
||||||
|
|
@ -433,7 +398,7 @@ func finishUniverse() {
|
||||||
// that we silently skip symbols that are already declared in the
|
// that we silently skip symbols that are already declared in the
|
||||||
// package block rather than emitting a redeclared symbol error.
|
// package block rather than emitting a redeclared symbol error.
|
||||||
|
|
||||||
for _, s := range builtinpkg.Syms {
|
for _, s := range ir.BuiltinPkg.Syms {
|
||||||
if s.Def == nil {
|
if s.Def == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
@ -446,8 +411,8 @@ func finishUniverse() {
|
||||||
s1.Block = s.Block
|
s1.Block = s.Block
|
||||||
}
|
}
|
||||||
|
|
||||||
nodfp = newname(lookup(".fp"))
|
nodfp = NewName(lookup(".fp"))
|
||||||
nodfp.Type = types.Types[TINT32]
|
nodfp.SetType(types.Types[types.TINT32])
|
||||||
nodfp.SetClass(PPARAM)
|
nodfp.SetClass(ir.PPARAM)
|
||||||
nodfp.Name.SetUsed(true)
|
nodfp.Name().SetUsed(true)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,73 +4,78 @@
|
||||||
|
|
||||||
package gc
|
package gc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"cmd/compile/internal/base"
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
|
)
|
||||||
|
|
||||||
// evalunsafe evaluates a package unsafe operation and returns the result.
|
// evalunsafe evaluates a package unsafe operation and returns the result.
|
||||||
func evalunsafe(n *Node) int64 {
|
func evalunsafe(n ir.Node) int64 {
|
||||||
switch n.Op {
|
switch n.Op() {
|
||||||
case OALIGNOF, OSIZEOF:
|
case ir.OALIGNOF, ir.OSIZEOF:
|
||||||
n.Left = typecheck(n.Left, ctxExpr)
|
n.SetLeft(typecheck(n.Left(), ctxExpr))
|
||||||
n.Left = defaultlit(n.Left, nil)
|
n.SetLeft(defaultlit(n.Left(), nil))
|
||||||
tr := n.Left.Type
|
tr := n.Left().Type()
|
||||||
if tr == nil {
|
if tr == nil {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
dowidth(tr)
|
dowidth(tr)
|
||||||
if n.Op == OALIGNOF {
|
if n.Op() == ir.OALIGNOF {
|
||||||
return int64(tr.Align)
|
return int64(tr.Align)
|
||||||
}
|
}
|
||||||
return tr.Width
|
return tr.Width
|
||||||
|
|
||||||
case OOFFSETOF:
|
case ir.OOFFSETOF:
|
||||||
// must be a selector.
|
// must be a selector.
|
||||||
if n.Left.Op != OXDOT {
|
if n.Left().Op() != ir.OXDOT {
|
||||||
yyerror("invalid expression %v", n)
|
base.Errorf("invalid expression %v", n)
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remember base of selector to find it back after dot insertion.
|
// Remember base of selector to find it back after dot insertion.
|
||||||
// Since r->left may be mutated by typechecking, check it explicitly
|
// Since r->left may be mutated by typechecking, check it explicitly
|
||||||
// first to track it correctly.
|
// first to track it correctly.
|
||||||
n.Left.Left = typecheck(n.Left.Left, ctxExpr)
|
n.Left().SetLeft(typecheck(n.Left().Left(), ctxExpr))
|
||||||
base := n.Left.Left
|
sbase := n.Left().Left()
|
||||||
|
|
||||||
n.Left = typecheck(n.Left, ctxExpr)
|
n.SetLeft(typecheck(n.Left(), ctxExpr))
|
||||||
if n.Left.Type == nil {
|
if n.Left().Type() == nil {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
switch n.Left.Op {
|
switch n.Left().Op() {
|
||||||
case ODOT, ODOTPTR:
|
case ir.ODOT, ir.ODOTPTR:
|
||||||
break
|
break
|
||||||
case OCALLPART:
|
case ir.OCALLPART:
|
||||||
yyerror("invalid expression %v: argument is a method value", n)
|
base.Errorf("invalid expression %v: argument is a method value", n)
|
||||||
return 0
|
return 0
|
||||||
default:
|
default:
|
||||||
yyerror("invalid expression %v", n)
|
base.Errorf("invalid expression %v", n)
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sum offsets for dots until we reach base.
|
// Sum offsets for dots until we reach sbase.
|
||||||
var v int64
|
var v int64
|
||||||
for r := n.Left; r != base; r = r.Left {
|
for r := n.Left(); r != sbase; r = r.Left() {
|
||||||
switch r.Op {
|
switch r.Op() {
|
||||||
case ODOTPTR:
|
case ir.ODOTPTR:
|
||||||
// For Offsetof(s.f), s may itself be a pointer,
|
// For Offsetof(s.f), s may itself be a pointer,
|
||||||
// but accessing f must not otherwise involve
|
// but accessing f must not otherwise involve
|
||||||
// indirection via embedded pointer types.
|
// indirection via embedded pointer types.
|
||||||
if r.Left != base {
|
if r.Left() != sbase {
|
||||||
yyerror("invalid expression %v: selector implies indirection of embedded %v", n, r.Left)
|
base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left())
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
fallthrough
|
fallthrough
|
||||||
case ODOT:
|
case ir.ODOT:
|
||||||
v += r.Xoffset
|
v += r.Offset()
|
||||||
default:
|
default:
|
||||||
Dump("unsafenmagic", n.Left)
|
ir.Dump("unsafenmagic", n.Left())
|
||||||
Fatalf("impossible %#v node after dot insertion", r.Op)
|
base.Fatalf("impossible %#v node after dot insertion", r.Op())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return v
|
return v
|
||||||
}
|
}
|
||||||
|
|
||||||
Fatalf("unexpected op %v", n.Op)
|
base.Fatalf("unexpected op %v", n.Op())
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
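
Note (illustration only, with made-up types): evalunsafe mirrors what the unsafe package exposes to user code — Alignof and Sizeof return tr.Align and tr.Width, and for Offsetof of a field promoted through a non-pointer embedded struct, the offsets of the implicit dots are summed, which is exactly the loop above.

package main

import (
	"fmt"
	"unsafe"
)

type Inner struct {
	pad [8]byte
	F   int64
}

type Outer struct {
	pad   [16]byte
	Inner // embedded: o.F is shorthand for o.Inner.F
}

func main() {
	var o Outer
	fmt.Println(unsafe.Sizeof(o), unsafe.Alignof(o)) // tr.Width and tr.Align
	// Offset of the promoted field F is the sum along the selector path.
	fmt.Println(unsafe.Offsetof(o.F))                                 // 24 with these layouts
	fmt.Println(unsafe.Offsetof(o.Inner), unsafe.Offsetof(o.Inner.F)) // 16 and 8
}
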
|
|
|
||||||
|
|
@ -8,59 +8,35 @@ import (
|
||||||
"os"
|
"os"
|
||||||
"runtime"
|
"runtime"
|
||||||
"runtime/pprof"
|
"runtime/pprof"
|
||||||
|
|
||||||
|
"cmd/compile/internal/base"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Line returns n's position as a string. If n has been inlined,
|
|
||||||
// it uses the outermost position where n has been inlined.
|
|
||||||
func (n *Node) Line() string {
|
|
||||||
return linestr(n.Pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
var atExitFuncs []func()
|
|
||||||
|
|
||||||
func atExit(f func()) {
|
|
||||||
atExitFuncs = append(atExitFuncs, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
func Exit(code int) {
|
|
||||||
for i := len(atExitFuncs) - 1; i >= 0; i-- {
|
|
||||||
f := atExitFuncs[i]
|
|
||||||
atExitFuncs = atExitFuncs[:i]
|
|
||||||
f()
|
|
||||||
}
|
|
||||||
os.Exit(code)
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
var (
|
||||||
blockprofile string
|
|
||||||
cpuprofile string
|
|
||||||
memprofile string
|
|
||||||
memprofilerate int64
|
memprofilerate int64
|
||||||
traceprofile string
|
|
||||||
traceHandler func(string)
|
traceHandler func(string)
|
||||||
mutexprofile string
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func startProfile() {
|
func startProfile() {
|
||||||
if cpuprofile != "" {
|
if base.Flag.CPUProfile != "" {
|
||||||
f, err := os.Create(cpuprofile)
|
f, err := os.Create(base.Flag.CPUProfile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
if err := pprof.StartCPUProfile(f); err != nil {
|
if err := pprof.StartCPUProfile(f); err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
atExit(pprof.StopCPUProfile)
|
base.AtExit(pprof.StopCPUProfile)
|
||||||
}
|
}
|
||||||
if memprofile != "" {
|
if base.Flag.MemProfile != "" {
|
||||||
if memprofilerate != 0 {
|
if memprofilerate != 0 {
|
||||||
runtime.MemProfileRate = int(memprofilerate)
|
runtime.MemProfileRate = int(memprofilerate)
|
||||||
}
|
}
|
||||||
f, err := os.Create(memprofile)
|
f, err := os.Create(base.Flag.MemProfile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
atExit(func() {
|
base.AtExit(func() {
|
||||||
// Profile all outstanding allocations.
|
// Profile all outstanding allocations.
|
||||||
runtime.GC()
|
runtime.GC()
|
||||||
// compilebench parses the memory profile to extract memstats,
|
// compilebench parses the memory profile to extract memstats,
|
||||||
|
|
@ -68,36 +44,36 @@ func startProfile() {
|
||||||
// See golang.org/issue/18641 and runtime/pprof/pprof.go:writeHeap.
|
// See golang.org/issue/18641 and runtime/pprof/pprof.go:writeHeap.
|
||||||
const writeLegacyFormat = 1
|
const writeLegacyFormat = 1
|
||||||
if err := pprof.Lookup("heap").WriteTo(f, writeLegacyFormat); err != nil {
|
if err := pprof.Lookup("heap").WriteTo(f, writeLegacyFormat); err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
// Not doing memory profiling; disable it entirely.
|
// Not doing memory profiling; disable it entirely.
|
||||||
runtime.MemProfileRate = 0
|
runtime.MemProfileRate = 0
|
||||||
}
|
}
|
||||||
if blockprofile != "" {
|
if base.Flag.BlockProfile != "" {
|
||||||
f, err := os.Create(blockprofile)
|
f, err := os.Create(base.Flag.BlockProfile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
runtime.SetBlockProfileRate(1)
|
runtime.SetBlockProfileRate(1)
|
||||||
atExit(func() {
|
base.AtExit(func() {
|
||||||
pprof.Lookup("block").WriteTo(f, 0)
|
pprof.Lookup("block").WriteTo(f, 0)
|
||||||
f.Close()
|
f.Close()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if mutexprofile != "" {
|
if base.Flag.MutexProfile != "" {
|
||||||
f, err := os.Create(mutexprofile)
|
f, err := os.Create(base.Flag.MutexProfile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
Fatalf("%v", err)
|
base.Fatalf("%v", err)
|
||||||
}
|
}
|
||||||
startMutexProfiling()
|
startMutexProfiling()
|
||||||
atExit(func() {
|
base.AtExit(func() {
|
||||||
pprof.Lookup("mutex").WriteTo(f, 0)
|
pprof.Lookup("mutex").WriteTo(f, 0)
|
||||||
f.Close()
|
f.Close()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if traceprofile != "" && traceHandler != nil {
|
if base.Flag.TraceProfile != "" && traceHandler != nil {
|
||||||
traceHandler(traceprofile)
|
traceHandler(base.Flag.TraceProfile)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
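
Note (stand-alone sketch): startProfile wires -cpuprofile and friends to the standard runtime/pprof calls. A minimal equivalent of the CPU-profile branch in an ordinary program, where "cpu.prof" is a hypothetical output path and the compiler would register the stop hook via base.AtExit instead of defer:

package main

import (
	"log"
	"os"
	"runtime/pprof"
)

func main() {
	f, err := os.Create("cpu.prof")
	if err != nil {
		log.Fatal(err)
	}
	if err := pprof.StartCPUProfile(f); err != nil {
		log.Fatal(err)
	}
	defer pprof.StopCPUProfile()

	// ... work to be profiled ...
}
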
|
|
|
||||||
File diff suppressed because it is too large
|
|
@ -2,7 +2,7 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
package gc
|
package ir
|
||||||
|
|
||||||
type bitset8 uint8
|
type bitset8 uint8
|
||||||
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
// Code generated by "stringer -type=Class"; DO NOT EDIT.
|
// Code generated by "stringer -type=Class"; DO NOT EDIT.
|
||||||
|
|
||||||
package gc
|
package ir
|
||||||
|
|
||||||
import "strconv"
|
import "strconv"
|
||||||
|
|
||||||
|
|
@@ -6,21 +6,23 @@
 // for debugging purposes. The code is customized for Node graphs
 // and may be used for an alternative view of the node structure.
 
-package gc
+package ir
 
 import (
-	"cmd/compile/internal/types"
-	"cmd/internal/src"
 	"fmt"
 	"io"
 	"os"
 	"reflect"
 	"regexp"
 
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/types"
+	"cmd/internal/src"
 )
 
 // dump is like fdump but prints to stderr.
-func dump(root interface{}, filter string, depth int) {
-	fdump(os.Stderr, root, filter, depth)
+func DumpAny(root interface{}, filter string, depth int) {
+	FDumpAny(os.Stderr, root, filter, depth)
 }
 
 // fdump prints the structure of a rooted data structure
@@ -40,7 +42,7 @@ func dump(root interface{}, filter string, depth int) {
 // rather than their type; struct fields with zero values or
 // non-matching field names are omitted, and "…" means recursion
 // depth has been reached or struct fields have been omitted.
-func fdump(w io.Writer, root interface{}, filter string, depth int) {
+func FDumpAny(w io.Writer, root interface{}, filter string, depth int) {
 	if root == nil {
 		fmt.Fprintln(w, "nil")
 		return
@@ -146,11 +148,8 @@ func (p *dumper) dump(x reflect.Value, depth int) {
 		x = reflect.ValueOf(v.Slice())
 
 	case src.XPos:
-		p.printf("%s", linestr(v))
+		p.printf("%s", base.FmtPos(v))
 		return
 
-	case *types.Node:
-		x = reflect.ValueOf(asNode(v))
 	}
 
 	switch x.Kind() {
@@ -201,9 +200,9 @@ func (p *dumper) dump(x reflect.Value, depth int) {
 	typ := x.Type()
 
 	isNode := false
-	if n, ok := x.Interface().(Node); ok {
+	if n, ok := x.Interface().(node); ok {
 		isNode = true
-		p.printf("%s %s {", n.Op.String(), p.addr(x))
+		p.printf("%s %s {", n.op.String(), p.addr(x))
 	} else {
 		p.printf("%s {", typ)
 	}
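The DumpAny/FDumpAny dumper above walks arbitrary node graphs with reflection and a regexp field filter. As a rough, self-contained sketch of that general technique only (the helper, its field-filter behavior, and the example type below are invented for illustration and are not the compiler's code):

    package main

    import (
        "fmt"
        "os"
        "reflect"
        "regexp"
    )

    // fdumpAny prints the exported fields of a struct, recursively up to depth,
    // keeping only fields whose names match filter (in the spirit of ir.FDumpAny).
    func fdumpAny(root interface{}, filter string, depth int) {
        re := regexp.MustCompile(filter)
        var walk func(v reflect.Value, indent, depth int)
        walk = func(v reflect.Value, indent, depth int) {
            if depth == 0 || !v.IsValid() {
                return
            }
            for v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface {
                if v.IsNil() {
                    return
                }
                v = v.Elem()
            }
            if v.Kind() != reflect.Struct {
                // Leaf value: print it at the current indentation.
                fmt.Fprintf(os.Stderr, "%*s%v\n", indent, "", v.Interface())
                return
            }
            t := v.Type()
            for i := 0; i < t.NumField(); i++ {
                f := t.Field(i)
                // Skip unexported fields and names the filter rejects.
                if f.PkgPath != "" || !re.MatchString(f.Name) {
                    continue
                }
                fmt.Fprintf(os.Stderr, "%*s%s:\n", indent, "", f.Name)
                walk(v.Field(i), indent+2, depth-1)
            }
        }
        walk(reflect.ValueOf(root), 0, depth)
    }

    type example struct {
        Name  string
        Inner *example
    }

    func main() {
        fdumpAny(&example{Name: "root", Inner: &example{Name: "child"}}, ".", 3)
    }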
[File diff suppressed because it is too large]

src/cmd/compile/internal/ir/ir.go (new file, 12 lines)
@@ -0,0 +1,12 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ir
+
+import "cmd/compile/internal/types"
+
+var LocalPkg *types.Pkg // package being compiled
+
+// builtinpkg is a fake package that declares the universe block.
+var BuiltinPkg *types.Pkg
[File diff suppressed because it is too large]

src/cmd/compile/internal/ir/op_string.go (new file, 177 lines)
@@ -0,0 +1,177 @@
+// Code generated by "stringer -type=Op -trimprefix=O"; DO NOT EDIT.
+
+package ir
+
+import "strconv"
+
+func _() {
+	// An "invalid array index" compiler error signifies that the constant values have changed.
+	// Re-run the stringer command to generate them again.
+	var x [1]struct{}
+	_ = x[OXXX-0]
+	_ = x[ONAME-1]
+	_ = x[ONONAME-2]
+	_ = x[OTYPE-3]
+	_ = x[OPACK-4]
+	_ = x[OLITERAL-5]
+	_ = x[ONIL-6]
+	_ = x[OADD-7]
+	_ = x[OSUB-8]
+	_ = x[OOR-9]
+	_ = x[OXOR-10]
+	_ = x[OADDSTR-11]
+	_ = x[OADDR-12]
+	_ = x[OANDAND-13]
+	_ = x[OAPPEND-14]
+	_ = x[OBYTES2STR-15]
+	_ = x[OBYTES2STRTMP-16]
+	_ = x[ORUNES2STR-17]
+	_ = x[OSTR2BYTES-18]
+	_ = x[OSTR2BYTESTMP-19]
+	_ = x[OSTR2RUNES-20]
+	_ = x[OAS-21]
+	_ = x[OAS2-22]
+	_ = x[OAS2DOTTYPE-23]
+	_ = x[OAS2FUNC-24]
+	_ = x[OAS2MAPR-25]
+	_ = x[OAS2RECV-26]
+	_ = x[OASOP-27]
+	_ = x[OCALL-28]
+	_ = x[OCALLFUNC-29]
+	_ = x[OCALLMETH-30]
+	_ = x[OCALLINTER-31]
+	_ = x[OCALLPART-32]
+	_ = x[OCAP-33]
+	_ = x[OCLOSE-34]
+	_ = x[OCLOSURE-35]
+	_ = x[OCOMPLIT-36]
+	_ = x[OMAPLIT-37]
+	_ = x[OSTRUCTLIT-38]
+	_ = x[OARRAYLIT-39]
+	_ = x[OSLICELIT-40]
+	_ = x[OPTRLIT-41]
+	_ = x[OCONV-42]
+	_ = x[OCONVIFACE-43]
+	_ = x[OCONVNOP-44]
+	_ = x[OCOPY-45]
+	_ = x[ODCL-46]
+	_ = x[ODCLFUNC-47]
+	_ = x[ODCLFIELD-48]
+	_ = x[ODCLCONST-49]
+	_ = x[ODCLTYPE-50]
+	_ = x[ODELETE-51]
+	_ = x[ODOT-52]
+	_ = x[ODOTPTR-53]
+	_ = x[ODOTMETH-54]
+	_ = x[ODOTINTER-55]
+	_ = x[OXDOT-56]
+	_ = x[ODOTTYPE-57]
+	_ = x[ODOTTYPE2-58]
+	_ = x[OEQ-59]
+	_ = x[ONE-60]
+	_ = x[OLT-61]
+	_ = x[OLE-62]
+	_ = x[OGE-63]
+	_ = x[OGT-64]
+	_ = x[ODEREF-65]
+	_ = x[OINDEX-66]
+	_ = x[OINDEXMAP-67]
+	_ = x[OKEY-68]
+	_ = x[OSTRUCTKEY-69]
+	_ = x[OLEN-70]
+	_ = x[OMAKE-71]
+	_ = x[OMAKECHAN-72]
+	_ = x[OMAKEMAP-73]
+	_ = x[OMAKESLICE-74]
+	_ = x[OMAKESLICECOPY-75]
+	_ = x[OMUL-76]
+	_ = x[ODIV-77]
+	_ = x[OMOD-78]
+	_ = x[OLSH-79]
+	_ = x[ORSH-80]
+	_ = x[OAND-81]
+	_ = x[OANDNOT-82]
+	_ = x[ONEW-83]
+	_ = x[ONEWOBJ-84]
+	_ = x[ONOT-85]
+	_ = x[OBITNOT-86]
+	_ = x[OPLUS-87]
+	_ = x[ONEG-88]
+	_ = x[OOROR-89]
+	_ = x[OPANIC-90]
+	_ = x[OPRINT-91]
+	_ = x[OPRINTN-92]
+	_ = x[OPAREN-93]
+	_ = x[OSEND-94]
+	_ = x[OSLICE-95]
+	_ = x[OSLICEARR-96]
+	_ = x[OSLICESTR-97]
+	_ = x[OSLICE3-98]
+	_ = x[OSLICE3ARR-99]
+	_ = x[OSLICEHEADER-100]
+	_ = x[ORECOVER-101]
+	_ = x[ORECV-102]
+	_ = x[ORUNESTR-103]
+	_ = x[OSELRECV-104]
+	_ = x[OSELRECV2-105]
+	_ = x[OIOTA-106]
+	_ = x[OREAL-107]
+	_ = x[OIMAG-108]
+	_ = x[OCOMPLEX-109]
+	_ = x[OALIGNOF-110]
+	_ = x[OOFFSETOF-111]
+	_ = x[OSIZEOF-112]
+	_ = x[OMETHEXPR-113]
+	_ = x[OBLOCK-114]
+	_ = x[OBREAK-115]
+	_ = x[OCASE-116]
+	_ = x[OCONTINUE-117]
+	_ = x[ODEFER-118]
+	_ = x[OEMPTY-119]
+	_ = x[OFALL-120]
+	_ = x[OFOR-121]
+	_ = x[OFORUNTIL-122]
+	_ = x[OGOTO-123]
+	_ = x[OIF-124]
+	_ = x[OLABEL-125]
+	_ = x[OGO-126]
+	_ = x[ORANGE-127]
+	_ = x[ORETURN-128]
+	_ = x[OSELECT-129]
+	_ = x[OSWITCH-130]
+	_ = x[OTYPESW-131]
+	_ = x[OTCHAN-132]
+	_ = x[OTMAP-133]
+	_ = x[OTSTRUCT-134]
+	_ = x[OTINTER-135]
+	_ = x[OTFUNC-136]
+	_ = x[OTARRAY-137]
+	_ = x[ODDD-138]
+	_ = x[OINLCALL-139]
+	_ = x[OEFACE-140]
+	_ = x[OITAB-141]
+	_ = x[OIDATA-142]
+	_ = x[OSPTR-143]
+	_ = x[OCLOSUREVAR-144]
+	_ = x[OCFUNC-145]
+	_ = x[OCHECKNIL-146]
+	_ = x[OVARDEF-147]
+	_ = x[OVARKILL-148]
+	_ = x[OVARLIVE-149]
+	_ = x[ORESULT-150]
+	_ = x[OINLMARK-151]
+	_ = x[ORETJMP-152]
+	_ = x[OGETG-153]
+	_ = x[OEND-154]
+}
+
+const _Op_name = "XXXNAMENONAMETYPEPACKLITERALNILADDSUBORXORADDSTRADDRANDANDAPPENDBYTES2STRBYTES2STRTMPRUNES2STRSTR2BYTESSTR2BYTESTMPSTR2RUNESASAS2AS2DOTTYPEAS2FUNCAS2MAPRAS2RECVASOPCALLCALLFUNCCALLMETHCALLINTERCALLPARTCAPCLOSECLOSURECOMPLITMAPLITSTRUCTLITARRAYLITSLICELITPTRLITCONVCONVIFACECONVNOPCOPYDCLDCLFUNCDCLFIELDDCLCONSTDCLTYPEDELETEDOTDOTPTRDOTMETHDOTINTERXDOTDOTTYPEDOTTYPE2EQNELTLEGEGTDEREFINDEXINDEXMAPKEYSTRUCTKEYLENMAKEMAKECHANMAKEMAPMAKESLICEMAKESLICECOPYMULDIVMODLSHRSHANDANDNOTNEWNEWOBJNOTBITNOTPLUSNEGORORPANICPRINTPRINTNPARENSENDSLICESLICEARRSLICESTRSLICE3SLICE3ARRSLICEHEADERRECOVERRECVRUNESTRSELRECVSELRECV2IOTAREALIMAGCOMPLEXALIGNOFOFFSETOFSIZEOFMETHEXPRBLOCKBREAKCASECONTINUEDEFEREMPTYFALLFORFORUNTILGOTOIFLABELGORANGERETURNSELECTSWITCHTYPESWTCHANTMAPTSTRUCTTINTERTFUNCTARRAYDDDINLCALLEFACEITABIDATASPTRCLOSUREVARCFUNCCHECKNILVARDEFVARKILLVARLIVERESULTINLMARKRETJMPGETGEND"
+
+var _Op_index = [...]uint16{0, 3, 7, 13, 17, 21, 28, 31, 34, 37, 39, 42, 48, 52, 58, 64, 73, 85, 94, 103, 115, 124, 126, 129, 139, 146, 153, 160, 164, 168, 176, 184, 193, 201, 204, 209, 216, 223, 229, 238, 246, 254, 260, 264, 273, 280, 284, 287, 294, 302, 310, 317, 323, 326, 332, 339, 347, 351, 358, 366, 368, 370, 372, 374, 376, 378, 383, 388, 396, 399, 408, 411, 415, 423, 430, 439, 452, 455, 458, 461, 464, 467, 470, 476, 479, 485, 488, 494, 498, 501, 505, 510, 515, 521, 526, 530, 535, 543, 551, 557, 566, 577, 584, 588, 595, 602, 610, 614, 618, 622, 629, 636, 644, 650, 658, 663, 668, 672, 680, 685, 690, 694, 697, 705, 709, 711, 716, 718, 723, 729, 735, 741, 747, 752, 756, 763, 769, 774, 780, 783, 790, 795, 799, 804, 808, 818, 823, 831, 837, 844, 851, 857, 864, 870, 874, 877}
+
+func (i Op) String() string {
+	if i >= Op(len(_Op_index)-1) {
+		return "Op(" + strconv.FormatInt(int64(i), 10) + ")"
+	}
+	return _Op_name[_Op_index[i]:_Op_index[i+1]]
+}
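op_string.go above is plain stringer output: every Op name is packed into one string, _Op_index records where each name starts, and the func _() block turns any renumbering of the constants into a compile error. A hand-written miniature of the same layout (a toy Color type, not the compiler's Op) for reference:

    package main

    import (
        "fmt"
        "strconv"
    )

    type Color int

    const (
        Red Color = iota
        Green
        Blue
    )

    // Same layout stringer generates: one packed name string plus start offsets.
    const _Color_name = "RedGreenBlue"

    var _Color_index = [...]uint8{0, 3, 8, 12}

    func (c Color) String() string {
        if c < 0 || int(c) >= len(_Color_index)-1 {
            return "Color(" + strconv.FormatInt(int64(c), 10) + ")"
        }
        return _Color_name[_Color_index[c]:_Color_index[c+1]]
    }

    func main() {
        fmt.Println(Red, Green, Blue, Color(7)) // Red Green Blue Color(7)
    }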
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-package gc
+package ir
 
 import (
 	"reflect"
@@ -20,10 +20,10 @@ func TestSizeof(t *testing.T) {
 		_32bit uintptr // size on 32bit platforms
 		_64bit uintptr // size on 64bit platforms
 	}{
-		{Func{}, 124, 224},
-		{Name{}, 32, 56},
-		{Param{}, 24, 48},
-		{Node{}, 76, 128},
+		{Func{}, 152, 280},
+		{Name{}, 44, 80},
+		{Param{}, 44, 88},
+		{node{}, 88, 152},
 	}
 
 	for _, tt := range tests {
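The updated Sizeof expectations above track the larger ir.Func, Name, Param, and node layouts. The guard pattern itself is just unsafe.Sizeof compared against per-architecture constants; a toy version with a made-up struct and made-up numbers:

    package main

    import (
        "fmt"
        "unsafe"
    )

    type small struct {
        flags uint8
        pos   int32
        link  *small
    }

    func main() {
        // On 64-bit targets this struct is 16 bytes (4-byte int32 after padding,
        // then an 8-byte pointer); a table-driven test like TestSizeof compares
        // against expected 32-bit and 64-bit sizes and fails on unplanned growth.
        fmt.Println(unsafe.Sizeof(small{}), unsafe.Alignof(small{}))
    }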
src/cmd/compile/internal/ir/val.go (new file, 120 lines)
@@ -0,0 +1,120 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ir
+
+import (
+	"go/constant"
+	"math"
+
+	"cmd/compile/internal/base"
+	"cmd/compile/internal/types"
+)
+
+func ConstType(n Node) constant.Kind {
+	if n == nil || n.Op() != OLITERAL {
+		return constant.Unknown
+	}
+	return n.Val().Kind()
+}
+
+// ValueInterface returns the constant value stored in n as an interface{}.
+// It returns int64s for ints and runes, float64s for floats,
+// and complex128s for complex values.
+func ConstValue(n Node) interface{} {
+	switch v := n.Val(); v.Kind() {
+	default:
+		base.Fatalf("unexpected constant: %v", v)
+		panic("unreachable")
+	case constant.Bool:
+		return constant.BoolVal(v)
+	case constant.String:
+		return constant.StringVal(v)
+	case constant.Int:
+		return Int64Val(n.Type(), v)
+	case constant.Float:
+		return Float64Val(v)
+	case constant.Complex:
+		return complex(Float64Val(constant.Real(v)), Float64Val(constant.Imag(v)))
+	}
+}
+
+// int64Val returns v converted to int64.
+// Note: if t is uint64, very large values will be converted to negative int64.
+func Int64Val(t *types.Type, v constant.Value) int64 {
+	if t.IsUnsigned() {
+		if x, ok := constant.Uint64Val(v); ok {
+			return int64(x)
+		}
+	} else {
+		if x, ok := constant.Int64Val(v); ok {
+			return x
+		}
+	}
+	base.Fatalf("%v out of range for %v", v, t)
+	panic("unreachable")
+}
+
+func Float64Val(v constant.Value) float64 {
+	if x, _ := constant.Float64Val(v); !math.IsInf(x, 0) {
+		return x + 0 // avoid -0 (should not be needed, but be conservative)
+	}
+	base.Fatalf("bad float64 value: %v", v)
+	panic("unreachable")
+}
+
+func AssertValidTypeForConst(t *types.Type, v constant.Value) {
+	if !ValidTypeForConst(t, v) {
+		base.Fatalf("%v does not represent %v", t, v)
+	}
+}
+
+func ValidTypeForConst(t *types.Type, v constant.Value) bool {
+	switch v.Kind() {
+	case constant.Unknown:
+		return OKForConst[t.Etype]
+	case constant.Bool:
+		return t.IsBoolean()
+	case constant.String:
+		return t.IsString()
+	case constant.Int:
+		return t.IsInteger()
+	case constant.Float:
+		return t.IsFloat()
+	case constant.Complex:
+		return t.IsComplex()
+	}
+
+	base.Fatalf("unexpected constant kind: %v", v)
+	panic("unreachable")
+}
+
+// nodlit returns a new untyped constant with value v.
+func NewLiteral(v constant.Value) Node {
+	n := Nod(OLITERAL, nil, nil)
+	if k := v.Kind(); k != constant.Unknown {
+		n.SetType(idealType(k))
+		n.SetVal(v)
+	}
+	return n
+}
+
+func idealType(ct constant.Kind) *types.Type {
+	switch ct {
+	case constant.String:
+		return types.UntypedString
+	case constant.Bool:
+		return types.UntypedBool
+	case constant.Int:
+		return types.UntypedInt
+	case constant.Float:
+		return types.UntypedFloat
+	case constant.Complex:
+		return types.UntypedComplex
+	}
+	base.Fatalf("unexpected Ctype: %v", ct)
+	return nil
+}
+
+var OKForConst [types.NTYPE]bool
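The helpers in the new val.go are thin wrappers over the standard go/constant package and live inside cmd/compile, so they cannot be imported directly. The sketch below only exercises the underlying go/constant calls they rely on; it is illustration, not part of this commit:

    package main

    import (
        "fmt"
        "go/constant"
    )

    func main() {
        // Untyped constants of the kinds ConstValue switches on.
        b := constant.MakeBool(true)
        s := constant.MakeString("hi")
        i := constant.MakeInt64(42)
        f := constant.MakeFloat64(1.5)

        fmt.Println(b.Kind(), constant.BoolVal(b))   // Bool true
        fmt.Println(s.Kind(), constant.StringVal(s)) // String hi

        // Int64Val reports whether the value fits in an int64, which is the
        // overflow check ir.Int64Val builds on.
        if x, ok := constant.Int64Val(i); ok {
            fmt.Println(i.Kind(), x) // Int 42
        }

        // Float64Val returns the nearest float64, as ir.Float64Val does.
        if x, exact := constant.Float64Val(f); exact {
            fmt.Println(f.Kind(), x) // Float 1.5
        }
    }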
@@ -5,6 +5,7 @@
 package mips
 
 import (
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
 	"cmd/internal/obj"
 	"cmd/internal/obj/mips"
@@ -18,7 +19,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
 	}
 	if cnt < int64(4*gc.Widthptr) {
 		for i := int64(0); i < cnt; i += int64(gc.Widthptr) {
-			p = pp.Appendpp(p, mips.AMOVW, obj.TYPE_REG, mips.REGZERO, 0, obj.TYPE_MEM, mips.REGSP, gc.Ctxt.FixedFrameSize()+off+i)
+			p = pp.Appendpp(p, mips.AMOVW, obj.TYPE_REG, mips.REGZERO, 0, obj.TYPE_MEM, mips.REGSP, base.Ctxt.FixedFrameSize()+off+i)
 		}
 	} else {
 		//fmt.Printf("zerorange frame:%v, lo: %v, hi:%v \n", frame ,lo, hi)
@@ -28,7 +29,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
 		// MOVW R0, (Widthptr)r1
 		// ADD $Widthptr, r1
 		// BNE r1, r2, loop
-		p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-4, obj.TYPE_REG, mips.REGRT1, 0)
+		p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-4, obj.TYPE_REG, mips.REGRT1, 0)
 		p.Reg = mips.REGSP
 		p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, cnt, obj.TYPE_REG, mips.REGRT2, 0)
 		p.Reg = mips.REGRT1
@@ -7,7 +7,9 @@ package mips
 import (
 	"math"
 
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/logopt"
 	"cmd/compile/internal/ssa"
 	"cmd/compile/internal/types"
@@ -287,7 +289,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		case *obj.LSym:
 			wantreg = "SB"
 			gc.AddAux(&p.From, v)
-		case *gc.Node:
+		case ir.Node:
 			wantreg = "SP"
 			gc.AddAux(&p.From, v)
 		case nil:
@@ -766,8 +768,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		if logopt.Enabled() {
 			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 		}
-		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
-			gc.Warnl(v.Pos, "generated nil check")
+		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+			base.WarnfAt(v.Pos, "generated nil check")
 		}
 	case ssa.OpMIPSFPFlagTrue,
 		ssa.OpMIPSFPFlagFalse:
@@ -796,7 +798,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		// caller's SP is FixedFrameSize below the address of the first arg
 		p := s.Prog(mips.AMOVW)
 		p.From.Type = obj.TYPE_ADDR
-		p.From.Offset = -gc.Ctxt.FixedFrameSize()
+		p.From.Offset = -base.Ctxt.FixedFrameSize()
 		p.From.Name = obj.NAME_PARAM
 		p.To.Type = obj.TYPE_REG
 		p.To.Reg = v.Reg()
@@ -7,7 +7,9 @@ package mips64
 import (
 	"math"
 
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/logopt"
 	"cmd/compile/internal/ssa"
 	"cmd/compile/internal/types"
@@ -261,7 +263,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		case *obj.LSym:
 			wantreg = "SB"
 			gc.AddAux(&p.From, v)
-		case *gc.Node:
+		case ir.Node:
 			wantreg = "SP"
 			gc.AddAux(&p.From, v)
 		case nil:
@@ -724,8 +726,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		if logopt.Enabled() {
 			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 		}
-		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
-			gc.Warnl(v.Pos, "generated nil check")
+		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+			base.WarnfAt(v.Pos, "generated nil check")
 		}
 	case ssa.OpMIPS64FPFlagTrue,
 		ssa.OpMIPS64FPFlagFalse:
@@ -757,7 +759,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		// caller's SP is FixedFrameSize below the address of the first arg
 		p := s.Prog(mips.AMOVV)
 		p.From.Type = obj.TYPE_ADDR
-		p.From.Offset = -gc.Ctxt.FixedFrameSize()
+		p.From.Offset = -base.Ctxt.FixedFrameSize()
 		p.From.Name = obj.NAME_PARAM
 		p.To.Type = obj.TYPE_REG
 		p.To.Reg = v.Reg()
@@ -5,6 +5,7 @@
 package ppc64
 
 import (
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
 	"cmd/internal/obj"
 	"cmd/internal/obj/ppc64"
@@ -16,17 +17,17 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
 	}
 	if cnt < int64(4*gc.Widthptr) {
 		for i := int64(0); i < cnt; i += int64(gc.Widthptr) {
-			p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_REG, ppc64.REGZERO, 0, obj.TYPE_MEM, ppc64.REGSP, gc.Ctxt.FixedFrameSize()+off+i)
+			p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_REG, ppc64.REGZERO, 0, obj.TYPE_MEM, ppc64.REGSP, base.Ctxt.FixedFrameSize()+off+i)
 		}
 	} else if cnt <= int64(128*gc.Widthptr) {
-		p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGRT1, 0)
+		p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGRT1, 0)
 		p.Reg = ppc64.REGSP
 		p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
 		p.To.Name = obj.NAME_EXTERN
 		p.To.Sym = gc.Duffzero
 		p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr))
 	} else {
-		p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0)
+		p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0)
 		p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_REG, ppc64.REGTMP, 0, obj.TYPE_REG, ppc64.REGRT1, 0)
 		p.Reg = ppc64.REGSP
 		p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, cnt, obj.TYPE_REG, ppc64.REGTMP, 0)
@@ -66,7 +67,7 @@ func ginsnopdefer(pp *gc.Progs) *obj.Prog {
 	// on ppc64 in both shared and non-shared modes.
 
 	ginsnop(pp)
-	if gc.Ctxt.Flag_shared {
+	if base.Ctxt.Flag_shared {
 		p := pp.Prog(ppc64.AMOVD)
 		p.From.Type = obj.TYPE_MEM
 		p.From.Offset = 24
@@ -5,7 +5,9 @@
 package ppc64
 
 import (
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/logopt"
 	"cmd/compile/internal/ssa"
 	"cmd/compile/internal/types"
@@ -473,7 +475,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		// caller's SP is FixedFrameSize below the address of the first arg
 		p := s.Prog(ppc64.AMOVD)
 		p.From.Type = obj.TYPE_ADDR
-		p.From.Offset = -gc.Ctxt.FixedFrameSize()
+		p.From.Offset = -base.Ctxt.FixedFrameSize()
 		p.From.Name = obj.NAME_PARAM
 		p.To.Type = obj.TYPE_REG
 		p.To.Reg = v.Reg()
@@ -750,7 +752,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 			p.To.Reg = v.Reg()
 		}
 
-	case *obj.LSym, *gc.Node:
+	case *obj.LSym, ir.Node:
 		p := s.Prog(ppc64.AMOVD)
 		p.From.Type = obj.TYPE_ADDR
 		p.From.Reg = v.Args[0].Reg()
@@ -1784,7 +1786,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		// Insert a hint this is not a subroutine return.
 		pp.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: 1})
 
-		if gc.Ctxt.Flag_shared {
+		if base.Ctxt.Flag_shared {
 			// When compiling Go into PIC, the function we just
 			// called via pointer might have been implemented in
 			// a separate module and so overwritten the TOC
@@ -1852,8 +1854,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		if logopt.Enabled() {
 			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 		}
-		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
-			gc.Warnl(v.Pos, "generated nil check")
+		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+			base.WarnfAt(v.Pos, "generated nil check")
 		}
 
 	// These should be resolved by rules and not make it here.
@@ -5,6 +5,7 @@
 package riscv64
 
 import (
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
 	"cmd/internal/obj"
 	"cmd/internal/obj/riscv"
@@ -16,7 +17,7 @@ func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
 	}
 
 	// Adjust the frame to account for LR.
-	off += gc.Ctxt.FixedFrameSize()
+	off += base.Ctxt.FixedFrameSize()
 
 	if cnt < int64(4*gc.Widthptr) {
 		for i := int64(0); i < cnt; i += int64(gc.Widthptr) {
@@ -5,7 +5,9 @@
 package riscv64
 
 import (
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/ssa"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
@@ -91,7 +93,7 @@ func loadByType(t *types.Type) obj.As {
 		case 8:
 			return riscv.AMOVD
 		default:
-			gc.Fatalf("unknown float width for load %d in type %v", width, t)
+			base.Fatalf("unknown float width for load %d in type %v", width, t)
 			return 0
 		}
 	}
@@ -118,7 +120,7 @@ func loadByType(t *types.Type) obj.As {
 	case 8:
 		return riscv.AMOV
 	default:
-		gc.Fatalf("unknown width for load %d in type %v", width, t)
+		base.Fatalf("unknown width for load %d in type %v", width, t)
 		return 0
 	}
 }
@@ -134,7 +136,7 @@ func storeByType(t *types.Type) obj.As {
 		case 8:
 			return riscv.AMOVD
 		default:
-			gc.Fatalf("unknown float width for store %d in type %v", width, t)
+			base.Fatalf("unknown float width for store %d in type %v", width, t)
 			return 0
 		}
 	}
@@ -149,7 +151,7 @@ func storeByType(t *types.Type) obj.As {
 	case 8:
 		return riscv.AMOV
 	default:
-		gc.Fatalf("unknown width for store %d in type %v", width, t)
+		base.Fatalf("unknown width for store %d in type %v", width, t)
 		return 0
 	}
 }
@@ -322,7 +324,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		case *obj.LSym:
 			wantreg = "SB"
 			gc.AddAux(&p.From, v)
-		case *gc.Node:
+		case ir.Node:
 			wantreg = "SP"
 			gc.AddAux(&p.From, v)
 		case nil:
@@ -586,8 +588,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		gc.AddAux(&p.From, v)
 		p.To.Type = obj.TYPE_REG
 		p.To.Reg = riscv.REG_ZERO
-		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos == 1 in generated wrappers
-			gc.Warnl(v.Pos, "generated nil check")
+		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos == 1 in generated wrappers
+			base.WarnfAt(v.Pos, "generated nil check")
 		}
 
 	case ssa.OpRISCV64LoweredGetClosurePtr:
@@ -598,7 +600,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		// caller's SP is FixedFrameSize below the address of the first arg
 		p := s.Prog(riscv.AMOV)
 		p.From.Type = obj.TYPE_ADDR
-		p.From.Offset = -gc.Ctxt.FixedFrameSize()
+		p.From.Offset = -base.Ctxt.FixedFrameSize()
 		p.From.Name = obj.NAME_PARAM
 		p.To.Type = obj.TYPE_REG
 		p.To.Reg = v.Reg()
@@ -5,6 +5,7 @@
 package s390x
 
 import (
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
 	"cmd/internal/obj"
 	"cmd/internal/obj/s390x"
@@ -23,7 +24,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
 	}
 
 	// Adjust the frame to account for LR.
-	off += gc.Ctxt.FixedFrameSize()
+	off += base.Ctxt.FixedFrameSize()
 	reg := int16(s390x.REGSP)
 
 	// If the off cannot fit in a 12-bit unsigned displacement then we
@@ -7,6 +7,7 @@ package s390x
 import (
 	"math"
 
+	"cmd/compile/internal/base"
 	"cmd/compile/internal/gc"
 	"cmd/compile/internal/logopt"
 	"cmd/compile/internal/ssa"
@@ -573,7 +574,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		// caller's SP is FixedFrameSize below the address of the first arg
 		p := s.Prog(s390x.AMOVD)
 		p.From.Type = obj.TYPE_ADDR
-		p.From.Offset = -gc.Ctxt.FixedFrameSize()
+		p.From.Offset = -base.Ctxt.FixedFrameSize()
 		p.From.Name = obj.NAME_PARAM
 		p.To.Type = obj.TYPE_REG
 		p.To.Reg = v.Reg()
@@ -642,8 +643,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
 		if logopt.Enabled() {
 			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
 		}
-		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
-			gc.Warnl(v.Pos, "generated nil check")
+		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+			base.WarnfAt(v.Pos, "generated nil check")
 		}
 	case ssa.OpS390XMVC:
 		vo := v.AuxValAndOff()
@@ -5,6 +5,7 @@
 package ssa
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 	"cmd/internal/objabi"
@@ -138,7 +139,7 @@ type Frontend interface {
 
 	// Auto returns a Node for an auto variable of the given type.
 	// The SSA compiler uses this function to allocate space for spills.
-	Auto(src.XPos, *types.Type) GCNode
+	Auto(src.XPos, *types.Type) ir.Node
 
 	// Given the name for a compound type, returns the name we should use
 	// for the parts of that compound type.
@@ -178,24 +179,6 @@ type Frontend interface {
 	MyImportPath() string
 }
 
-// interface used to hold a *gc.Node (a stack variable).
-// We'd use *gc.Node directly but that would lead to an import cycle.
-type GCNode interface {
-	Typ() *types.Type
-	String() string
-	IsSynthetic() bool
-	IsAutoTmp() bool
-	StorageClass() StorageClass
-}
-
-type StorageClass uint8
-
-const (
-	ClassAuto StorageClass = iota // local stack variable
-	ClassParam // argument
-	ClassParamOut // return value
-)
-
 const go116lateCallExpansion = true
 
 // LateCallExpansionEnabledWithin returns true if late call expansion should be tested
@@ -5,6 +5,7 @@
 package ssa
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"cmd/internal/src"
 )
@@ -136,9 +137,9 @@ func dse(f *Func) {
 // reaches stores then we delete all the stores. The other operations will then
 // be eliminated by the dead code elimination pass.
 func elimDeadAutosGeneric(f *Func) {
-	addr := make(map[*Value]GCNode) // values that the address of the auto reaches
-	elim := make(map[*Value]GCNode) // values that could be eliminated if the auto is
-	used := make(map[GCNode]bool)   // used autos that must be kept
+	addr := make(map[*Value]ir.Node) // values that the address of the auto reaches
+	elim := make(map[*Value]ir.Node) // values that could be eliminated if the auto is
+	used := make(map[ir.Node]bool)   // used autos that must be kept
 
 	// visit the value and report whether any of the maps are updated
 	visit := func(v *Value) (changed bool) {
@@ -146,8 +147,8 @@ func elimDeadAutosGeneric(f *Func) {
 		switch v.Op {
 		case OpAddr, OpLocalAddr:
 			// Propagate the address if it points to an auto.
-			n, ok := v.Aux.(GCNode)
-			if !ok || n.StorageClass() != ClassAuto {
+			n, ok := v.Aux.(ir.Node)
+			if !ok || n.Class() != ir.PAUTO {
 				return
 			}
 			if addr[v] == nil {
@@ -157,8 +158,8 @@ func elimDeadAutosGeneric(f *Func) {
 			return
 		case OpVarDef, OpVarKill:
 			// v should be eliminated if we eliminate the auto.
-			n, ok := v.Aux.(GCNode)
-			if !ok || n.StorageClass() != ClassAuto {
+			n, ok := v.Aux.(ir.Node)
+			if !ok || n.Class() != ir.PAUTO {
 				return
 			}
 			if elim[v] == nil {
@@ -173,8 +174,8 @@ func elimDeadAutosGeneric(f *Func) {
 			// for open-coded defers from being removed (since they
 			// may not be used by the inline code, but will be used by
 			// panic processing).
-			n, ok := v.Aux.(GCNode)
-			if !ok || n.StorageClass() != ClassAuto {
+			n, ok := v.Aux.(ir.Node)
+			if !ok || n.Class() != ir.PAUTO {
 				return
 			}
 			if !used[n] {
@@ -221,7 +222,7 @@ func elimDeadAutosGeneric(f *Func) {
 		}
 
 		// Propagate any auto addresses through v.
-		node := GCNode(nil)
+		var node ir.Node
 		for _, a := range args {
 			if n, ok := addr[a]; ok && !used[n] {
 				if node == nil {
@@ -298,15 +299,15 @@ func elimUnreadAutos(f *Func) {
 	// Loop over all ops that affect autos taking note of which
 	// autos we need and also stores that we might be able to
 	// eliminate.
-	seen := make(map[GCNode]bool)
+	seen := make(map[ir.Node]bool)
 	var stores []*Value
 	for _, b := range f.Blocks {
 		for _, v := range b.Values {
-			n, ok := v.Aux.(GCNode)
+			n, ok := v.Aux.(ir.Node)
 			if !ok {
 				continue
 			}
-			if n.StorageClass() != ClassAuto {
+			if n.Class() != ir.PAUTO {
 				continue
 			}
 
@@ -334,7 +335,7 @@ func elimUnreadAutos(f *Func) {
 
 	// Eliminate stores to unread autos.
 	for _, store := range stores {
-		n, _ := store.Aux.(GCNode)
+		n, _ := store.Aux.(ir.Node)
 		if seen[n] {
 			continue
 		}
@@ -5,6 +5,7 @@
 package ssa
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/internal/dwarf"
 	"cmd/internal/obj"
 	"encoding/hex"
@@ -24,7 +25,7 @@ type FuncDebug struct {
 	// Slots is all the slots used in the debug info, indexed by their SlotID.
 	Slots []LocalSlot
 	// The user variables, indexed by VarID.
-	Vars []GCNode
+	Vars []ir.Node
 	// The slots that make up each variable, indexed by VarID.
 	VarSlots [][]SlotID
 	// The location list data, indexed by VarID. Must be processed by PutLocationList.
@@ -165,7 +166,7 @@ func (s *debugState) logf(msg string, args ...interface{}) {
 type debugState struct {
 	// See FuncDebug.
 	slots []LocalSlot
-	vars []GCNode
+	vars []ir.Node
 	varSlots [][]SlotID
 	lists [][]byte
 
@@ -189,7 +190,7 @@ type debugState struct {
 	// The pending location list entry for each user variable, indexed by VarID.
 	pendingEntries []pendingEntry
 
-	varParts map[GCNode][]SlotID
+	varParts map[ir.Node][]SlotID
 	blockDebug []BlockDebug
 	pendingSlotLocs []VarLoc
 	liveSlots []liveSlot
@@ -346,7 +347,7 @@ func BuildFuncDebug(ctxt *obj.Link, f *Func, loggingEnabled bool, stackOffset fu
 	}
 
 	if state.varParts == nil {
-		state.varParts = make(map[GCNode][]SlotID)
+		state.varParts = make(map[ir.Node][]SlotID)
 	} else {
 		for n := range state.varParts {
 			delete(state.varParts, n)
@@ -360,7 +361,7 @@ func BuildFuncDebug(ctxt *obj.Link, f *Func, loggingEnabled bool, stackOffset fu
 	state.vars = state.vars[:0]
 	for i, slot := range f.Names {
 		state.slots = append(state.slots, slot)
-		if slot.N.IsSynthetic() {
+		if ir.IsSynthetic(slot.N) {
 			continue
 		}
 
@@ -379,8 +380,8 @@ func BuildFuncDebug(ctxt *obj.Link, f *Func, loggingEnabled bool, stackOffset fu
 	for _, b := range f.Blocks {
 		for _, v := range b.Values {
 			if v.Op == OpVarDef || v.Op == OpVarKill {
-				n := v.Aux.(GCNode)
-				if n.IsSynthetic() {
+				n := v.Aux.(ir.Node)
+				if ir.IsSynthetic(n) {
 					continue
 				}
 
@@ -425,7 +426,7 @@ func BuildFuncDebug(ctxt *obj.Link, f *Func, loggingEnabled bool, stackOffset fu
 	state.initializeCache(f, len(state.varParts), len(state.slots))
 
 	for i, slot := range f.Names {
-		if slot.N.IsSynthetic() {
+		if ir.IsSynthetic(slot.N) {
 			continue
 		}
 		for _, value := range f.NamedValues[slot] {
@@ -717,8 +718,8 @@ func (state *debugState) processValue(v *Value, vSlots []SlotID, vReg *Register)
 
 	switch {
 	case v.Op == OpVarDef, v.Op == OpVarKill:
-		n := v.Aux.(GCNode)
-		if n.IsSynthetic() {
+		n := v.Aux.(ir.Node)
+		if ir.IsSynthetic(n) {
 			break
 		}
 
@@ -247,7 +247,7 @@ func expandCalls(f *Func) {
 			// i.e., the struct select is generated and remains in because it is not applied to an actual structure.
 			// The OpLoad was created to load the single field of the IData
 			// This case removes that StructSelect.
-			if leafType != selector.Type {
+			if leafType != selector.Type && !selector.Type.IsEmptyInterface() { // empty interface for #42727
 				f.Fatalf("Unexpected Load as selector, leaf=%s, selector=%s\n", leaf.LongString(), selector.LongString())
 			}
 			leaf.copyOf(selector)
@@ -5,6 +5,7 @@
 package ssa
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 	"cmd/internal/obj/arm64"
@@ -36,10 +37,10 @@ func testConfigArch(tb testing.TB, arch string) *Conf {
 		tb.Fatalf("unknown arch %s", arch)
 	}
 	if ctxt.Arch.PtrSize != 8 {
-		tb.Fatal("dummyTypes is 64-bit only")
+		tb.Fatal("testTypes is 64-bit only")
 	}
 	c := &Conf{
-		config: NewConfig(arch, dummyTypes, ctxt, true),
+		config: NewConfig(arch, testTypes, ctxt, true),
 		tb: tb,
 	}
 	return c
@@ -53,108 +54,85 @@ type Conf struct {
 
 func (c *Conf) Frontend() Frontend {
 	if c.fe == nil {
-		c.fe = DummyFrontend{t: c.tb, ctxt: c.config.ctxt}
+		c.fe = TestFrontend{t: c.tb, ctxt: c.config.ctxt}
 	}
 	return c.fe
 }
 
-// DummyFrontend is a test-only frontend.
+// TestFrontend is a test-only frontend.
 // It assumes 64 bit integers and pointers.
-type DummyFrontend struct {
+type TestFrontend struct {
 	t testing.TB
 	ctxt *obj.Link
 }
 
-type DummyAuto struct {
-	t *types.Type
-	s string
-}
-
-func (d *DummyAuto) Typ() *types.Type {
-	return d.t
-}
-
-func (d *DummyAuto) String() string {
-	return d.s
-}
-
-func (d *DummyAuto) StorageClass() StorageClass {
-	return ClassAuto
-}
-
-func (d *DummyAuto) IsSynthetic() bool {
-	return false
-}
-
-func (d *DummyAuto) IsAutoTmp() bool {
-	return true
-}
-
-func (DummyFrontend) StringData(s string) *obj.LSym {
+func (TestFrontend) StringData(s string) *obj.LSym {
 	return nil
 }
-func (DummyFrontend) Auto(pos src.XPos, t *types.Type) GCNode {
-	return &DummyAuto{t: t, s: "aDummyAuto"}
+func (TestFrontend) Auto(pos src.XPos, t *types.Type) ir.Node {
+	n := ir.NewNameAt(pos, &types.Sym{Name: "aFakeAuto"})
+	n.SetClass(ir.PAUTO)
+	return n
}
 }
-func (d DummyFrontend) SplitString(s LocalSlot) (LocalSlot, LocalSlot) {
-	return LocalSlot{N: s.N, Type: dummyTypes.BytePtr, Off: s.Off}, LocalSlot{N: s.N, Type: dummyTypes.Int, Off: s.Off + 8}
+func (d TestFrontend) SplitString(s LocalSlot) (LocalSlot, LocalSlot) {
+	return LocalSlot{N: s.N, Type: testTypes.BytePtr, Off: s.Off}, LocalSlot{N: s.N, Type: testTypes.Int, Off: s.Off + 8}
 }
-func (d DummyFrontend) SplitInterface(s LocalSlot) (LocalSlot, LocalSlot) {
-	return LocalSlot{N: s.N, Type: dummyTypes.BytePtr, Off: s.Off}, LocalSlot{N: s.N, Type: dummyTypes.BytePtr, Off: s.Off + 8}
+func (d TestFrontend) SplitInterface(s LocalSlot) (LocalSlot, LocalSlot) {
+	return LocalSlot{N: s.N, Type: testTypes.BytePtr, Off: s.Off}, LocalSlot{N: s.N, Type: testTypes.BytePtr, Off: s.Off + 8}
 }
-func (d DummyFrontend) SplitSlice(s LocalSlot) (LocalSlot, LocalSlot, LocalSlot) {
+func (d TestFrontend) SplitSlice(s LocalSlot) (LocalSlot, LocalSlot, LocalSlot) {
 	return LocalSlot{N: s.N, Type: s.Type.Elem().PtrTo(), Off: s.Off},
-		LocalSlot{N: s.N, Type: dummyTypes.Int, Off: s.Off + 8},
-		LocalSlot{N: s.N, Type: dummyTypes.Int, Off: s.Off + 16}
+		LocalSlot{N: s.N, Type: testTypes.Int, Off: s.Off + 8},
+		LocalSlot{N: s.N, Type: testTypes.Int, Off: s.Off + 16}
 }
-func (d DummyFrontend) SplitComplex(s LocalSlot) (LocalSlot, LocalSlot) {
+func (d TestFrontend) SplitComplex(s LocalSlot) (LocalSlot, LocalSlot) {
 	if s.Type.Size() == 16 {
-		return LocalSlot{N: s.N, Type: dummyTypes.Float64, Off: s.Off}, LocalSlot{N: s.N, Type: dummyTypes.Float64, Off: s.Off + 8}
+		return LocalSlot{N: s.N, Type: testTypes.Float64, Off: s.Off}, LocalSlot{N: s.N, Type: testTypes.Float64, Off: s.Off + 8}
 	}
-	return LocalSlot{N: s.N, Type: dummyTypes.Float32, Off: s.Off}, LocalSlot{N: s.N, Type: dummyTypes.Float32, Off: s.Off + 4}
+	return LocalSlot{N: s.N, Type: testTypes.Float32, Off: s.Off}, LocalSlot{N: s.N, Type: testTypes.Float32, Off: s.Off + 4}
 }
-func (d DummyFrontend) SplitInt64(s LocalSlot) (LocalSlot, LocalSlot) {
+func (d TestFrontend) SplitInt64(s LocalSlot) (LocalSlot, LocalSlot) {
 	if s.Type.IsSigned() {
-		return LocalSlot{N: s.N, Type: dummyTypes.Int32, Off: s.Off + 4}, LocalSlot{N: s.N, Type: dummyTypes.UInt32, Off: s.Off}
+		return LocalSlot{N: s.N, Type: testTypes.Int32, Off: s.Off + 4}, LocalSlot{N: s.N, Type: testTypes.UInt32, Off: s.Off}
 	}
-	return LocalSlot{N: s.N, Type: dummyTypes.UInt32, Off: s.Off + 4}, LocalSlot{N: s.N, Type: dummyTypes.UInt32, Off: s.Off}
+	return LocalSlot{N: s.N, Type: testTypes.UInt32, Off: s.Off + 4}, LocalSlot{N: s.N, Type: testTypes.UInt32, Off: s.Off}
 }
-func (d DummyFrontend) SplitStruct(s LocalSlot, i int) LocalSlot {
+func (d TestFrontend) SplitStruct(s LocalSlot, i int) LocalSlot {
 	return LocalSlot{N: s.N, Type: s.Type.FieldType(i), Off: s.Off + s.Type.FieldOff(i)}
 }
-func (d DummyFrontend) SplitArray(s LocalSlot) LocalSlot {
+func (d TestFrontend) SplitArray(s LocalSlot) LocalSlot {
 	return LocalSlot{N: s.N, Type: s.Type.Elem(), Off: s.Off}
 }
 
-func (d DummyFrontend) SplitSlot(parent *LocalSlot, suffix string, offset int64, t *types.Type) LocalSlot {
+func (d TestFrontend) SplitSlot(parent *LocalSlot, suffix string, offset int64, t *types.Type) LocalSlot {
 	return LocalSlot{N: parent.N, Type: t, Off: offset}
 }
-func (DummyFrontend) Line(_ src.XPos) string {
+func (TestFrontend) Line(_ src.XPos) string {
 	return "unknown.go:0"
 }
-func (DummyFrontend) AllocFrame(f *Func) {
+func (TestFrontend) AllocFrame(f *Func) {
 }
-func (d DummyFrontend) Syslook(s string) *obj.LSym {
+func (d TestFrontend) Syslook(s string) *obj.LSym {
 	return d.ctxt.Lookup(s)
 }
-func (DummyFrontend) UseWriteBarrier() bool {
+func (TestFrontend) UseWriteBarrier() bool {
 	return true // only writebarrier_test cares
 }
-func (DummyFrontend) SetWBPos(pos src.XPos) {
+func (TestFrontend) SetWBPos(pos src.XPos) {
 }
 
-func (d DummyFrontend) Logf(msg string, args ...interface{}) { d.t.Logf(msg, args...) }
-func (d DummyFrontend) Log() bool { return true }
+func (d TestFrontend) Logf(msg string, args ...interface{}) { d.t.Logf(msg, args...) }
+func (d TestFrontend) Log() bool { return true }
 
-func (d DummyFrontend) Fatalf(_ src.XPos, msg string, args ...interface{}) { d.t.Fatalf(msg, args...) }
-func (d DummyFrontend) Warnl(_ src.XPos, msg string, args ...interface{}) { d.t.Logf(msg, args...) }
-func (d DummyFrontend) Debug_checknil() bool { return false }
+func (d TestFrontend) Fatalf(_ src.XPos, msg string, args ...interface{}) { d.t.Fatalf(msg, args...) }
+func (d TestFrontend) Warnl(_ src.XPos, msg string, args ...interface{}) { d.t.Logf(msg, args...) }
+func (d TestFrontend) Debug_checknil() bool { return false }
 
-func (d DummyFrontend) MyImportPath() string {
+func (d TestFrontend) MyImportPath() string {
 	return "my/import/path"
 }
 
-var dummyTypes Types
+var testTypes Types
 
 func init() {
 	// Initialize just enough of the universe and the types package to make our tests function.
@@ -198,12 +176,12 @@ func init() {
 		t.Align = uint8(typ.width)
 		types.Types[typ.et] = t
 	}
-	dummyTypes.SetTypPtrs()
+	testTypes.SetTypPtrs()
 }
 
-func (d DummyFrontend) DerefItab(sym *obj.LSym, off int64) *obj.LSym { return nil }
+func (d TestFrontend) DerefItab(sym *obj.LSym, off int64) *obj.LSym { return nil }
 
-func (d DummyFrontend) CanSSA(t *types.Type) bool {
-	// There are no un-SSAable types in dummy land.
+func (d TestFrontend) CanSSA(t *types.Type) bool {
+	// There are no un-SSAable types in test land.
 	return true
 }
@@ -5,6 +5,7 @@
 package ssa
 
 import (
+	"cmd/compile/internal/ir"
 	"cmd/compile/internal/types"
 	"fmt"
 )
@@ -59,7 +60,7 @@ func (r *Register) GCNum() int16 {
 //	{ N: len, Type: int, Off: 0, SplitOf: parent, SplitOffset: 8}
 //	parent = &{N: s, Type: string}
 type LocalSlot struct {
-	N GCNode // an ONAME *gc.Node representing a stack location.
+	N ir.Node // an ONAME *gc.Node representing a stack location.
 	Type *types.Type // type of slot
 	Off int64 // offset of slot in N
||||||
|
|
@ -5,6 +5,7 @@
|
||||||
package ssa
|
package ssa
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmd/compile/internal/ir"
|
||||||
"cmd/internal/objabi"
|
"cmd/internal/objabi"
|
||||||
"cmd/internal/src"
|
"cmd/internal/src"
|
||||||
)
|
)
|
||||||
|
|
@ -235,7 +236,7 @@ func nilcheckelim2(f *Func) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if v.Type.IsMemory() || v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
|
if v.Type.IsMemory() || v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
|
||||||
if v.Op == OpVarKill || v.Op == OpVarLive || (v.Op == OpVarDef && !v.Aux.(GCNode).Typ().HasPointers()) {
|
if v.Op == OpVarKill || v.Op == OpVarLive || (v.Op == OpVarDef && !v.Aux.(ir.Node).Type().HasPointers()) {
|
||||||
// These ops don't really change memory.
|
// These ops don't really change memory.
|
||||||
continue
|
continue
|
||||||
// Note: OpVarDef requires that the defined variable not have pointers.
|
// Note: OpVarDef requires that the defined variable not have pointers.
|
||||||
|
|
|
||||||
|
|
@@ -136,13 +136,13 @@ type posetNode struct {
 // Most internal data structures are pre-allocated and flat, so for instance adding a
 // new relation does not cause any allocation. For performance reasons,
 // each node has only up to two outgoing edges (like a binary tree), so intermediate
-// "dummy" nodes are required to represent more than two relations. For instance,
+// "extra" nodes are required to represent more than two relations. For instance,
 // to record that A<I, A<J, A<K (with no known relation between I,J,K), we create the
 // following DAG:
 //
 //      A
 //     / \
-//    I  dummy
+//    I  extra
 //        / \
 //       J   K
 //
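The comment above describes the trick this pass relies on: every node carries at most two child edges, and unlabeled helper ("extra") nodes are chained in whenever a node needs a third child. A small, self-contained sketch of that idea follows; the names (`dag`, `addChild`) and the toy representation are mine, not the compiler's poset implementation.

```go
package main

import "fmt"

// node has at most two outgoing edges; a zero child index means "no edge".
type node struct {
	label    string // empty for helper ("extra") nodes
	chl, chr int
}

type dag struct{ nodes []node }

func (d *dag) newNode(label string) int {
	d.nodes = append(d.nodes, node{label: label})
	return len(d.nodes) - 1
}

// addChild links child under parent, inserting an unlabeled helper node
// when parent already has two children, so fan-out stays at most two.
func (d *dag) addChild(parent, child int) {
	p := &d.nodes[parent]
	switch {
	case p.chl == 0:
		p.chl = child
	case p.chr == 0:
		p.chr = child
	default:
		helper := d.newNode("")
		p = &d.nodes[parent] // re-take the pointer: append may have grown the slice
		d.nodes[helper].chl = p.chr
		d.nodes[helper].chr = child
		p.chr = helper
	}
}

func main() {
	d := &dag{nodes: make([]node, 1)} // index 0 is a sentinel ("no edge")
	a := d.newNode("A")
	for _, l := range []string{"I", "J", "K"} {
		d.addChild(a, d.newNode(l))
	}
	// Prints A with children I and an unlabeled helper, whose children are J and K,
	// matching the DAG drawn in the comment above.
	for i, n := range d.nodes[1:] {
		fmt.Printf("#%d label=%q chl=%d chr=%d\n", i+1, n.label, n.chl, n.chr)
	}
}
```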
@@ -223,7 +223,7 @@ func (po *poset) addchild(i1, i2 uint32, strict bool) {
 po.setchr(i1, e2)
 po.upush(undoSetChr, i1, 0)
 } else {
-// If n1 already has two children, add an intermediate dummy
+// If n1 already has two children, add an intermediate extra
 // node to record the relation correctly (without relating
 // n2 to other existing nodes). Use a non-deterministic value
 // to decide whether to append on the left or the right, to avoid

@@ -231,27 +231,27 @@ func (po *poset) addchild(i1, i2 uint32, strict bool) {
 //
 //      n1
 //     /  \
-//   i1l  dummy
+//   i1l  extra
 //        /  \
 //      i1r   n2
 //
-dummy := po.newnode(nil)
+extra := po.newnode(nil)
 if (i1^i2)&1 != 0 { // non-deterministic
-po.setchl(dummy, i1r)
-po.setchr(dummy, e2)
-po.setchr(i1, newedge(dummy, false))
+po.setchl(extra, i1r)
+po.setchr(extra, e2)
+po.setchr(i1, newedge(extra, false))
 po.upush(undoSetChr, i1, i1r)
 } else {
-po.setchl(dummy, i1l)
-po.setchr(dummy, e2)
-po.setchl(i1, newedge(dummy, false))
+po.setchl(extra, i1l)
+po.setchr(extra, e2)
+po.setchl(i1, newedge(extra, false))
 po.upush(undoSetChl, i1, i1l)
 }
 }
 }

 // newnode allocates a new node bound to SSA value n.
-// If n is nil, this is a dummy node (= only used internally).
+// If n is nil, this is an extra node (= only used internally).
 func (po *poset) newnode(n *Value) uint32 {
 i := po.lastidx + 1
 po.lastidx++
@@ -380,9 +380,9 @@ func (po *poset) newconst(n *Value) {

 case higherptr != 0:
 // Higher bound only. To record n < higher, we need
-// a dummy root:
+// an extra root:
 //
-//    dummy
+//    extra
 //     /  \
 //  root   \
 //   /      n

@@ -395,11 +395,11 @@ func (po *poset) newconst(n *Value) {
 if r2 != po.roots[0] { // all constants should be in root #0
 panic("constant not in root #0")
 }
-dummy := po.newnode(nil)
-po.changeroot(r2, dummy)
-po.upush(undoChangeRoot, dummy, newedge(r2, false))
-po.addchild(dummy, r2, false)
-po.addchild(dummy, i, false)
+extra := po.newnode(nil)
+po.changeroot(r2, extra)
+po.upush(undoChangeRoot, extra, newedge(r2, false))
+po.addchild(extra, r2, false)
+po.addchild(extra, i, false)
 po.addchild(i, i2, true)
 }

@@ -612,7 +612,7 @@ func (po *poset) findroot(i uint32) uint32 {
 panic("findroot didn't find any root")
 }

-// mergeroot merges two DAGs into one DAG by creating a new dummy root
+// mergeroot merges two DAGs into one DAG by creating a new extra root
 func (po *poset) mergeroot(r1, r2 uint32) uint32 {
 // Root #0 is special as it contains all constants. Since mergeroot
 // discards r2 as root and keeps r1, make sure that r2 is not root #0,

@@ -1004,7 +1004,7 @@ func (po *poset) setOrder(n1, n2 *Value, strict bool) bool {
 case !f1 && f2:
 // n1 is not in any DAG but n2 is. If n2 is a root, we can put
 // n1 in its place as a root; otherwise, we need to create a new
-// dummy root to record the relation.
+// extra root to record the relation.
 i1 = po.newnode(n1)

 if po.isroot(i2) {

@@ -1020,17 +1020,17 @@ func (po *poset) setOrder(n1, n2 *Value, strict bool) bool {

 // Re-parent as follows:
 //
-//      dummy
+//      extra
 //  r    /  \
 //   \  ===>  r  i1
 //    i2       \ /
 //              i2
 //
-dummy := po.newnode(nil)
-po.changeroot(r, dummy)
-po.upush(undoChangeRoot, dummy, newedge(r, false))
-po.addchild(dummy, r, false)
-po.addchild(dummy, i1, false)
+extra := po.newnode(nil)
+po.changeroot(r, extra)
+po.upush(undoChangeRoot, extra, newedge(r, false))
+po.addchild(extra, r, false)
+po.addchild(extra, i1, false)
 po.addchild(i1, i2, strict)

 case f1 && f2:
@@ -104,7 +104,7 @@
 // If b3 is the primary predecessor of b2, then we use x3 in b2 and
 // add a x4:CX->BX copy at the end of b4.
 // But the definition of x3 doesn't dominate b2. We should really
-// insert a dummy phi at the start of b2 (x5=phi(x3,x4):BX) to keep
+// insert an extra phi at the start of b2 (x5=phi(x3,x4):BX) to keep
 // SSA form. For now, we ignore this problem as remaining in strict
 // SSA form isn't needed after regalloc. We'll just leave the use
 // of x3 not dominated by the definition of x3, and the CX->BX copy

@@ -114,6 +114,7 @@
 package ssa

 import (
+"cmd/compile/internal/ir"
 "cmd/compile/internal/types"
 "cmd/internal/objabi"
 "cmd/internal/src"

@@ -1248,7 +1249,7 @@ func (s *regAllocState) regalloc(f *Func) {
 // This forces later liveness analysis to make the
 // value live at this point.
 v.SetArg(0, s.makeSpill(a, b))
-} else if _, ok := a.Aux.(GCNode); ok && vi.rematerializeable {
+} else if _, ok := a.Aux.(ir.Node); ok && vi.rematerializeable {
 // Rematerializeable value with a gc.Node. This is the address of
 // a stack object (e.g. an LEAQ). Keep the object live.
 // Change it to VarLive, which is what plive expects for locals.
@@ -7,6 +7,7 @@
 package ssa

 import (
+"cmd/compile/internal/ir"
 "cmd/compile/internal/types"
 "cmd/internal/src"
 "fmt"

@@ -156,7 +157,7 @@ func (s *stackAllocState) stackalloc() {
 if v.Aux == nil {
 f.Fatalf("%s has nil Aux\n", v.LongString())
 }
-loc := LocalSlot{N: v.Aux.(GCNode), Type: v.Type, Off: v.AuxInt}
+loc := LocalSlot{N: v.Aux.(ir.Node), Type: v.Type, Off: v.AuxInt}
 if f.pass.debug > stackDebug {
 fmt.Printf("stackalloc %s to %s\n", v, loc)
 }
@@ -13,7 +13,7 @@ func TestDump(t *testing.T) {
 t.Skip("skipping test in short mode")
 }

-// provide a dummy error handler so parsing doesn't stop after first error
+// provide a no-op error handler so parsing doesn't stop after first error
 ast, err := ParseFile(*src_, func(error) {}, nil, CheckBranches)
 if err != nil {
 t.Error(err)
@@ -116,7 +116,7 @@ func (*decl) aDecl() {}

 // All declarations belonging to the same group point to the same Group node.
 type Group struct {
-dummy int // not empty so we are guaranteed different Group instances
+_ int // not empty so we are guaranteed different Group instances
 }

 // ----------------------------------------------------------------------------
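Why Group keeps a blank int field rather than being empty: the Go spec allows distinct zero-size values to share an address, so an empty struct would not guarantee that two Group allocations compare unequal. A self-contained illustration of that rule (toy types of my own, not part of this change):

```go
package main

import "fmt"

type empty struct{}
type nonEmpty struct{ _ int }

func main() {
	// Pointers to distinct zero-size variables may or may not be equal,
	// so this comparison may legitimately print true.
	e1, e2 := new(empty), new(empty)
	fmt.Println("empty pointers equal:    ", e1 == e2)

	// Distinct non-zero-size allocations always have distinct addresses,
	// which is the guarantee the non-empty Group type relies on.
	n1, n2 := new(nonEmpty), new(nonEmpty)
	fmt.Println("non-empty pointers equal:", n1 == n2)
}
```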
@@ -18,7 +18,7 @@ func TestPrint(t *testing.T) {
 t.Skip("skipping test in short mode")
 }

-// provide a dummy error handler so parsing doesn't stop after first error
+// provide a no-op error handler so parsing doesn't stop after first error
 ast, err := ParseFile(*src_, func(error) {}, nil, 0)
 if err != nil {
 t.Error(err)
@@ -15,7 +15,7 @@ var Block int32 // current block number
 // restored once the block scope ends.
 type dsym struct {
 sym *Sym // sym == nil indicates stack mark
-def *Node
+def IRNode
 block int32
 lastlineno src.XPos // last declaration for diagnostic
 }

@@ -79,16 +79,16 @@ func IsDclstackValid() bool {
 }

 // PkgDef returns the definition associated with s at package scope.
-func (s *Sym) PkgDef() *Node {
+func (s *Sym) PkgDef() IRNode {
 return *s.pkgDefPtr()
 }

 // SetPkgDef sets the definition associated with s at package scope.
-func (s *Sym) SetPkgDef(n *Node) {
+func (s *Sym) SetPkgDef(n IRNode) {
 *s.pkgDefPtr() = n
 }

-func (s *Sym) pkgDefPtr() **Node {
+func (s *Sym) pkgDefPtr() *IRNode {
 // Look for outermost saved declaration, which must be the
 // package scope definition, if present.
 for _, d := range dclstack {
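PkgDef and SetPkgDef above share one lookup by having pkgDefPtr return a pointer to the stored slot; the getter dereferences it and the setter assigns through it. A minimal stand-alone sketch of that accessor pattern, using a hypothetical `sym` type with a saved-definitions slice rather than the compiler's dclstack:

```go
package main

import "fmt"

// sym is a hypothetical symbol with a current definition and a stack of
// saved (shadowed) definitions; saved[0], if present, is the outermost one.
type sym struct {
	def   string
	saved []string
}

// pkgDefPtr returns a pointer to the slot holding the package-scope
// definition: the outermost saved one if the symbol is shadowed,
// otherwise the current definition itself.
func (s *sym) pkgDefPtr() *string {
	if len(s.saved) > 0 {
		return &s.saved[0]
	}
	return &s.def
}

// PkgDef and SetPkgDef share the single lookup in pkgDefPtr.
func (s *sym) PkgDef() string     { return *s.pkgDefPtr() }
func (s *sym) SetPkgDef(d string) { *s.pkgDefPtr() = d }

func main() {
	s := &sym{def: "func f() {}"}
	s.SetPkgDef("var f int")
	fmt.Println(s.PkgDef()) // var f int

	// While shadowed inside a block, the package-scope slot is the saved one.
	s.saved = append(s.saved, s.def)
	s.def = "local f"
	s.SetPkgDef("const f = 1")
	fmt.Println(s.PkgDef(), "/ current:", s.def) // const f = 1 / current: local f
}
```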
@@ -20,11 +20,11 @@ func TestSizeof(t *testing.T) {
 _32bit uintptr // size on 32bit platforms
 _64bit uintptr // size on 64bit platforms
 }{
-{Sym{}, 52, 88},
-{Type{}, 52, 88},
+{Sym{}, 60, 104},
+{Type{}, 56, 96},
 {Map{}, 20, 40},
 {Forward{}, 20, 32},
-{Func{}, 32, 56},
+{Func{}, 28, 48},
 {Struct{}, 16, 32},
 {Interface{}, 8, 16},
 {Chan{}, 8, 16},
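The hunk above updates the expected struct sizes after the Sym, Type, and Func layout changes. For readers unfamiliar with this style of guard, here is a minimal sketch of a size-regression check over a made-up struct; the type `example` and its expected sizes are illustrative, not the compiler's.

```go
package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

// example stands in for a compiler-internal struct whose size we want to
// pin down: one pointer, one string header, one int32 (plus padding).
type example struct {
	p *int
	s string
	n int32
}

func main() {
	ptrSize := unsafe.Sizeof(uintptr(0)) // 4 on 32-bit, 8 on 64-bit targets
	tests := []struct {
		val    interface{} // type whose size is checked
		_32bit uintptr     // expected size on 32-bit platforms
		_64bit uintptr     // expected size on 64-bit platforms
	}{
		{example{}, 16, 32},
	}
	for _, tt := range tests {
		want := tt._32bit
		if ptrSize == 8 {
			want = tt._64bit
		}
		if got := reflect.TypeOf(tt.val).Size(); got != want {
			fmt.Printf("unsafe.Sizeof(%T) = %d, want %d\n", tt.val, got, want)
		}
	}
}
```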
Some files were not shown because too many files have changed in this diff.