[dev.regabi] cmd/compile: group known symbols, packages, names [generated]

There are a handful of pre-computed magic symbols known by
package gc, and we need a place to store them.

If we keep them together, the need for type *ir.Name means that
package ir is the lowest package in the import hierarchy that they
can go in. And package ir needs gopkg for methodSymSuffix
(in a later CL), so they can't go any higher either, at least not all together.
So package ir it is.

Rather than dump them all into the top-level package ir
namespace, however, we introduce global structs, Syms, Pkgs, and Names,
and make the known symbols, packages, and names fields of those.

[git-generate]
cd src/cmd/compile/internal/gc

rf '
	add go.go:$ \
		// Names holds known names. \
		var Names struct{} \
		\
		// Syms holds known symbols. \
		var Syms struct {} \
		\
		// Pkgs holds known packages. \
		var Pkgs struct {} \

	mv staticuint64s Names.Staticuint64s
	mv zerobase Names.Zerobase

	mv assertE2I Syms.AssertE2I
	mv assertE2I2 Syms.AssertE2I2
	mv assertI2I Syms.AssertI2I
	mv assertI2I2 Syms.AssertI2I2
	mv deferproc Syms.Deferproc
	mv deferprocStack Syms.DeferprocStack
	mv Deferreturn Syms.Deferreturn
	mv Duffcopy Syms.Duffcopy
	mv Duffzero Syms.Duffzero
	mv gcWriteBarrier Syms.GCWriteBarrier
	mv goschedguarded Syms.Goschedguarded
	mv growslice Syms.Growslice
	mv msanread Syms.Msanread
	mv msanwrite Syms.Msanwrite
	mv msanmove Syms.Msanmove
	mv newobject Syms.Newobject
	mv newproc Syms.Newproc
	mv panicdivide Syms.Panicdivide
	mv panicshift Syms.Panicshift
	mv panicdottypeE Syms.PanicdottypeE
	mv panicdottypeI Syms.PanicdottypeI
	mv panicnildottype Syms.Panicnildottype
	mv panicoverflow Syms.Panicoverflow
	mv raceread Syms.Raceread
	mv racereadrange Syms.Racereadrange
	mv racewrite Syms.Racewrite
	mv racewriterange Syms.Racewriterange
	mv SigPanic Syms.SigPanic
	mv typedmemclr Syms.Typedmemclr
	mv typedmemmove Syms.Typedmemmove
	mv Udiv Syms.Udiv
	mv writeBarrier Syms.WriteBarrier
	mv zerobaseSym Syms.Zerobase
	mv arm64HasATOMICS Syms.ARM64HasATOMICS
	mv armHasVFPv4 Syms.ARMHasVFPv4
	mv x86HasFMA Syms.X86HasFMA
	mv x86HasPOPCNT Syms.X86HasPOPCNT
	mv x86HasSSE41 Syms.X86HasSSE41
	mv WasmDiv Syms.WasmDiv
	mv WasmMove Syms.WasmMove
	mv WasmZero Syms.WasmZero
	mv WasmTruncS Syms.WasmTruncS
	mv WasmTruncU Syms.WasmTruncU

	mv gopkg Pkgs.Go
	mv itabpkg Pkgs.Itab
	mv itablinkpkg Pkgs.Itablink
	mv mappkg Pkgs.Map
	mv msanpkg Pkgs.Msan
	mv racepkg Pkgs.Race
	mv Runtimepkg Pkgs.Runtime
	mv trackpkg Pkgs.Track
	mv unsafepkg Pkgs.Unsafe

	mv Names Syms Pkgs symtab.go
	mv symtab.go cmd/compile/internal/ir
'

Change-Id: Ic143862148569a3bcde8e70b26d75421aa2d00f3
Reviewed-on: https://go-review.googlesource.com/c/go/+/279235
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
This commit is contained in:
Russ Cox 2020-12-23 00:10:25 -05:00
parent 9ee309255a
commit 65c4c6dfb2
29 changed files with 255 additions and 231 deletions

View file

@@ -7,6 +7,7 @@ package amd64
import ( import (
"cmd/compile/internal/base" "cmd/compile/internal/base"
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/x86" "cmd/internal/obj/x86"
"cmd/internal/objabi" "cmd/internal/objabi"
@@ -102,7 +103,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Pr
} }
p = pp.Appendpp(p, leaptr, obj.TYPE_MEM, x86.REG_SP, off+dzDI(cnt), obj.TYPE_REG, x86.REG_DI, 0) p = pp.Appendpp(p, leaptr, obj.TYPE_MEM, x86.REG_SP, off+dzDI(cnt), obj.TYPE_REG, x86.REG_DI, 0)
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_ADDR, 0, dzOff(cnt)) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_ADDR, 0, dzOff(cnt))
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
if cnt%16 != 0 { if cnt%16 != 0 {
p = pp.Appendpp(p, x86.AMOVUPS, obj.TYPE_REG, x86.REG_X0, 0, obj.TYPE_MEM, x86.REG_DI, -int64(8)) p = pp.Appendpp(p, x86.AMOVUPS, obj.TYPE_REG, x86.REG_X0, 0, obj.TYPE_MEM, x86.REG_DI, -int64(8))

View file

@@ -10,6 +10,7 @@ import (
"cmd/compile/internal/base" "cmd/compile/internal/base"
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt" "cmd/compile/internal/logopt"
"cmd/compile/internal/ssa" "cmd/compile/internal/ssa"
"cmd/compile/internal/types" "cmd/compile/internal/types"
@@ -912,7 +913,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
} }
p = s.Prog(obj.ADUFFZERO) p = s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_ADDR p.To.Type = obj.TYPE_ADDR
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = off p.To.Offset = off
case ssa.OpAMD64MOVOconst: case ssa.OpAMD64MOVOconst:
if v.AuxInt != 0 { if v.AuxInt != 0 {
@@ -923,7 +924,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case ssa.OpAMD64DUFFCOPY: case ssa.OpAMD64DUFFCOPY:
p := s.Prog(obj.ADUFFCOPY) p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_ADDR p.To.Type = obj.TYPE_ADDR
p.To.Sym = gc.Duffcopy p.To.Sym = ir.Syms.Duffcopy
if v.AuxInt%16 != 0 { if v.AuxInt%16 != 0 {
v.Fatalf("bad DUFFCOPY AuxInt %v", v.AuxInt) v.Fatalf("bad DUFFCOPY AuxInt %v", v.AuxInt)
} }

View file

@@ -6,6 +6,7 @@ package arm
import ( import (
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/arm" "cmd/internal/obj/arm"
) )
@@ -28,7 +29,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, r0 *uint32) *obj.Prog
p.Reg = arm.REGSP p.Reg = arm.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr)) p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr))
} else { } else {
p = pp.Appendpp(p, arm.AADD, obj.TYPE_CONST, 0, 4+off, obj.TYPE_REG, arm.REG_R1, 0) p = pp.Appendpp(p, arm.AADD, obj.TYPE_CONST, 0, 4+off, obj.TYPE_REG, arm.REG_R1, 0)

View file

@@ -702,7 +702,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p := s.Prog(obj.ACALL) p := s.Prog(obj.ACALL)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Udiv p.To.Sym = ir.Syms.Udiv
case ssa.OpARMLoweredWB: case ssa.OpARMLoweredWB:
p := s.Prog(obj.ACALL) p := s.Prog(obj.ACALL)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
@@ -724,13 +724,13 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p := s.Prog(obj.ADUFFZERO) p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpARMDUFFCOPY: case ssa.OpARMDUFFCOPY:
p := s.Prog(obj.ADUFFCOPY) p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffcopy p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpARMLoweredNilCheck: case ssa.OpARMLoweredNilCheck:
// Issue a load which will fault if arg is nil. // Issue a load which will fault if arg is nil.

View file

@@ -6,6 +6,7 @@ package arm64
import ( import (
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/arm64" "cmd/internal/obj/arm64"
"cmd/internal/objabi" "cmd/internal/objabi"
@@ -41,7 +42,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
p.Reg = arm64.REG_R20 p.Reg = arm64.REG_R20
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = 4 * (64 - cnt/(2*int64(gc.Widthptr))) p.To.Offset = 4 * (64 - cnt/(2*int64(gc.Widthptr)))
} else { } else {
// Not using REGTMP, so this is async preemptible (async preemption clobbers REGTMP). // Not using REGTMP, so this is async preemptible (async preemption clobbers REGTMP).

View file

@@ -961,7 +961,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p := s.Prog(obj.ADUFFZERO) p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpARM64LoweredZero: case ssa.OpARM64LoweredZero:
// STP.P (ZR,ZR), 16(R16) // STP.P (ZR,ZR), 16(R16)
@@ -987,7 +987,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p := s.Prog(obj.ADUFFCOPY) p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffcopy p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpARM64LoweredMove: case ssa.OpARM64LoweredMove:
// MOVD.P 8(R16), Rtmp // MOVD.P 8(R16), Rtmp

View file

@@ -265,19 +265,19 @@ func hashfor(t *types.Type) ir.Node {
case types.AMEM: case types.AMEM:
base.Fatalf("hashfor with AMEM type") base.Fatalf("hashfor with AMEM type")
case types.AINTER: case types.AINTER:
sym = Runtimepkg.Lookup("interhash") sym = ir.Pkgs.Runtime.Lookup("interhash")
case types.ANILINTER: case types.ANILINTER:
sym = Runtimepkg.Lookup("nilinterhash") sym = ir.Pkgs.Runtime.Lookup("nilinterhash")
case types.ASTRING: case types.ASTRING:
sym = Runtimepkg.Lookup("strhash") sym = ir.Pkgs.Runtime.Lookup("strhash")
case types.AFLOAT32: case types.AFLOAT32:
sym = Runtimepkg.Lookup("f32hash") sym = ir.Pkgs.Runtime.Lookup("f32hash")
case types.AFLOAT64: case types.AFLOAT64:
sym = Runtimepkg.Lookup("f64hash") sym = ir.Pkgs.Runtime.Lookup("f64hash")
case types.ACPLX64: case types.ACPLX64:
sym = Runtimepkg.Lookup("c64hash") sym = ir.Pkgs.Runtime.Lookup("c64hash")
case types.ACPLX128: case types.ACPLX128:
sym = Runtimepkg.Lookup("c128hash") sym = ir.Pkgs.Runtime.Lookup("c128hash")
default: default:
// Note: the caller of hashfor ensured that this symbol // Note: the caller of hashfor ensured that this symbol
// exists and has a body by calling genhash for t. // exists and has a body by calling genhash for t.

View file

@@ -626,7 +626,7 @@ func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sy
// Find the package the receiver type appeared in. For // Find the package the receiver type appeared in. For
// anonymous receiver types (i.e., anonymous structs with // anonymous receiver types (i.e., anonymous structs with
// embedded fields), use the "go" pseudo-package instead. // embedded fields), use the "go" pseudo-package instead.
rpkg := gopkg rpkg := ir.Pkgs.Go
if rsym != nil { if rsym != nil {
rpkg = rsym.Pkg rpkg = rsym.Pkg
} }

View file

@@ -16,7 +16,7 @@ import (
// sysfunc looks up Go function name in package runtime. This function // sysfunc looks up Go function name in package runtime. This function
// must follow the internal calling convention. // must follow the internal calling convention.
func sysfunc(name string) *obj.LSym { func sysfunc(name string) *obj.LSym {
s := Runtimepkg.Lookup(name) s := ir.Pkgs.Runtime.Lookup(name)
s.SetFunc(true) s.SetFunc(true)
return s.Linksym() return s.Linksym()
} }
@@ -25,7 +25,7 @@ func sysfunc(name string) *obj.LSym {
// runtime. If this is a function, it may have a special calling // runtime. If this is a function, it may have a special calling
// convention. // convention.
func sysvar(name string) *obj.LSym { func sysvar(name string) *obj.LSym {
return Runtimepkg.Lookup(name).Linksym() return ir.Pkgs.Runtime.Lookup(name).Linksym()
} }
// isParamStackCopy reports whether this is the on-stack copy of a // isParamStackCopy reports whether this is the on-stack copy of a

View file

@@ -64,24 +64,6 @@ var decldepth int32
var inimport bool // set during import var inimport bool // set during import
var itabpkg *types.Pkg // fake pkg for itab entries
var itablinkpkg *types.Pkg // fake package for runtime itab entries
var Runtimepkg *types.Pkg // fake package runtime
var racepkg *types.Pkg // package runtime/race
var msanpkg *types.Pkg // package runtime/msan
var unsafepkg *types.Pkg // package unsafe
var trackpkg *types.Pkg // fake package for field tracking
var mappkg *types.Pkg // fake package for map zero value
var gopkg *types.Pkg // pseudo-package for method symbols on anonymous receiver types
var zerosize int64 var zerosize int64
var ( var (
@@ -149,57 +131,8 @@ type Arch struct {
var thearch Arch var thearch Arch
var ( var (
staticuint64s *ir.Name
zerobase *ir.Name
assertE2I,
assertE2I2,
assertI2I,
assertI2I2,
deferproc,
deferprocStack,
Deferreturn,
Duffcopy,
Duffzero,
gcWriteBarrier,
goschedguarded,
growslice,
msanread,
msanwrite,
msanmove,
newobject,
newproc,
panicdivide,
panicshift,
panicdottypeE,
panicdottypeI,
panicnildottype,
panicoverflow,
raceread,
racereadrange,
racewrite,
racewriterange,
x86HasPOPCNT,
x86HasSSE41,
x86HasFMA,
armHasVFPv4,
arm64HasATOMICS,
typedmemclr,
typedmemmove,
Udiv,
writeBarrier,
zerobaseSym *obj.LSym
BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
// Wasm
WasmMove,
WasmZero,
WasmDiv,
WasmTruncS,
WasmTruncU,
SigPanic *obj.LSym
) )
// GCWriteBarrierReg maps from registers to gcWriteBarrier implementation LSyms. // GCWriteBarrierReg maps from registers to gcWriteBarrier implementation LSyms.

View file

@@ -400,7 +400,7 @@ func (p *iexporter) pushDecl(n *ir.Name) {
} }
// Don't export predeclared declarations. // Don't export predeclared declarations.
if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == unsafepkg { if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == ir.Pkgs.Unsafe {
return return
} }
@@ -647,7 +647,7 @@ func (w *exportWriter) startType(k itag) {
func (w *exportWriter) doTyp(t *types.Type) { func (w *exportWriter) doTyp(t *types.Type) {
if t.Sym() != nil { if t.Sym() != nil {
if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == unsafepkg { if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == ir.Pkgs.Unsafe {
base.Fatalf("builtin type missing from typIndex: %v", t) base.Fatalf("builtin type missing from typIndex: %v", t)
} }

View file

@@ -86,32 +86,32 @@ func Main(archInit func(*Arch)) {
types.BuiltinPkg.Prefix = "go.builtin" // not go%2ebuiltin types.BuiltinPkg.Prefix = "go.builtin" // not go%2ebuiltin
// pseudo-package, accessed by import "unsafe" // pseudo-package, accessed by import "unsafe"
unsafepkg = types.NewPkg("unsafe", "unsafe") ir.Pkgs.Unsafe = types.NewPkg("unsafe", "unsafe")
// Pseudo-package that contains the compiler's builtin // Pseudo-package that contains the compiler's builtin
// declarations for package runtime. These are declared in a // declarations for package runtime. These are declared in a
// separate package to avoid conflicts with package runtime's // separate package to avoid conflicts with package runtime's
// actual declarations, which may differ intentionally but // actual declarations, which may differ intentionally but
// insignificantly. // insignificantly.
Runtimepkg = types.NewPkg("go.runtime", "runtime") ir.Pkgs.Runtime = types.NewPkg("go.runtime", "runtime")
Runtimepkg.Prefix = "runtime" ir.Pkgs.Runtime.Prefix = "runtime"
// pseudo-packages used in symbol tables // pseudo-packages used in symbol tables
itabpkg = types.NewPkg("go.itab", "go.itab") ir.Pkgs.Itab = types.NewPkg("go.itab", "go.itab")
itabpkg.Prefix = "go.itab" // not go%2eitab ir.Pkgs.Itab.Prefix = "go.itab" // not go%2eitab
itablinkpkg = types.NewPkg("go.itablink", "go.itablink") ir.Pkgs.Itablink = types.NewPkg("go.itablink", "go.itablink")
itablinkpkg.Prefix = "go.itablink" // not go%2eitablink ir.Pkgs.Itablink.Prefix = "go.itablink" // not go%2eitablink
trackpkg = types.NewPkg("go.track", "go.track") ir.Pkgs.Track = types.NewPkg("go.track", "go.track")
trackpkg.Prefix = "go.track" // not go%2etrack ir.Pkgs.Track.Prefix = "go.track" // not go%2etrack
// pseudo-package used for map zero values // pseudo-package used for map zero values
mappkg = types.NewPkg("go.map", "go.map") ir.Pkgs.Map = types.NewPkg("go.map", "go.map")
mappkg.Prefix = "go.map" ir.Pkgs.Map.Prefix = "go.map"
// pseudo-package used for methods with anonymous receivers // pseudo-package used for methods with anonymous receivers
gopkg = types.NewPkg("go", "") ir.Pkgs.Go = types.NewPkg("go", "")
base.DebugSSA = ssa.PhaseOption base.DebugSSA = ssa.PhaseOption
base.ParseFlags() base.ParseFlags()
@@ -165,10 +165,10 @@ func Main(archInit func(*Arch)) {
thearch.LinkArch.Init(base.Ctxt) thearch.LinkArch.Init(base.Ctxt)
startProfile() startProfile()
if base.Flag.Race { if base.Flag.Race {
racepkg = types.NewPkg("runtime/race", "") ir.Pkgs.Race = types.NewPkg("runtime/race", "")
} }
if base.Flag.MSan { if base.Flag.MSan {
msanpkg = types.NewPkg("runtime/msan", "") ir.Pkgs.Msan = types.NewPkg("runtime/msan", "")
} }
if base.Flag.Race || base.Flag.MSan { if base.Flag.Race || base.Flag.MSan {
base.Flag.Cfg.Instrumenting = true base.Flag.Cfg.Instrumenting = true
@@ -592,13 +592,13 @@ func loadsys() {
typs := runtimeTypes() typs := runtimeTypes()
for _, d := range &runtimeDecls { for _, d := range &runtimeDecls {
sym := Runtimepkg.Lookup(d.name) sym := ir.Pkgs.Runtime.Lookup(d.name)
typ := typs[d.typ] typ := typs[d.typ]
switch d.tag { switch d.tag {
case funcTag: case funcTag:
importfunc(Runtimepkg, src.NoXPos, sym, typ) importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
case varTag: case varTag:
importvar(Runtimepkg, src.NoXPos, sym, typ) importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
default: default:
base.Fatalf("unhandled declaration tag %v", d.tag) base.Fatalf("unhandled declaration tag %v", d.tag)
} }
@@ -647,7 +647,7 @@ func importfile(f constant.Value) *types.Pkg {
} }
if path_ == "unsafe" { if path_ == "unsafe" {
return unsafepkg return ir.Pkgs.Unsafe
} }
if islocalname(path_) { if islocalname(path_) {

View file

@@ -334,7 +334,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
return return
} }
if ipkg == unsafepkg { if ipkg == ir.Pkgs.Unsafe {
p.importedUnsafe = true p.importedUnsafe = true
} }
if ipkg.Path == "embed" { if ipkg.Path == "embed" {

View file

@@ -158,7 +158,7 @@ func dumpdata() {
dumpglobls(Target.Externs[numExterns:]) dumpglobls(Target.Externs[numExterns:])
if zerosize > 0 { if zerosize > 0 {
zero := mappkg.Lookup("zero") zero := ir.Pkgs.Map.Lookup("zero")
ggloblsym(zero.Linksym(), int32(zerosize), obj.DUPOK|obj.RODATA) ggloblsym(zero.Linksym(), int32(zerosize), obj.DUPOK|obj.RODATA)
} }

View file

@@ -571,7 +571,7 @@ func (lv *Liveness) markUnsafePoints() {
var load *ssa.Value var load *ssa.Value
v := wbBlock.Controls[0] v := wbBlock.Controls[0]
for { for {
if sym, ok := v.Aux.(*obj.LSym); ok && sym == writeBarrier { if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
load = v load = v
break break
} }
@@ -690,7 +690,7 @@ func (lv *Liveness) hasStackMap(v *ssa.Value) bool {
// typedmemclr and typedmemmove are write barriers and // typedmemclr and typedmemmove are write barriers and
// deeply non-preemptible. They are unsafe points and // deeply non-preemptible. They are unsafe points and
// hence should not have liveness maps. // hence should not have liveness maps.
if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == typedmemclr || sym.Fn == typedmemmove) { if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.Typedmemclr || sym.Fn == ir.Syms.Typedmemmove) {
return false return false
} }
return true return true

View file

@@ -489,7 +489,7 @@ func dimportpath(p *types.Pkg) {
// If we are compiling the runtime package, there are two runtime packages around // If we are compiling the runtime package, there are two runtime packages around
// -- localpkg and Runtimepkg. We don't want to produce import path symbols for // -- localpkg and Runtimepkg. We don't want to produce import path symbols for
// both of them, so just produce one for localpkg. // both of them, so just produce one for localpkg.
if base.Ctxt.Pkgpath == "runtime" && p == Runtimepkg { if base.Ctxt.Pkgpath == "runtime" && p == ir.Pkgs.Runtime {
return return
} }
@@ -926,7 +926,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
// tracksym returns the symbol for tracking use of field/method f, assumed // tracksym returns the symbol for tracking use of field/method f, assumed
// to be a member of struct/interface type t. // to be a member of struct/interface type t.
func tracksym(t *types.Type, f *types.Field) *types.Sym { func tracksym(t *types.Type, f *types.Field) *types.Sym {
return trackpkg.Lookup(t.ShortString() + "." + f.Sym.Name) return ir.Pkgs.Track.Lookup(t.ShortString() + "." + f.Sym.Name)
} }
func typesymprefix(prefix string, t *types.Type) *types.Sym { func typesymprefix(prefix string, t *types.Type) *types.Sym {
@@ -975,7 +975,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() { if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
base.Fatalf("itabname(%v, %v)", t, itype) base.Fatalf("itabname(%v, %v)", t, itype)
} }
s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString()) s := ir.Pkgs.Itab.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil { if s.Def == nil {
n := NewName(s) n := NewName(s)
n.SetType(types.Types[types.TUINT8]) n.SetType(types.Types[types.TUINT8])
@@ -1544,13 +1544,13 @@ func dumpbasictypes() {
dtypesym(functype(nil, []*ir.Field{anonfield(types.ErrorType)}, []*ir.Field{anonfield(types.Types[types.TSTRING])})) dtypesym(functype(nil, []*ir.Field{anonfield(types.ErrorType)}, []*ir.Field{anonfield(types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly. // add paths for runtime and main, which 6l imports implicitly.
dimportpath(Runtimepkg) dimportpath(ir.Pkgs.Runtime)
if base.Flag.Race { if base.Flag.Race {
dimportpath(racepkg) dimportpath(ir.Pkgs.Race)
} }
if base.Flag.MSan { if base.Flag.MSan {
dimportpath(msanpkg) dimportpath(ir.Pkgs.Msan)
} }
dimportpath(types.NewPkg("main", "")) dimportpath(types.NewPkg("main", ""))
} }
@@ -1642,7 +1642,7 @@ func dgcptrmask(t *types.Type) *obj.LSym {
fillptrmask(t, ptrmask) fillptrmask(t, ptrmask)
p := fmt.Sprintf("gcbits.%x", ptrmask) p := fmt.Sprintf("gcbits.%x", ptrmask)
sym := Runtimepkg.Lookup(p) sym := ir.Pkgs.Runtime.Lookup(p)
lsym := sym.Linksym() lsym := sym.Linksym()
if !sym.Uniq() { if !sym.Uniq() {
sym.SetUniq(true) sym.SetUniq(true)
@@ -1791,7 +1791,7 @@ func zeroaddr(size int64) ir.Node {
if zerosize < size { if zerosize < size {
zerosize = size zerosize = size
} }
s := mappkg.Lookup("zero") s := ir.Pkgs.Map.Lookup("zero")
if s.Def == nil { if s.Def == nil {
x := NewName(s) x := NewName(s)
x.SetType(types.Types[types.TUINT8]) x.SetType(types.Types[types.TUINT8])

View file

@@ -91,43 +91,43 @@ func initssaconfig() {
ssaCaches = make([]ssa.Cache, base.Flag.LowerC) ssaCaches = make([]ssa.Cache, base.Flag.LowerC)
// Set up some runtime functions we'll need to call. // Set up some runtime functions we'll need to call.
assertE2I = sysfunc("assertE2I") ir.Syms.AssertE2I = sysfunc("assertE2I")
assertE2I2 = sysfunc("assertE2I2") ir.Syms.AssertE2I2 = sysfunc("assertE2I2")
assertI2I = sysfunc("assertI2I") ir.Syms.AssertI2I = sysfunc("assertI2I")
assertI2I2 = sysfunc("assertI2I2") ir.Syms.AssertI2I2 = sysfunc("assertI2I2")
deferproc = sysfunc("deferproc") ir.Syms.Deferproc = sysfunc("deferproc")
deferprocStack = sysfunc("deferprocStack") ir.Syms.DeferprocStack = sysfunc("deferprocStack")
Deferreturn = sysfunc("deferreturn") ir.Syms.Deferreturn = sysfunc("deferreturn")
Duffcopy = sysfunc("duffcopy") ir.Syms.Duffcopy = sysfunc("duffcopy")
Duffzero = sysfunc("duffzero") ir.Syms.Duffzero = sysfunc("duffzero")
gcWriteBarrier = sysfunc("gcWriteBarrier") ir.Syms.GCWriteBarrier = sysfunc("gcWriteBarrier")
goschedguarded = sysfunc("goschedguarded") ir.Syms.Goschedguarded = sysfunc("goschedguarded")
growslice = sysfunc("growslice") ir.Syms.Growslice = sysfunc("growslice")
msanread = sysfunc("msanread") ir.Syms.Msanread = sysfunc("msanread")
msanwrite = sysfunc("msanwrite") ir.Syms.Msanwrite = sysfunc("msanwrite")
msanmove = sysfunc("msanmove") ir.Syms.Msanmove = sysfunc("msanmove")
newobject = sysfunc("newobject") ir.Syms.Newobject = sysfunc("newobject")
newproc = sysfunc("newproc") ir.Syms.Newproc = sysfunc("newproc")
panicdivide = sysfunc("panicdivide") ir.Syms.Panicdivide = sysfunc("panicdivide")
panicdottypeE = sysfunc("panicdottypeE") ir.Syms.PanicdottypeE = sysfunc("panicdottypeE")
panicdottypeI = sysfunc("panicdottypeI") ir.Syms.PanicdottypeI = sysfunc("panicdottypeI")
panicnildottype = sysfunc("panicnildottype") ir.Syms.Panicnildottype = sysfunc("panicnildottype")
panicoverflow = sysfunc("panicoverflow") ir.Syms.Panicoverflow = sysfunc("panicoverflow")
panicshift = sysfunc("panicshift") ir.Syms.Panicshift = sysfunc("panicshift")
raceread = sysfunc("raceread") ir.Syms.Raceread = sysfunc("raceread")
racereadrange = sysfunc("racereadrange") ir.Syms.Racereadrange = sysfunc("racereadrange")
racewrite = sysfunc("racewrite") ir.Syms.Racewrite = sysfunc("racewrite")
racewriterange = sysfunc("racewriterange") ir.Syms.Racewriterange = sysfunc("racewriterange")
x86HasPOPCNT = sysvar("x86HasPOPCNT") // bool ir.Syms.X86HasPOPCNT = sysvar("x86HasPOPCNT") // bool
x86HasSSE41 = sysvar("x86HasSSE41") // bool ir.Syms.X86HasSSE41 = sysvar("x86HasSSE41") // bool
x86HasFMA = sysvar("x86HasFMA") // bool ir.Syms.X86HasFMA = sysvar("x86HasFMA") // bool
armHasVFPv4 = sysvar("armHasVFPv4") // bool ir.Syms.ARMHasVFPv4 = sysvar("armHasVFPv4") // bool
arm64HasATOMICS = sysvar("arm64HasATOMICS") // bool ir.Syms.ARM64HasATOMICS = sysvar("arm64HasATOMICS") // bool
typedmemclr = sysfunc("typedmemclr") ir.Syms.Typedmemclr = sysfunc("typedmemclr")
typedmemmove = sysfunc("typedmemmove") ir.Syms.Typedmemmove = sysfunc("typedmemmove")
Udiv = sysvar("udiv") // asm func with special ABI ir.Syms.Udiv = sysvar("udiv") // asm func with special ABI
writeBarrier = sysvar("writeBarrier") // struct { bool; ... } ir.Syms.WriteBarrier = sysvar("writeBarrier") // struct { bool; ... }
zerobaseSym = sysvar("zerobase") ir.Syms.Zerobase = sysvar("zerobase")
// asm funcs with special ABI // asm funcs with special ABI
if thearch.LinkArch.Name == "amd64" { if thearch.LinkArch.Name == "amd64" {
@@ -198,12 +198,12 @@ func initssaconfig() {
} }
// Wasm (all asm funcs with special ABIs) // Wasm (all asm funcs with special ABIs)
WasmMove = sysvar("wasmMove") ir.Syms.WasmMove = sysvar("wasmMove")
WasmZero = sysvar("wasmZero") ir.Syms.WasmZero = sysvar("wasmZero")
WasmDiv = sysvar("wasmDiv") ir.Syms.WasmDiv = sysvar("wasmDiv")
WasmTruncS = sysvar("wasmTruncS") ir.Syms.WasmTruncS = sysvar("wasmTruncS")
WasmTruncU = sysvar("wasmTruncU") ir.Syms.WasmTruncU = sysvar("wasmTruncU")
SigPanic = sysfunc("sigpanic") ir.Syms.SigPanic = sysfunc("sigpanic")
} }
// getParam returns the Field of ith param of node n (which is a // getParam returns the Field of ith param of node n (which is a
@@ -1051,11 +1051,11 @@ func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrume
if base.Flag.MSan { if base.Flag.MSan {
switch kind { switch kind {
case instrumentRead: case instrumentRead:
fn = msanread fn = ir.Syms.Msanread
case instrumentWrite: case instrumentWrite:
fn = msanwrite fn = ir.Syms.Msanwrite
case instrumentMove: case instrumentMove:
fn = msanmove fn = ir.Syms.Msanmove
default: default:
panic("unreachable") panic("unreachable")
} }
@@ -1066,9 +1066,9 @@ func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrume
// composites with only one element don't have subobjects, though. // composites with only one element don't have subobjects, though.
switch kind { switch kind {
case instrumentRead: case instrumentRead:
fn = racereadrange fn = ir.Syms.Racereadrange
case instrumentWrite: case instrumentWrite:
fn = racewriterange fn = ir.Syms.Racewriterange
default: default:
panic("unreachable") panic("unreachable")
} }
@@ -1078,9 +1078,9 @@ func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrume
// address, as any write must write the first byte. // address, as any write must write the first byte.
switch kind { switch kind {
case instrumentRead: case instrumentRead:
fn = raceread fn = ir.Syms.Raceread
case instrumentWrite: case instrumentWrite:
fn = racewrite fn = ir.Syms.Racewrite
default: default:
panic("unreachable") panic("unreachable")
} }
@@ -1170,7 +1170,7 @@ func (s *state) stmt(n ir.Node) {
s.callResult(n, callNormal) s.callResult(n, callNormal)
if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC { if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC {
if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" || if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
n.X.Sym().Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") { n.X.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
m := s.mem() m := s.mem()
b := s.endBlock() b := s.endBlock()
b.Kind = ssa.BlockExit b.Kind = ssa.BlockExit
@@ -1677,7 +1677,7 @@ func (s *state) exit() *ssa.Block {
} }
s.openDeferExit() s.openDeferExit()
} else { } else {
s.rtcall(Deferreturn, true, nil) s.rtcall(ir.Syms.Deferreturn, true, nil)
} }
} }
@@ -2612,7 +2612,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
bt := b.Type bt := b.Type
if bt.IsSigned() { if bt.IsSigned() {
cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b) cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
s.check(cmp, panicshift) s.check(cmp, ir.Syms.Panicshift)
bt = bt.ToUnsigned() bt = bt.ToUnsigned()
} }
return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b) return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
@@ -2909,10 +2909,10 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.ONEWOBJ: case ir.ONEWOBJ:
n := n.(*ir.UnaryExpr) n := n.(*ir.UnaryExpr)
if n.Type().Elem().Size() == 0 { if n.Type().Elem().Size() == 0 {
return s.newValue1A(ssa.OpAddr, n.Type(), zerobaseSym, s.sb) return s.newValue1A(ssa.OpAddr, n.Type(), ir.Syms.Zerobase, s.sb)
} }
typ := s.expr(n.X) typ := s.expr(n.X)
vv := s.rtcall(newobject, true, []*types.Type{n.Type()}, typ) vv := s.rtcall(ir.Syms.Newobject, true, []*types.Type{n.Type()}, typ)
return vv[0] return vv[0]
default: default:
@@ -3006,7 +3006,7 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
// Call growslice // Call growslice
s.startBlock(grow) s.startBlock(grow)
taddr := s.expr(n.X) taddr := s.expr(n.X)
r := s.rtcall(growslice, true, []*types.Type{pt, types.Types[types.TINT], types.Types[types.TINT]}, taddr, p, l, c, nl) r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{pt, types.Types[types.TINT], types.Types[types.TINT]}, taddr, p, l, c, nl)
if inplace { if inplace {
if sn.Op() == ir.ONAME { if sn.Op() == ir.ONAME {
@@ -3635,7 +3635,7 @@ func initSSATables() {
return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value { return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
// Target Atomic feature is identified by dynamic detection // Target Atomic feature is identified by dynamic detection
addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), arm64HasATOMICS, s.sb) addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARM64HasATOMICS, s.sb)
v := s.load(types.Types[types.TBOOL], addr) v := s.load(types.Types[types.TBOOL], addr)
b := s.endBlock() b := s.endBlock()
b.Kind = ssa.BlockIf b.Kind = ssa.BlockIf
@ -3860,7 +3860,7 @@ func initSSATables() {
s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64] s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
return s.variable(n, types.Types[types.TFLOAT64]) return s.variable(n, types.Types[types.TFLOAT64])
} }
v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], x86HasFMA) v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasFMA)
b := s.endBlock() b := s.endBlock()
b.Kind = ssa.BlockIf b.Kind = ssa.BlockIf
b.SetControl(v) b.SetControl(v)
@ -3892,7 +3892,7 @@ func initSSATables() {
s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64] s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
return s.variable(n, types.Types[types.TFLOAT64]) return s.variable(n, types.Types[types.TFLOAT64])
} }
addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), armHasVFPv4, s.sb) addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARMHasVFPv4, s.sb)
v := s.load(types.Types[types.TBOOL], addr) v := s.load(types.Types[types.TBOOL], addr)
b := s.endBlock() b := s.endBlock()
b.Kind = ssa.BlockIf b.Kind = ssa.BlockIf
@ -3922,7 +3922,7 @@ func initSSATables() {
makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value { makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value { return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], x86HasSSE41) v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasSSE41)
b := s.endBlock() b := s.endBlock()
b.Kind = ssa.BlockIf b.Kind = ssa.BlockIf
b.SetControl(v) b.SetControl(v)
@ -4128,7 +4128,7 @@ func initSSATables() {
makeOnesCountAMD64 := func(op64 ssa.Op, op32 ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value { makeOnesCountAMD64 := func(op64 ssa.Op, op32 ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value { return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], x86HasPOPCNT) v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasPOPCNT)
b := s.endBlock() b := s.endBlock()
b.Kind = ssa.BlockIf b.Kind = ssa.BlockIf
b.SetControl(v) b.SetControl(v)
@ -4212,9 +4212,9 @@ func initSSATables() {
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value { func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
// check for divide-by-zero/overflow and panic with appropriate message // check for divide-by-zero/overflow and panic with appropriate message
cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64])) cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64]))
s.check(cmpZero, panicdivide) s.check(cmpZero, ir.Syms.Panicdivide)
cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2]) cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2])
s.check(cmpOverflow, panicoverflow) s.check(cmpOverflow, ir.Syms.Panicoverflow)
return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2]) return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
}, },
sys.AMD64) sys.AMD64)
@ -4768,7 +4768,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
// Call runtime.deferprocStack with pointer to _defer record. // Call runtime.deferprocStack with pointer to _defer record.
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(base.Ctxt.FixedFrameSize())}) ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(base.Ctxt.FixedFrameSize())})
aux := ssa.StaticAuxCall(deferprocStack, ACArgs, ACResults) aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, ACArgs, ACResults)
if testLateExpansion { if testLateExpansion {
callArgs = append(callArgs, addr, s.mem()) callArgs = append(callArgs, addr, s.mem())
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux) call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
@ -4844,7 +4844,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
// call target // call target
switch { switch {
case k == callDefer: case k == callDefer:
aux := ssa.StaticAuxCall(deferproc, ACArgs, ACResults) aux := ssa.StaticAuxCall(ir.Syms.Deferproc, ACArgs, ACResults)
if testLateExpansion { if testLateExpansion {
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux) call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...) call.AddArgs(callArgs...)
@ -4852,7 +4852,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem()) call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
} }
case k == callGo: case k == callGo:
aux := ssa.StaticAuxCall(newproc, ACArgs, ACResults) aux := ssa.StaticAuxCall(ir.Syms.Newproc, ACArgs, ACResults)
if testLateExpansion { if testLateExpansion {
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux) call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...) call.AddArgs(callArgs...)
@ -5359,7 +5359,7 @@ func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
if needcheck { if needcheck {
// do a size-appropriate check for zero // do a size-appropriate check for zero
cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type())) cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
s.check(cmp, panicdivide) s.check(cmp, ir.Syms.Panicdivide)
} }
return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b) return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
} }
@ -6063,7 +6063,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if !commaok { if !commaok {
// On failure, panic by calling panicnildottype. // On failure, panic by calling panicnildottype.
s.startBlock(bFail) s.startBlock(bFail)
s.rtcall(panicnildottype, false, nil, target) s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
// On success, return (perhaps modified) input interface. // On success, return (perhaps modified) input interface.
s.startBlock(bOk) s.startBlock(bOk)
@ -6108,16 +6108,16 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
} }
if n.X.Type().IsEmptyInterface() { if n.X.Type().IsEmptyInterface() {
if commaok { if commaok {
call := s.rtcall(assertE2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface) call := s.rtcall(ir.Syms.AssertE2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
return call[0], call[1] return call[0], call[1]
} }
return s.rtcall(assertE2I, true, []*types.Type{n.Type()}, target, iface)[0], nil return s.rtcall(ir.Syms.AssertE2I, true, []*types.Type{n.Type()}, target, iface)[0], nil
} }
if commaok { if commaok {
call := s.rtcall(assertI2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface) call := s.rtcall(ir.Syms.AssertI2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
return call[0], call[1] return call[0], call[1]
} }
return s.rtcall(assertI2I, true, []*types.Type{n.Type()}, target, iface)[0], nil return s.rtcall(ir.Syms.AssertI2I, true, []*types.Type{n.Type()}, target, iface)[0], nil
} }
if base.Debug.TypeAssert > 0 { if base.Debug.TypeAssert > 0 {
@ -6165,9 +6165,9 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
s.startBlock(bFail) s.startBlock(bFail)
taddr := s.expr(n.Ntype.(*ir.AddrExpr).Alloc) taddr := s.expr(n.Ntype.(*ir.AddrExpr).Alloc)
if n.X.Type().IsEmptyInterface() { if n.X.Type().IsEmptyInterface() {
s.rtcall(panicdottypeE, false, nil, itab, target, taddr) s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
} else { } else {
s.rtcall(panicdottypeI, false, nil, itab, target, taddr) s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
} }
// on success, return data from interface // on success, return data from interface
@ -6623,7 +6623,7 @@ func genssa(f *ssa.Func, pp *Progs) {
// deferreturn and a return. This will be used to during panic // deferreturn and a return. This will be used to during panic
// recovery to unwind the stack and return back to the runtime. // recovery to unwind the stack and return back to the runtime.
s.pp.nextLive = s.livenessMap.deferreturn s.pp.nextLive = s.livenessMap.deferreturn
gencallret(pp, Deferreturn) gencallret(pp, ir.Syms.Deferreturn)
} }
if inlMarks != nil { if inlMarks != nil {
@ -7082,14 +7082,14 @@ func (s *SSAGenState) PrepareCall(v *ssa.Value) {
idx := s.livenessMap.Get(v) idx := s.livenessMap.Get(v)
if !idx.StackMapValid() { if !idx.StackMapValid() {
// See Liveness.hasStackMap. // See Liveness.hasStackMap.
if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == typedmemclr || sym.Fn == typedmemmove) { if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.Typedmemclr || sym.Fn == ir.Syms.Typedmemmove) {
base.Fatalf("missing stack map index for %v", v.LongString()) base.Fatalf("missing stack map index for %v", v.LongString())
} }
} }
call, ok := v.Aux.(*ssa.AuxCall) call, ok := v.Aux.(*ssa.AuxCall)
if ok && call.Fn == Deferreturn { if ok && call.Fn == ir.Syms.Deferreturn {
// Deferred calls will appear to be returning to // Deferred calls will appear to be returning to
// the CALL deferreturn(SB) that we are about to emit. // the CALL deferreturn(SB) that we are about to emit.
// However, the stack trace code will show the line // However, the stack trace code will show the line
@ -7321,15 +7321,15 @@ func (e *ssafn) UseWriteBarrier() bool {
func (e *ssafn) Syslook(name string) *obj.LSym { func (e *ssafn) Syslook(name string) *obj.LSym {
switch name { switch name {
case "goschedguarded": case "goschedguarded":
return goschedguarded return ir.Syms.Goschedguarded
case "writeBarrier": case "writeBarrier":
return writeBarrier return ir.Syms.WriteBarrier
case "gcWriteBarrier": case "gcWriteBarrier":
return gcWriteBarrier return ir.Syms.GCWriteBarrier
case "typedmemmove": case "typedmemmove":
return typedmemmove return ir.Syms.Typedmemmove
case "typedmemclr": case "typedmemclr":
return typedmemclr return ir.Syms.Typedmemclr
} }
e.Fatalf(src.NoXPos, "unknown Syslook func %v", name) e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
return nil return nil

View file

@ -523,7 +523,7 @@ func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
} }
func syslook(name string) *ir.Name { func syslook(name string) *ir.Name {
s := Runtimepkg.Lookup(name) s := ir.Pkgs.Runtime.Lookup(name)
if s == nil || s.Def == nil { if s == nil || s.Def == nil {
base.Fatalf("syslook: can't find runtime.%s", name) base.Fatalf("syslook: can't find runtime.%s", name)
} }
@ -1247,7 +1247,7 @@ func paramNnames(ft *types.Type) []ir.Node {
} }
func hashmem(t *types.Type) ir.Node { func hashmem(t *types.Type) ir.Node {
sym := Runtimepkg.Lookup("memhash") sym := ir.Pkgs.Runtime.Lookup("memhash")
n := NewName(sym) n := NewName(sym)
setNodeNameFunc(n) setNodeNameFunc(n)

View file

@ -141,7 +141,7 @@ func initUniverse() {
s.Def = n s.Def = n
dowidth(types.ErrorType) dowidth(types.ErrorType)
types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, unsafepkg, "Pointer") types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, ir.Pkgs.Unsafe, "Pointer")
// simple aliases // simple aliases
types.SimType[types.TMAP] = types.TPTR types.SimType[types.TMAP] = types.TPTR
@ -157,7 +157,7 @@ func initUniverse() {
} }
for _, s := range &unsafeFuncs { for _, s := range &unsafeFuncs {
s2 := unsafepkg.Lookup(s.name) s2 := ir.Pkgs.Unsafe.Lookup(s.name)
def := NewName(s2) def := NewName(s2)
def.BuiltinOp = s.op def.BuiltinOp = s.op
s2.Def = def s2.Def = def

View file

@ -946,15 +946,15 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return l return l
} }
if staticuint64s == nil { if ir.Names.Staticuint64s == nil {
staticuint64s = NewName(Runtimepkg.Lookup("staticuint64s")) ir.Names.Staticuint64s = NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
staticuint64s.Class_ = ir.PEXTERN ir.Names.Staticuint64s.Class_ = ir.PEXTERN
// The actual type is [256]uint64, but we use [256*8]uint8 so we can address // The actual type is [256]uint64, but we use [256*8]uint8 so we can address
// individual bytes. // individual bytes.
staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8)) ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
zerobase = NewName(Runtimepkg.Lookup("zerobase")) ir.Names.Zerobase = NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
zerobase.Class_ = ir.PEXTERN ir.Names.Zerobase.Class_ = ir.PEXTERN
zerobase.SetType(types.Types[types.TUINTPTR]) ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR])
} }
// Optimize convT2{E,I} for many cases in which T is not pointer-shaped, // Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
@ -965,7 +965,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case fromType.Size() == 0: case fromType.Size() == 0:
// n.Left is zero-sized. Use zerobase. // n.Left is zero-sized. Use zerobase.
cheapexpr(n.X, init) // Evaluate n.Left for side-effects. See issue 19246. cheapexpr(n.X, init) // Evaluate n.Left for side-effects. See issue 19246.
value = zerobase value = ir.Names.Zerobase
case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()): case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
// n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian // n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian
// and staticuint64s[n.Left * 8 + 7] on big-endian. // and staticuint64s[n.Left * 8 + 7] on big-endian.
@ -975,7 +975,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if thearch.LinkArch.ByteOrder == binary.BigEndian { if thearch.LinkArch.ByteOrder == binary.BigEndian {
index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, nodintconst(7)) index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, nodintconst(7))
} }
xe := ir.NewIndexExpr(base.Pos, staticuint64s, index) xe := ir.NewIndexExpr(base.Pos, ir.Names.Staticuint64s, index)
xe.SetBounded(true) xe.SetBounded(true)
value = xe value = xe
case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly(): case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly():

View file

@ -0,0 +1,82 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ir
import (
"cmd/compile/internal/types"
"cmd/internal/obj"
)
// Names holds known names.
//
// These are *ir.Name nodes for runtime-provided package-level variables
// that the compiler references directly. They are created lazily on
// first use (see the interface-conversion lowering in walkexpr, which
// looks them up in Pkgs.Runtime and fills in class and type) rather
// than at compiler startup, so either both fields are nil or both are
// populated.
var Names struct {
	// Staticuint64s is runtime.staticuint64s, a read-only table used to
	// materialize small integer/bool values without allocating.
	// The actual type is [256]uint64, but it is declared here as
	// [256*8]uint8 so that individual bytes can be addressed.
	Staticuint64s *Name

	// Zerobase is runtime.zerobase (typed uintptr), the canonical
	// address used for zero-sized allocations.
	Zerobase *Name
}
// Syms holds known symbols.
//
// Each field is the linker symbol (*obj.LSym) of a runtime or
// architecture-support routine that code generation references
// directly, e.g. when emitting calls to runtime.growslice, defer
// machinery, or panic helpers. The fields are populated elsewhere
// during compiler setup (initialization is not visible in this file).
var Syms struct {
	// Type-assertion helpers called by the dottype lowering.
	AssertE2I       *obj.LSym
	AssertE2I2      *obj.LSym
	AssertI2I       *obj.LSym
	AssertI2I2      *obj.LSym
	// defer/go statement helpers (runtime.deferproc etc.).
	Deferproc       *obj.LSym
	DeferprocStack  *obj.LSym
	Deferreturn     *obj.LSym
	// Duff's-device bulk copy/zero routines used by the back ends
	// (ADUFFCOPY/ADUFFZERO targets).
	Duffcopy        *obj.LSym
	Duffzero        *obj.LSym
	GCWriteBarrier  *obj.LSym
	Goschedguarded  *obj.LSym
	Growslice       *obj.LSym
	// MSan instrumentation entry points (formerly gc's msanread,
	// msanwrite, msanmove).
	Msanread        *obj.LSym
	Msanwrite       *obj.LSym
	Msanmove        *obj.LSym
	Newobject       *obj.LSym
	Newproc         *obj.LSym
	// Run-time panic entry points used by generated checks
	// (divide-by-zero, shift, failed type assertion, overflow).
	Panicdivide     *obj.LSym
	Panicshift      *obj.LSym
	PanicdottypeE   *obj.LSym
	PanicdottypeI   *obj.LSym
	Panicnildottype *obj.LSym
	Panicoverflow   *obj.LSym
	// Race-detector instrumentation entry points (formerly gc's
	// raceread, racewrite, and the range variants).
	Raceread        *obj.LSym
	Racereadrange   *obj.LSym
	Racewrite       *obj.LSym
	Racewriterange  *obj.LSym
	// Wasm
	SigPanic        *obj.LSym
	Typedmemclr     *obj.LSym
	Typedmemmove    *obj.LSym
	Udiv            *obj.LSym
	WriteBarrier    *obj.LSym
	Zerobase        *obj.LSym
	// CPU-feature flag symbols consulted at run time by the intrinsics
	// lowering (e.g. FMA, POPCNT, SSE4.1 dynamic detection).
	ARM64HasATOMICS *obj.LSym
	ARMHasVFPv4     *obj.LSym
	X86HasFMA       *obj.LSym
	X86HasPOPCNT    *obj.LSym
	X86HasSSE41     *obj.LSym
	// Wasm
	WasmDiv *obj.LSym
	// Wasm
	WasmMove *obj.LSym
	// Wasm
	WasmZero *obj.LSym
	// Wasm
	WasmTruncS *obj.LSym
	// Wasm
	WasmTruncU *obj.LSym
}
// Pkgs holds known packages.
//
// These are the *types.Pkg handles for compiler-known package
// namespaces. For example, Pkgs.Runtime backs syslook's lookups of
// runtime.* names, and Pkgs.Unsafe backs the unsafe builtins created
// in initUniverse. Population happens during front-end setup (not
// visible in this file).
var Pkgs struct {
	Go       *types.Pkg
	Itab     *types.Pkg
	Itablink *types.Pkg
	Map      *types.Pkg
	Msan     *types.Pkg
	Race     *types.Pkg
	Runtime  *types.Pkg // the runtime package; used by syslook/hashmem and lazy Names init
	Track    *types.Pkg
	Unsafe   *types.Pkg // package unsafe; used by initUniverse for unsafe.Pointer and builtins
}

View file

@ -6,6 +6,7 @@ package mips64
import ( import (
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/mips" "cmd/internal/obj/mips"
) )
@ -23,7 +24,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
p.Reg = mips.REGSP p.Reg = mips.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = 8 * (128 - cnt/int64(gc.Widthptr)) p.To.Offset = 8 * (128 - cnt/int64(gc.Widthptr))
} else { } else {
// ADDV $(8+frame+lo-8), SP, r1 // ADDV $(8+frame+lo-8), SP, r1

View file

@ -383,7 +383,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p = s.Prog(obj.ADUFFZERO) p = s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpMIPS64LoweredZero: case ssa.OpMIPS64LoweredZero:
// SUBV $8, R1 // SUBV $8, R1
@ -433,7 +433,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p := s.Prog(obj.ADUFFCOPY) p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffcopy p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpMIPS64LoweredMove: case ssa.OpMIPS64LoweredMove:
// SUBV $8, R1 // SUBV $8, R1

View file

@ -7,6 +7,7 @@ package ppc64
import ( import (
"cmd/compile/internal/base" "cmd/compile/internal/base"
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/ppc64" "cmd/internal/obj/ppc64"
) )
@ -24,7 +25,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
p.Reg = ppc64.REGSP p.Reg = ppc64.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr)) p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr))
} else { } else {
p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0) p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0)

View file

@ -7,6 +7,7 @@ package riscv64
import ( import (
"cmd/compile/internal/base" "cmd/compile/internal/base"
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/riscv" "cmd/internal/obj/riscv"
) )
@ -31,7 +32,7 @@ func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog {
p.Reg = riscv.REG_SP p.Reg = riscv.REG_SP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = 8 * (128 - cnt/int64(gc.Widthptr)) p.To.Offset = 8 * (128 - cnt/int64(gc.Widthptr))
return p return p
} }

View file

@ -614,14 +614,14 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p := s.Prog(obj.ADUFFZERO) p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpRISCV64DUFFCOPY: case ssa.OpRISCV64DUFFCOPY:
p := s.Prog(obj.ADUFFCOPY) p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffcopy p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
default: default:

View file

@ -124,7 +124,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
switch v.Op { switch v.Op {
case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall: case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
s.PrepareCall(v) s.PrepareCall(v)
if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == gc.Deferreturn { if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
// add a resume point before call to deferreturn so it can be called again via jmpdefer // add a resume point before call to deferreturn so it can be called again via jmpdefer
s.Prog(wasm.ARESUMEPOINT) s.Prog(wasm.ARESUMEPOINT)
} }
@ -149,20 +149,20 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
getValue32(s, v.Args[1]) getValue32(s, v.Args[1])
i32Const(s, int32(v.AuxInt)) i32Const(s, int32(v.AuxInt))
p := s.Prog(wasm.ACall) p := s.Prog(wasm.ACall)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove} p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmMove}
case ssa.OpWasmLoweredZero: case ssa.OpWasmLoweredZero:
getValue32(s, v.Args[0]) getValue32(s, v.Args[0])
i32Const(s, int32(v.AuxInt)) i32Const(s, int32(v.AuxInt))
p := s.Prog(wasm.ACall) p := s.Prog(wasm.ACall)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero} p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmZero}
case ssa.OpWasmLoweredNilCheck: case ssa.OpWasmLoweredNilCheck:
getValue64(s, v.Args[0]) getValue64(s, v.Args[0])
s.Prog(wasm.AI64Eqz) s.Prog(wasm.AI64Eqz)
s.Prog(wasm.AIf) s.Prog(wasm.AIf)
p := s.Prog(wasm.ACALLNORESUME) p := s.Prog(wasm.ACALLNORESUME)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic} p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.SigPanic}
s.Prog(wasm.AEnd) s.Prog(wasm.AEnd)
if logopt.Enabled() { if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
@ -314,7 +314,7 @@ func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value, extend bool) {
if v.Type.Size() == 8 { if v.Type.Size() == 8 {
// Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case. // Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
p := s.Prog(wasm.ACall) p := s.Prog(wasm.ACall)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv} p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmDiv}
break break
} }
s.Prog(wasm.AI64DivS) s.Prog(wasm.AI64DivS)
@ -328,7 +328,7 @@ func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value, extend bool) {
s.Prog(wasm.AF64PromoteF32) s.Prog(wasm.AF64PromoteF32)
} }
p := s.Prog(wasm.ACall) p := s.Prog(wasm.ACall)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS} p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncS}
} }
case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U: case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
@ -340,7 +340,7 @@ func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value, extend bool) {
s.Prog(wasm.AF64PromoteF32) s.Prog(wasm.AF64PromoteF32)
} }
p := s.Prog(wasm.ACall) p := s.Prog(wasm.ACall)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU} p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncU}
} }
case ssa.OpWasmF32DemoteF64: case ssa.OpWasmF32DemoteF64:

View file

@ -6,6 +6,7 @@ package x86
import ( import (
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/internal/obj" "cmd/internal/obj"
"cmd/internal/obj/x86" "cmd/internal/obj/x86"
) )
@ -26,7 +27,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, ax *uint32) *obj.Prog
} else if cnt <= int64(128*gc.Widthreg) { } else if cnt <= int64(128*gc.Widthreg) {
p = pp.Appendpp(p, x86.ALEAL, obj.TYPE_MEM, x86.REG_SP, off, obj.TYPE_REG, x86.REG_DI, 0) p = pp.Appendpp(p, x86.ALEAL, obj.TYPE_MEM, x86.REG_SP, off, obj.TYPE_REG, x86.REG_DI, 0)
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_ADDR, 0, 1*(128-cnt/int64(gc.Widthreg))) p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_ADDR, 0, 1*(128-cnt/int64(gc.Widthreg)))
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
} else { } else {
p = pp.Appendpp(p, x86.AMOVL, obj.TYPE_CONST, 0, cnt/int64(gc.Widthreg), obj.TYPE_REG, x86.REG_CX, 0) p = pp.Appendpp(p, x86.AMOVL, obj.TYPE_CONST, 0, cnt/int64(gc.Widthreg), obj.TYPE_REG, x86.REG_CX, 0)
p = pp.Appendpp(p, x86.ALEAL, obj.TYPE_MEM, x86.REG_SP, off, obj.TYPE_REG, x86.REG_DI, 0) p = pp.Appendpp(p, x86.ALEAL, obj.TYPE_MEM, x86.REG_SP, off, obj.TYPE_REG, x86.REG_DI, 0)

View file

@ -10,6 +10,7 @@ import (
"cmd/compile/internal/base" "cmd/compile/internal/base"
"cmd/compile/internal/gc" "cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt" "cmd/compile/internal/logopt"
"cmd/compile/internal/ssa" "cmd/compile/internal/ssa"
"cmd/compile/internal/types" "cmd/compile/internal/types"
@ -671,12 +672,12 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case ssa.Op386DUFFZERO: case ssa.Op386DUFFZERO:
p := s.Prog(obj.ADUFFZERO) p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_ADDR p.To.Type = obj.TYPE_ADDR
p.To.Sym = gc.Duffzero p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.Op386DUFFCOPY: case ssa.Op386DUFFCOPY:
p := s.Prog(obj.ADUFFCOPY) p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_ADDR p.To.Type = obj.TYPE_ADDR
p.To.Sym = gc.Duffcopy p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt p.To.Offset = v.AuxInt
case ssa.OpCopy: // TODO: use MOVLreg for reg->reg copies instead of OpCopy? case ssa.OpCopy: // TODO: use MOVLreg for reg->reg copies instead of OpCopy?