Mirror of https://github.com/golang/go.git (synced 2025-10-30 16:20:58 +00:00)
	[dev.regabi] cmd/compile: use LinksymOffsetExpr in walkConvInterface
This CL updates walkConvInterface to use LinksymOffsetExpr for referencing
runtime.staticuint64s and runtime.zerobase.

Passes toolstash -cmp (surprisingly).

Change-Id: Iad7e30371f89c8a5e176b5ddbc53faf57012ba0d
Reviewed-on: https://go-review.googlesource.com/c/go/+/284229
Trust: Matthew Dempsky <mdempsky@google.com>
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
commit 99a5db11ac
parent 87845d14f9

4 changed files with 14 additions and 19 deletions
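Background for the zerobase change in the walkConvInterface hunk below: all zero-sized values in Go can share a single address, which is what runtime.zerobase provides, so boxing a zero-sized value into an interface needs no fresh allocation. A minimal, self-contained sketch of that observation in ordinary user code (not compiler internals):

package main

import "fmt"

type empty struct{}

func main() {
	// Pointers to distinct zero-sized allocations may compare equal; with the
	// gc toolchain they typically all point at runtime.zerobase.
	a, b := new(empty), new(empty)
	fmt.Println(a == b) // usually true with gc

	// Boxing a zero-sized value therefore needs no per-value storage for the
	// interface's data word.
	var e interface{} = empty{}
	fmt.Println(e) // {}
}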
				
			
src/cmd/compile/internal/ir/expr.go
@@ -477,6 +477,13 @@ func NewLinksymOffsetExpr(pos src.XPos, lsym *obj.LSym, offset int64, typ *types
 	return n
 }
 
+// NewLinksymExpr is NewLinksymOffsetExpr, but with offset fixed at 0.
+func NewLinksymExpr(pos src.XPos, lsym *obj.LSym, typ *types.Type) *LinksymOffsetExpr {
+	return NewLinksymOffsetExpr(pos, lsym, 0, typ)
+}
+
+// NewNameOffsetExpr is NewLinksymOffsetExpr, but taking a *Name
+// representing a global variable instead of an *obj.LSym directly.
 func NewNameOffsetExpr(pos src.XPos, name *Name, offset int64, typ *types.Type) *LinksymOffsetExpr {
 	if name == nil || IsBlank(name) || !(name.Op() == ONAME && name.Class == PEXTERN) {
 		base.FatalfAt(pos, "cannot take offset of nil, blank name or non-global variable: %v", name)
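Conceptually, a LinksymOffsetExpr denotes "the storage of the global named by lsym, starting at the given byte offset, viewed as typ"; the new NewLinksymExpr helper is just the common offset-0 case. A rough analogy in ordinary Go, illustrative only (the global g, the offsets, and the hypothetical gSym/uint16Type names below are made up, and this is not how the compiler itself is implemented):

package main

import (
	"fmt"
	"unsafe"
)

// g stands in for a global whose linker symbol the compiler would hold as an *obj.LSym.
var g = [4]uint16{1, 2, 3, 4}

func main() {
	// Roughly what NewLinksymOffsetExpr(pos, gSym, 4, uint16Type) would denote:
	// the uint16 located 4 bytes into g's storage, i.e. g[2].
	p := (*uint16)(unsafe.Add(unsafe.Pointer(&g), 4))
	fmt.Println(*p) // 3

	// NewLinksymExpr(pos, gSym, uint16Type) is the same thing with offset 0, i.e. g[0].
	fmt.Println(*(*uint16)(unsafe.Pointer(&g))) // 1
}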
src/cmd/compile/internal/ir/symtab.go
@@ -9,12 +9,6 @@ import (
 	"cmd/internal/obj"
 )
 
-// Names holds known names.
-var Names struct {
-	Staticuint64s *Name
-	Zerobase      *Name
-}
-
 // Syms holds known symbols.
 var Syms struct {
 	AssertE2I       *obj.LSym
@@ -46,6 +40,7 @@ var Syms struct {
 	Racewriterange  *obj.LSym
 	// Wasm
 	SigPanic        *obj.LSym
+	Staticuint64s   *obj.LSym
 	Typedmemclr     *obj.LSym
 	Typedmemmove    *obj.LSym
 	Udiv            *obj.LSym
src/cmd/compile/internal/ssagen/ssa.go
@@ -124,6 +124,7 @@ func InitConfig() {
 	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")             // bool
 	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")         // bool
 	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS") // bool
+	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
 	ir.Syms.Typedmemclr = typecheck.LookupRuntimeFunc("typedmemclr")
 	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
 	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")                 // asm func with special ABI
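The symtab.go and ssa.go changes above replace the lazily-built ir.Names entries with an *obj.LSym field in ir.Syms that InitConfig resolves up front, the same way the surrounding runtime symbols are handled. A generic sketch of that "resolve once at startup" pattern (the sym type and lookupRuntimeVar function below are illustrative stand-ins, not the compiler's real API):

package main

import "fmt"

// sym is a stand-in for *obj.LSym.
type sym struct{ name string }

// syms plays the role of ir.Syms: one struct holding every well-known symbol.
var syms struct {
	Staticuint64s *sym
	Zerobase      *sym
}

// lookupRuntimeVar stands in for typecheck.LookupRuntimeVar.
func lookupRuntimeVar(name string) *sym { return &sym{name: "runtime." + name} }

// initConfig resolves the symbols exactly once, up front, so later phases can
// use syms.X directly instead of checking whether a cached Name exists yet.
func initConfig() {
	syms.Staticuint64s = lookupRuntimeVar("staticuint64s")
	syms.Zerobase = lookupRuntimeVar("zerobase")
}

func main() {
	initConfig()
	fmt.Println(syms.Staticuint64s.name, syms.Zerobase.name)
}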
src/cmd/compile/internal/walk/convert.go
@@ -66,17 +66,6 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
 		return l
 	}
 
-	if ir.Names.Staticuint64s == nil {
-		ir.Names.Staticuint64s = typecheck.NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
-		ir.Names.Staticuint64s.Class = ir.PEXTERN
-		// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
-		// individual bytes.
-		ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
-		ir.Names.Zerobase = typecheck.NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
-		ir.Names.Zerobase.Class = ir.PEXTERN
-		ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR])
-	}
-
 	// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
 	// by using an existing addressable value identical to n.Left
 	// or creating one on the stack.
@@ -85,7 +74,7 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
 	case fromType.Size() == 0:
 		// n.Left is zero-sized. Use zerobase.
 		cheapExpr(n.X, init) // Evaluate n.Left for side-effects. See issue 19246.
-		value = ir.Names.Zerobase
+		value = ir.NewLinksymExpr(base.Pos, ir.Syms.Zerobase, types.Types[types.TUINTPTR])
 	case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
 		// n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian
 		// and staticuint64s[n.Left * 8 + 7] on big-endian.
@@ -95,7 +84,10 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
 		if ssagen.Arch.LinkArch.ByteOrder == binary.BigEndian {
 			index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, ir.NewInt(7))
 		}
-		xe := ir.NewIndexExpr(base.Pos, ir.Names.Staticuint64s, index)
+		// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
+		// individual bytes.
+		staticuint64s := ir.NewLinksymExpr(base.Pos, ir.Syms.Staticuint64s, types.NewArray(types.Types[types.TUINT8], 256*8))
+		xe := ir.NewIndexExpr(base.Pos, staticuint64s, index)
 		xe.SetBounded(true)
 		value = xe
 	case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PEXTERN && n.X.(*ir.Name).Readonly():
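The staticuint64s case above is the interesting one: the runtime keeps a 256-entry uint64 table whose entry i holds the value i, so boxing a bool or a one-byte integer can point the interface's data word at a byte inside that table instead of allocating. The symbol is typed as [256*8]uint8 so individual bytes can be addressed, using index v*8 on little-endian targets and v*8+7 on big-endian ones. A self-contained sketch of why those offsets pick out the right byte (plain Go, with a local table standing in for runtime.staticuint64s):

package main

import (
	"fmt"
	"unsafe"
)

// table is a stand-in for runtime.staticuint64s: entry i holds the value i.
var table [256]uint64

func init() {
	for i := range table {
		table[i] = uint64(i)
	}
}

func main() {
	v := byte(42)

	// Reinterpret the table as [256*8]uint8, the same trick the compiler uses
	// so that individual bytes can be addressed.
	bytes := (*[256 * 8]byte)(unsafe.Pointer(&table))

	// On a little-endian machine the value v is the byte at offset v*8; on a
	// big-endian machine it would be at offset v*8 + 7.
	fmt.Println(bytes[int(v)*8]) // 42 on little-endian hardware
}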