// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/types"
	"cmd/internal/src"
	"fmt"
	"sort"
)
2020-07-09 15:47:26 -04:00
2020-08-17 16:57:22 -04:00
type selKey struct {
2021-02-22 21:51:35 -05:00
from * Value // what is selected from
offsetOrIndex int64 // whatever is appropriate for the selector
size int64
typ * types . Type
2020-08-17 16:57:22 -04:00
}
2021-02-01 13:26:47 -05:00
type Abi1RO uint8 // An offset within a parameter's slice of register indices, for abi1.
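
// isBlockMultiValueExit reports whether b is a Ret or RetJmp block whose
// control value is an OpMakeResult, i.e., an exit carrying expanded results.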
func isBlockMultiValueExit(b *Block) bool {
	return (b.Kind == BlockRet || b.Kind == BlockRetJmp) && b.Controls[0] != nil && b.Controls[0].Op == OpMakeResult
}
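
// badVal returns an error describing the unexpected value v, prefixed with the message s.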
func badVal(s string, v *Value) error {
	return fmt.Errorf("%s %s", s, v.LongString())
}

// removeTrivialWrapperTypes unwraps layers of
// struct { singleField SomeType } and [1]SomeType
// until a non-wrapper type is reached. This is useful
// for working with assignments to/from interface data
// fields (either second operand to OpIMake or OpIData)
// where the wrapping or type conversion can be elided
// because of type conversions/assertions in source code
// that do not appear in SSA.
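//
// For example (illustrative only), given
//
//	type W struct{ x [1]int64 }
//
// removeTrivialWrapperTypes reduces W to int64, unwrapping first the
// single-field struct and then the one-element array.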
func removeTrivialWrapperTypes(t *types.Type) *types.Type {
	for {
		if t.IsStruct() && t.NumFields() == 1 {
			t = t.Field(0).Type
			continue
		}
		if t.IsArray() && t.NumElem() == 1 {
			t = t.Elem()
			continue
		}
		break
	}
	return t
}

// A registerCursor tracks which register is used for an Arg or regValues, or a piece of such.
type registerCursor struct {
	// TODO(register args) convert this to a generalized target cursor.
	storeDest *Value // if there are no register targets, then this is the base of the store.
	regsLen   int    // the number of registers available for this Arg/result (which is all in registers or not at all)
	nextSlice Abi1RO // the next register/register-slice offset
	config    *abi.ABIConfig
	regValues *[]*Value // values assigned to registers accumulate here
}

func (rc *registerCursor) String() string {
	dest := "<none>"
	if rc.storeDest != nil {
		dest = rc.storeDest.String()
	}
	regs := "<none>"
	if rc.regValues != nil {
		regs = ""
		for i, x := range *rc.regValues {
			if i > 0 {
				regs = regs + "; "
			}
			regs = regs + x.LongString()
		}
	}
	// not printing the config because that has not been useful
	return fmt.Sprintf("RCSR{storeDest=%v, regsLen=%d, nextSlice=%d, regValues=[%s]}", dest, rc.regsLen, rc.nextSlice, regs)
}

// next effectively post-increments the register cursor; the receiver is advanced,
// the old value is returned.
func (c *registerCursor) next(t *types.Type) registerCursor {
	rc := *c
	if int(c.nextSlice) < c.regsLen {
		w := c.config.NumParamRegs(t)
		c.nextSlice += Abi1RO(w)
	}
	return rc
}

// plus returns a register cursor offset from the original, without modifying the original.
func (c *registerCursor) plus(regWidth Abi1RO) registerCursor {
	rc := *c
	rc.nextSlice += regWidth
	return rc
}

const (
	// Register offsets for fields of built-in aggregate types; the ones not listed are zero.
	RO_complex_imag = 1
	RO_string_len   = 1
	RO_slice_len    = 1
	RO_slice_cap    = 2
	RO_iface_data   = 1
)
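
// regWidth returns the number of registers (under abi1) that a value of type t occupies.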
func (x *expandState) regWidth(t *types.Type) Abi1RO {
	return Abi1RO(x.abi1.NumParamRegs(t))
}

// regOffset returns the register offset of the i'th element of type t
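// (for example, for struct{ a, b int64; c float32 }, regOffset(t, 2) is the sum
// of the register widths of fields a and b).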
func (x *expandState) regOffset(t *types.Type, i int) Abi1RO {
	// TODO maybe cache this in a map if profiling recommends.
	if i == 0 {
		return 0
	}
	if t.IsArray() {
		return Abi1RO(i) * x.regWidth(t.Elem())
	}
	if t.IsStruct() {
		k := Abi1RO(0)
		for j := 0; j < i; j++ {
			k += x.regWidth(t.FieldType(j))
		}
		return k
	}
	panic("Haven't implemented this case yet, do I need to?")
}

// at returns the register cursor for component i of t, where the first
// component is numbered 0.
func (c *registerCursor) at(t *types.Type, i int) registerCursor {
	rc := *c
	if i == 0 || c.regsLen == 0 {
		return rc
	}
	if t.IsArray() {
		w := c.config.NumParamRegs(t.Elem())
		rc.nextSlice += Abi1RO(i * w)
		return rc
	}
	if t.IsStruct() {
		for j := 0; j < i; j++ {
			rc.next(t.FieldType(j))
		}
		return rc
	}
	panic("Haven't implemented this case yet, do I need to?")
}
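
// init initializes a registerCursor for a parameter or result assigned to the
// registers in regs, accumulating register values into result; if regs is empty,
// storeDest is recorded instead as the base address for memory stores.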
func (c *registerCursor) init(regs []abi.RegIndex, info *abi.ABIParamResultInfo, result *[]*Value, storeDest *Value) {
	c.regsLen = len(regs)
	c.nextSlice = 0
	if len(regs) == 0 {
		c.storeDest = storeDest // only save this if there are no registers, will explode if misused.
		return
	}
	c.config = info.Config()
	c.regValues = result
}

// addArg appends v to the set of values assigned to registers.
func (c *registerCursor) addArg(v *Value) {
	*c.regValues = append(*c.regValues, v)
}

// hasRegs reports whether this cursor targets registers rather than memory.
func (c *registerCursor) hasRegs() bool {
	return c.regsLen > 0
}
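
// expandState holds the per-function state used while expanding calls.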
type expandState struct {
	f                  *Func
	abi1               *abi.ABIConfig
	debug              bool
	canSSAType         func(*types.Type) bool
	regSize            int64
	sp                 *Value
	typs               *Types
	ptrSize            int64
	hiOffset           int64
	lowOffset          int64
	hiRo               Abi1RO
	loRo               Abi1RO
	namedSelects       map[*Value][]namedVal
	sdom               SparseTree
	commonSelectors    map[selKey]*Value // used to de-dupe selectors
	commonArgs         map[selKey]*Value // used to de-dupe OpArg/OpArgIntReg/OpArgFloatReg
	memForCall         map[ID]*Value     // For a call, need to know the unique selector that gets the mem.
	transformedSelects map[ID]bool       // OpSelectN after rewriting, either created or renumbered.
	indentLevel        int               // Indentation for debugging recursion
}

// intPairTypes returns the pair of 32-bit int types needed to encode a 64-bit integer type on a target
// that has no 64-bit integer registers.
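// For TINT64 the high half is Int32 (preserving the sign); otherwise both halves are UInt32.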
func (x *expandState) intPairTypes(et types.Kind) (tHi, tLo *types.Type) {
	tHi = x.typs.UInt32
	if et == types.TINT64 {
		tHi = x.typs.Int32
	}
	tLo = x.typs.UInt32
	return
}

// isAlreadyExpandedAggregateType returns whether a type is an SSA-able "aggregate" (multiple register) type
// that was expanded in an earlier phase (currently, expand_calls is intended to run after decomposeBuiltin,
// so this is all aggregate types -- small struct and array, complex, interface, string, slice, and 64-bit
// integer on 32-bit).
func (x *expandState) isAlreadyExpandedAggregateType(t *types.Type) bool {
	if !x.canSSAType(t) {
		return false
	}
	return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice() ||
		(t.Size() > x.regSize && (t.IsInteger() || (x.f.Config.SoftFloat && t.IsFloat())))
}

// offsetFrom creates an offset from a pointer, simplifying chained offsets and offsets from SP
// TODO should also optimize offsets from SB?
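// For example (illustrative, assuming p is not SP), offsetFrom(b, OffPtr<*T>[8](p), 4, pt)
// folds the chained offsets and yields OffPtr<pt>[12](p).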
func (x *expandState) offsetFrom(b *Block, from *Value, offset int64, pt *types.Type) *Value {
	ft := from.Type
	if offset == 0 {
		if ft == pt {
			return from
		}
		// This captures common, (apparently) safe cases. The unsafe cases involve ft == uintptr
		if (ft.IsPtr() || ft.IsUnsafePtr()) && pt.IsPtr() {
			return from
		}
	}
	// Simplify, canonicalize
	for from.Op == OpOffPtr {
		offset += from.AuxInt
		from = from.Args[0]
	}
	if from == x.sp {
		return x.f.ConstOffPtrSP(pt, offset, x.sp)
	}
	return b.NewValue1I(from.Pos.WithNotStmt(), OpOffPtr, pt, offset, from)
}

// splitSlots splits one "field" (specified by sfx, offset, and ty) out of the LocalSlots in ls and returns the new LocalSlots this generates.
func (x *expandState) splitSlots(ls []*LocalSlot, sfx string, offset int64, ty *types.Type) []*LocalSlot {
	var locs []*LocalSlot
	for i := range ls {
		locs = append(locs, x.f.SplitSlot(ls[i], sfx, offset, ty))
	}
	return locs
}

// prAssignForArg returns the ABIParamAssignment for v, assumed to be an OpArg.
func (x *expandState) prAssignForArg(v *Value) *abi.ABIParamAssignment {
	if v.Op != OpArg {
		panic(badVal("Wanted OpArg, instead saw", v))
	}
	return ParamAssignmentForArgName(x.f, v.Aux.(*ir.Name))
}

// ParamAssignmentForArgName returns the ABIParamAssignment for f's arg with matching name.
func ParamAssignmentForArgName(f *Func, name *ir.Name) *abi.ABIParamAssignment {
	abiInfo := f.OwnAux.abiInfo
	ip := abiInfo.InParams()
	for i, a := range ip {
		if a.Name == name {
			return &ip[i]
		}
	}
	panic(fmt.Errorf("Did not match param %v in prInfo %+v", name, abiInfo.InParams()))
}

// indent increments (or decrements) the indentation.
func (x *expandState) indent(n int) {
	x.indentLevel += n
}

// Printf does an indented fmt.Printf on the format and args.
func (x *expandState) Printf(format string, a ...interface{}) (n int, err error) {
	if x.indentLevel > 0 {
		fmt.Printf("%[1]*s", x.indentLevel, "")
	}
	return fmt.Printf(format, a...)
}

// Calls that need lowering have some number of inputs, including a memory input,
// and produce a tuple of (value1, value2, ..., mem) where valueK may or may not be SSA-able.
// With the current ABI those inputs need to be converted into stores to memory,
// rethreading the call's memory input to the first, and the new call now receiving the last.
// With the current ABI, the outputs need to be converted to loads, which will all use the call's
// memory output as their input.
// rewriteSelect recursively walks from leaf selector to a root (OpSelectN, OpLoad, OpArg)
// through a chain of Struct/Array/builtin Select operations. If the chain of selectors does not
// end in an expected root, it does nothing (this can happen depending on compiler phase ordering).
// The "leaf" provides the type, the root supplies the container, and the leaf-to-root path
// accumulates the offset.
// It emits the code necessary to implement the leaf select operation that leads to the root.
//
// TODO when registers really arrive, must also decompose anything split across two registers or registers and memory.
func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64, regOffset Abi1RO) []*LocalSlot {
	if x.debug {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("rewriteSelect(%s; %s; memOff=%d; regOff=%d)\n", leaf.LongString(), selector.LongString(), offset, regOffset)
	}
	var locs []*LocalSlot
	leafType := leaf.Type
	if len(selector.Args) > 0 {
		w := selector.Args[0]
		if w.Op == OpCopy {
			for w.Op == OpCopy {
				w = w.Args[0]
			}
			selector.SetArg(0, w)
		}
	}
	switch selector.Op {
	case OpArgIntReg, OpArgFloatReg:
		if leafType == selector.Type { // OpIData leads us here, sometimes.
			leaf.copyOf(selector)
		} else {
			x.f.Fatalf("Unexpected %s type, selector=%s, leaf=%s\n", selector.Op.String(), selector.LongString(), leaf.LongString())
		}
		if x.debug {
			x.Printf("---%s, break\n", selector.Op.String())
		}

	case OpArg:
		if !x.isAlreadyExpandedAggregateType(selector.Type) {
			if leafType == selector.Type { // OpIData leads us here, sometimes.
				x.newArgToMemOrRegs(selector, leaf, offset, regOffset, leafType, leaf.Pos)
			} else {
				x.f.Fatalf("Unexpected OpArg type, selector=%s, leaf=%s\n", selector.LongString(), leaf.LongString())
			}
			if x.debug {
				x.Printf("---OpArg, break\n")
			}
			break
		}
		switch leaf.Op {
		case OpIData, OpStructSelect, OpArraySelect:
			leafType = removeTrivialWrapperTypes(leaf.Type)
		}
		x.newArgToMemOrRegs(selector, leaf, offset, regOffset, leafType, leaf.Pos)
		for _, s := range x.namedSelects[selector] {
			locs = append(locs, x.f.Names[s.locIndex])
		}

	case OpLoad: // We end up here because of IData of immediate structures.
		// Failure case:
		// (note the failure case is very rare; w/o this case, make.bash and run.bash both pass, as well as
		// the hard cases of building {syscall,math,math/cmplx,math/bits,go/constant} on ppc64le and mips-softfloat).
		//
		// GOSSAFUNC='(*dumper).dump' go build -gcflags=-l -tags=math_big_pure_go cmd/compile/internal/gc
		// cmd/compile/internal/gc/dump.go:136:14: internal compiler error: '(*dumper).dump': not lowered: v827, StructSelect PTR PTR
		// b2: ← b1
		// v20 (+142) = StaticLECall <interface {},mem> {AuxCall{reflect.Value.Interface([reflect.Value,0])[interface {},24]}} [40] v8 v1
		// v21 (142) = SelectN <mem> [1] v20
		// v22 (142) = SelectN <interface {}> [0] v20
		// b15: ← b8
		// v71 (+143) = IData <Nodes> v22 (v[Nodes])
		// v73 (+146) = StaticLECall <[]*Node,mem> {AuxCall{"".Nodes.Slice([Nodes,0])[[]*Node,8]}} [32] v71 v21
		//
		// translates (w/o the "case OpLoad:" above) to:
		//
		// b2: ← b1
		// v20 (+142) = StaticCall <mem> {AuxCall{reflect.Value.Interface([reflect.Value,0])[interface {},24]}} [40] v715
		// v23 (142) = Load <*uintptr> v19 v20
		// v823 (142) = IsNonNil <bool> v23
		// v67 (+143) = Load <*[]*Node> v880 v20
		// b15: ← b8
		// v827 (146) = StructSelect <*[]*Node> [0] v67
		// v846 (146) = Store <mem> {*[]*Node} v769 v827 v20
		// v73 (+146) = StaticCall <mem> {AuxCall{"".Nodes.Slice([Nodes,0])[[]*Node,8]}} [32] v846
		// i.e., the struct select is generated and remains in because it is not applied to an actual structure.
		// The OpLoad was created to load the single field of the IData
		// This case removes that StructSelect.
		if leafType != selector.Type {
			if x.f.Config.SoftFloat && selector.Type.IsFloat() {
				if x.debug {
					x.Printf("---OpLoad, break\n")
				}
				break // softfloat pass will take care of that
			}
			x.f.Fatalf("Unexpected Load as selector, leaf=%s, selector=%s\n", leaf.LongString(), selector.LongString())
		}
		leaf.copyOf(selector)
		for _, s := range x.namedSelects[selector] {
			locs = append(locs, x.f.Names[s.locIndex])
		}

	case OpSelectN:
		// TODO(register args) result case
		// if applied to Op-mumble-call, the Aux tells us which result, regOffset specifies offset within result. If a register, should rewrite to OpSelectN for new call.
		// TODO these may be duplicated. Should memoize. Intermediate selectors will go dead, no worries there.
		call := selector.Args[0]
		call0 := call
		aux := call.Aux.(*AuxCall)
		which := selector.AuxInt
		if x.transformedSelects[selector.ID] {
			// This is a minor hack. Either this select has had its operand adjusted (mem) or
			// it is some other intermediate node that was rewritten to reference a register (not a generic arg).
			// This can occur with chains of selection/indexing from single field/element aggregates.
			leaf.copyOf(selector)
			break
		}
		if which == aux.NResults() { // mem is after the results.
			// rewrite v as a Copy of call -- the replacement call will produce a mem.
			if leaf != selector {
				panic(fmt.Errorf("Unexpected selector of memory, selector=%s, call=%s, leaf=%s", selector.LongString(), call.LongString(), leaf.LongString()))
			}
			if aux.abiInfo == nil {
				panic(badVal("aux.abiInfo nil for call", call))
			}
			if existing := x.memForCall[call.ID]; existing == nil {
				selector.AuxInt = int64(aux.abiInfo.OutRegistersUsed())
				x.memForCall[call.ID] = selector
				x.transformedSelects[selector.ID] = true // operand adjusted
			} else {
				selector.copyOf(existing)
			}

		} else {
			leafType := removeTrivialWrapperTypes(leaf.Type)
			if x.canSSAType(leafType) {
				pt := types.NewPtr(leafType)
				// Any selection right out of the arg area/registers has to be same Block as call, use call as mem input.
				// Create a "mem" for any loads that need to occur.
				if mem := x.memForCall[call.ID]; mem != nil {
					if mem.Block != call.Block {
						panic(fmt.Errorf("selector and call need to be in same block, selector=%s; call=%s", selector.LongString(), call.LongString()))
					}
					call = mem
				} else {
					mem = call.Block.NewValue1I(call.Pos.WithNotStmt(), OpSelectN, types.TypeMem, int64(aux.abiInfo.OutRegistersUsed()), call)
					x.transformedSelects[mem.ID] = true // select uses post-expansion indexing
					x.memForCall[call.ID] = mem
					call = mem
				}
				outParam := aux.abiInfo.OutParam(int(which))
				if len(outParam.Registers) > 0 {
					firstReg := uint32(0)
					for i := 0; i < int(which); i++ {
						firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
					}
					reg := int64(regOffset + Abi1RO(firstReg))
					if leaf.Block == call.Block {
						leaf.reset(OpSelectN)
						leaf.SetArgs1(call0)
						leaf.Type = leafType
						leaf.AuxInt = reg
						x.transformedSelects[leaf.ID] = true // leaf, rewritten to use post-expansion indexing.
					} else {
						w := call.Block.NewValue1I(leaf.Pos, OpSelectN, leafType, reg, call0)
						x.transformedSelects[w.ID] = true // select, using post-expansion indexing.
						leaf.copyOf(w)
					}
				} else {
					off := x.offsetFrom(x.f.Entry, x.sp, offset+aux.OffsetOfResult(which), pt)
					if leaf.Block == call.Block {
						leaf.reset(OpLoad)
						leaf.SetArgs2(off, call)
						leaf.Type = leafType
					} else {
						w := call.Block.NewValue2(leaf.Pos, OpLoad, leafType, off, call)
						leaf.copyOf(w)
						if x.debug {
							x.Printf("---new %s\n", w.LongString())
						}
					}
				}
				for _, s := range x.namedSelects[selector] {
					locs = append(locs, x.f.Names[s.locIndex])
				}
			} else {
				x.f.Fatalf("Should not have non-SSA-able OpSelectN, selector=%s", selector.LongString())
			}
		}

	case OpStructSelect:
		w := selector.Args[0]
		var ls []*LocalSlot
		if w.Type.Kind() != types.TSTRUCT { // IData artifact
			ls = x.rewriteSelect(leaf, w, offset, regOffset)
		} else {
			fldi := int(selector.AuxInt)
			ls = x.rewriteSelect(leaf, w, offset+w.Type.FieldOff(fldi), regOffset+x.regOffset(w.Type, fldi))
			if w.Op != OpIData {
				for _, l := range ls {
					locs = append(locs, x.f.SplitStruct(l, int(selector.AuxInt)))
				}
			}
		}

	case OpArraySelect:
		w := selector.Args[0]
		index := selector.AuxInt
		x.rewriteSelect(leaf, w, offset+selector.Type.Size()*index, regOffset+x.regOffset(w.Type, int(index)))

	case OpInt64Hi:
		w := selector.Args[0]
		ls := x.rewriteSelect(leaf, w, offset+x.hiOffset, regOffset+x.hiRo)
		locs = x.splitSlots(ls, ".hi", x.hiOffset, leafType)

	case OpInt64Lo:
		w := selector.Args[0]
		ls := x.rewriteSelect(leaf, w, offset+x.lowOffset, regOffset+x.loRo)
		locs = x.splitSlots(ls, ".lo", x.lowOffset, leafType)

	case OpStringPtr:
		ls := x.rewriteSelect(leaf, selector.Args[0], offset, regOffset)
		locs = x.splitSlots(ls, ".ptr", 0, x.typs.BytePtr)

	case OpSlicePtr, OpSlicePtrUnchecked:
		w := selector.Args[0]
		ls := x.rewriteSelect(leaf, w, offset, regOffset)
		locs = x.splitSlots(ls, ".ptr", 0, types.NewPtr(w.Type.Elem()))

	case OpITab:
		w := selector.Args[0]
		ls := x.rewriteSelect(leaf, w, offset, regOffset)
		sfx := ".itab"
		if w.Type.IsEmptyInterface() {
			sfx = ".type"
		}
		locs = x.splitSlots(ls, sfx, 0, x.typs.Uintptr)

	case OpComplexReal:
		ls := x.rewriteSelect(leaf, selector.Args[0], offset, regOffset)
		locs = x.splitSlots(ls, ".real", 0, selector.Type)

	case OpComplexImag:
		ls := x.rewriteSelect(leaf, selector.Args[0], offset+selector.Type.Size(), regOffset+RO_complex_imag) // result is FloatNN, width of result is offset of imaginary part.
		locs = x.splitSlots(ls, ".imag", selector.Type.Size(), selector.Type)

	case OpStringLen, OpSliceLen:
		ls := x.rewriteSelect(leaf, selector.Args[0], offset+x.ptrSize, regOffset+RO_slice_len)
		locs = x.splitSlots(ls, ".len", x.ptrSize, leafType)

	case OpIData:
		ls := x.rewriteSelect(leaf, selector.Args[0], offset+x.ptrSize, regOffset+RO_iface_data)
		locs = x.splitSlots(ls, ".data", x.ptrSize, leafType)

	case OpSliceCap:
		ls := x.rewriteSelect(leaf, selector.Args[0], offset+2*x.ptrSize, regOffset+RO_slice_cap)
		locs = x.splitSlots(ls, ".cap", 2*x.ptrSize, leafType)

	case OpCopy: // If it's an intermediate result, recurse
		locs = x.rewriteSelect(leaf, selector.Args[0], offset, regOffset)
		for _, s := range x.namedSelects[selector] {
			// this copy may have had its own name, preserve that, too.
			locs = append(locs, x.f.Names[s.locIndex])
		}

	default:
		// Ignore dead ends. These can occur if this phase is run before decompose builtin (which is not intended, but allowed).
	}

	return locs
}
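
// rewriteDereference turns the OpDereference a into an OpMove of size bytes from
// a's pointer operand to offset past base, returning the resulting memory. If a
// has a single use in block b it is rewritten in place; otherwise a new Move is created.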
func (x *expandState) rewriteDereference(b *Block, base, a, mem *Value, offset, size int64, typ *types.Type, pos src.XPos) *Value {
	source := a.Args[0]
	dst := x.offsetFrom(b, base, offset, source.Type)
	if a.Uses == 1 && a.Block == b {
		a.reset(OpMove)
		a.Pos = pos
		a.Type = types.TypeMem
		a.Aux = typ
		a.AuxInt = size
		a.SetArgs3(dst, source, mem)
		mem = a
	} else {
		mem = b.NewValue3A(pos, OpMove, types.TypeMem, typ, dst, source, mem)
		mem.AuxInt = size
	}
	return mem
}

var indexNames [1]string = [1]string{"[0]"}

// pathTo returns the selection path to the leaf type at offset within container.
// e.g. len(thing.field[0]) => ".field[0].len"
// this is for purposes of generating names ultimately fed to a debugger.
func (x *expandState) pathTo(container, leaf *types.Type, offset int64) string {
	if container == leaf || offset == 0 && container.Size() == leaf.Size() {
		return ""
	}
	path := ""
outer:
	for {
		switch container.Kind() {
		case types.TARRAY:
			container = container.Elem()
			if container.Size() == 0 {
				return path
			}
			i := offset / container.Size()
			offset = offset % container.Size()
			// If a future compiler/ABI supports larger SSA/Arg-able arrays, expand indexNames.
			path = path + indexNames[i]
			continue
		case types.TSTRUCT:
			for i := 0; i < container.NumFields(); i++ {
				fld := container.Field(i)
				if fld.Offset+fld.Type.Size() > offset {
					offset -= fld.Offset
					path += "." + fld.Sym.Name
					container = fld.Type
					continue outer
				}
			}
			return path
		case types.TINT64, types.TUINT64:
			if container.Size() == x.regSize {
				return path
			}
			if offset == x.hiOffset {
				return path + ".hi"
			}
			return path + ".lo"
		case types.TINTER:
			if offset != 0 {
				return path + ".data"
			}
			if container.IsEmptyInterface() {
				return path + ".type"
			}
			return path + ".itab"
		case types.TSLICE:
			if offset == 2*x.regSize {
				return path + ".cap"
			}
			fallthrough
		case types.TSTRING:
			if offset == 0 {
				return path + ".ptr"
			}
			return path + ".len"
		case types.TCOMPLEX64, types.TCOMPLEX128:
			if offset == 0 {
				return path + ".real"
			}
			return path + ".imag"
		}
		return path
	}
}

// decomposeArg is a helper for storeArgOrLoad.
// It decomposes a Load or an Arg into smaller parts and returns the new mem.
// If the type does not match one of the expected aggregate types, it returns nil instead.
// Parameters:
//  pos -- the location of any generated code.
//  b -- the block into which any generated code should normally be placed
//  source -- the value, possibly an aggregate, to be stored.
//  mem -- the mem flowing into this decomposition (loads depend on it, stores update it)
//  t -- the type of the value to be stored
//  storeOffset -- if the value is stored in memory, it is stored at base (see storeRc) + storeOffset
//  loadRegOffset -- regarding source as a value in registers, the register offset in ABI1. Meaningful only if source is OpArg.
//  storeRc -- storeRC; if the value is stored in registers, this specifies the registers.
//    StoreRc also identifies whether the target is registers or memory, and has the base for the store operation.
func (x *expandState) decomposeArg(pos src.XPos, b *Block, source, mem *Value, t *types.Type, storeOffset int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	pa := x.prAssignForArg(source)
	var locs []*LocalSlot
	for _, s := range x.namedSelects[source] {
		locs = append(locs, x.f.Names[s.locIndex])
	}
	if len(pa.Registers) > 0 {
		// Handle the in-registers case directly
		rts, offs := pa.RegisterTypesAndOffsets()
		last := loadRegOffset + x.regWidth(t)
		if offs[loadRegOffset] != 0 {
			// Document the problem before panicking.
			for i := 0; i < len(rts); i++ {
				rt := rts[i]
				off := offs[i]
				fmt.Printf("rt=%s, off=%d, rt.Width=%d, rt.Align=%d\n", rt.String(), off, rt.Size(), uint8(rt.Alignment()))
			}
			panic(fmt.Errorf("offset %d of requested register %d should be zero, source=%s", offs[loadRegOffset], loadRegOffset, source.LongString()))
		}
		if x.debug {
			x.Printf("decompose arg %s has %d locs\n", source.LongString(), len(locs))
		}
		for i := loadRegOffset; i < last; i++ {
			rt := rts[i]
			off := offs[i]
			w := x.commonArgs[selKey{source, off, rt.Size(), rt}]
			if w == nil {
				w = x.newArgToMemOrRegs(source, w, off, i, rt, pos)
				suffix := x.pathTo(source.Type, rt, off)
				if suffix != "" {
					x.splitSlotsIntoNames(locs, suffix, off, rt, w)
				}
			}
			if t.IsPtrShaped() {
				// Preserve the original store type. This ensures pointer type
				// properties aren't discarded (e.g, notinheap).
				if rt.Size() != t.Size() || len(pa.Registers) != 1 || i != loadRegOffset {
					b.Func.Fatalf("incompatible store type %v and %v, i=%d", t, rt, i)
				}
				rt = t
			}
			mem = x.storeArgOrLoad(pos, b, w, mem, rt, storeOffset+off, i, storeRc.next(rt))
		}
		return mem
	}
	u := source.Type
	switch u.Kind() {
	case types.TARRAY:
		elem := u.Elem()
		elemRO := x.regWidth(elem)
		for i := int64(0); i < u.NumElem(); i++ {
			elemOff := i * elem.Size()
			mem = storeOneArg(x, pos, b, locs, indexNames[i], source, mem, elem, elemOff, storeOffset+elemOff, loadRegOffset, storeRc.next(elem))
			loadRegOffset += elemRO
			pos = pos.WithNotStmt()
		}
		return mem
	case types.TSTRUCT:
		for i := 0; i < u.NumFields(); i++ {
			fld := u.Field(i)
			mem = storeOneArg(x, pos, b, locs, "."+fld.Sym.Name, source, mem, fld.Type, fld.Offset, storeOffset+fld.Offset, loadRegOffset, storeRc.next(fld.Type))
			loadRegOffset += x.regWidth(fld.Type)
			pos = pos.WithNotStmt()
		}
		return mem
	case types.TINT64, types.TUINT64:
		if t.Size() == x.regSize {
			break
		}
		tHi, tLo := x.intPairTypes(t.Kind())
		mem = storeOneArg(x, pos, b, locs, ".hi", source, mem, tHi, x.hiOffset, storeOffset+x.hiOffset, loadRegOffset+x.hiRo, storeRc.plus(x.hiRo))
		pos = pos.WithNotStmt()
		return storeOneArg(x, pos, b, locs, ".lo", source, mem, tLo, x.lowOffset, storeOffset+x.lowOffset, loadRegOffset+x.loRo, storeRc.plus(x.loRo))
	case types.TINTER:
		sfx := ".itab"
		if u.IsEmptyInterface() {
			sfx = ".type"
		}
		return storeTwoArg(x, pos, b, locs, sfx, ".idata", source, mem, x.typs.Uintptr, x.typs.BytePtr, 0, storeOffset, loadRegOffset, storeRc)
	case types.TSTRING:
		return storeTwoArg(x, pos, b, locs, ".ptr", ".len", source, mem, x.typs.BytePtr, x.typs.Int, 0, storeOffset, loadRegOffset, storeRc)
	case types.TCOMPLEX64:
		return storeTwoArg(x, pos, b, locs, ".real", ".imag", source, mem, x.typs.Float32, x.typs.Float32, 0, storeOffset, loadRegOffset, storeRc)
	case types.TCOMPLEX128:
		return storeTwoArg(x, pos, b, locs, ".real", ".imag", source, mem, x.typs.Float64, x.typs.Float64, 0, storeOffset, loadRegOffset, storeRc)
	case types.TSLICE:
		mem = storeOneArg(x, pos, b, locs, ".ptr", source, mem, x.typs.BytePtr, 0, storeOffset, loadRegOffset, storeRc.next(x.typs.BytePtr))
		return storeTwoArg(x, pos, b, locs, ".len", ".cap", source, mem, x.typs.Int, x.typs.Int, x.ptrSize, storeOffset+x.ptrSize, loadRegOffset+RO_slice_len, storeRc)
	}
	return nil
}
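
// splitSlotsIntoNames records w as a named value for each LocalSlot produced by
// splitting locs with the given suffix, offset, and type, adding any slot not
// already present to f.Names.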
func (x *expandState) splitSlotsIntoNames(locs []*LocalSlot, suffix string, off int64, rt *types.Type, w *Value) {
	wlocs := x.splitSlots(locs, suffix, off, rt)
	for _, l := range wlocs {
		old, ok := x.f.NamedValues[*l]
		x.f.NamedValues[*l] = append(old, w)
		if !ok {
			x.f.Names = append(x.f.Names, l)
		}
	}
}

// decomposeLoad is a helper for storeArgOrLoad.
// It decomposes a Load into smaller parts and returns the new mem.
// If the type does not match one of the expected aggregate types, it returns nil instead.
// Parameters:
//  pos -- the location of any generated code.
//  b -- the block into which any generated code should normally be placed
//  source -- the value, possibly an aggregate, to be stored.
//  mem -- the mem flowing into this decomposition (loads depend on it, stores update it)
//  t -- the type of the value to be stored
//  storeOffset -- if the value is stored in memory, it is stored at base (see storeRc) + storeOffset
//  loadRegOffset -- regarding source as a value in registers, the register offset in ABI1. Meaningful only if source is OpArg.
//  storeRc -- storeRC; if the value is stored in registers, this specifies the registers.
//    StoreRc also identifies whether the target is registers or memory, and has the base for the store operation.
//
// TODO -- this needs cleanup; it just works for SSA-able aggregates, and won't fully generalize to register-args aggregates.
func (x *expandState) decomposeLoad(pos src.XPos, b *Block, source, mem *Value, t *types.Type, storeOffset int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	u := source.Type
	switch u.Kind() {
	case types.TARRAY:
		elem := u.Elem()
		elemRO := x.regWidth(elem)
		for i := int64(0); i < u.NumElem(); i++ {
			elemOff := i * elem.Size()
			mem = storeOneLoad(x, pos, b, source, mem, elem, elemOff, storeOffset+elemOff, loadRegOffset, storeRc.next(elem))
			loadRegOffset += elemRO
			pos = pos.WithNotStmt()
		}
		return mem
	case types.TSTRUCT:
		for i := 0; i < u.NumFields(); i++ {
			fld := u.Field(i)
			mem = storeOneLoad(x, pos, b, source, mem, fld.Type, fld.Offset, storeOffset+fld.Offset, loadRegOffset, storeRc.next(fld.Type))
			loadRegOffset += x.regWidth(fld.Type)
			pos = pos.WithNotStmt()
		}
		return mem
	case types.TINT64, types.TUINT64:
		if t.Size() == x.regSize {
			break
		}
		tHi, tLo := x.intPairTypes(t.Kind())
		mem = storeOneLoad(x, pos, b, source, mem, tHi, x.hiOffset, storeOffset+x.hiOffset, loadRegOffset+x.hiRo, storeRc.plus(x.hiRo))
		pos = pos.WithNotStmt()
		return storeOneLoad(x, pos, b, source, mem, tLo, x.lowOffset, storeOffset+x.lowOffset, loadRegOffset+x.loRo, storeRc.plus(x.loRo))
	case types.TINTER:
		return storeTwoLoad(x, pos, b, source, mem, x.typs.Uintptr, x.typs.BytePtr, 0, storeOffset, loadRegOffset, storeRc)
	case types.TSTRING:
		return storeTwoLoad(x, pos, b, source, mem, x.typs.BytePtr, x.typs.Int, 0, storeOffset, loadRegOffset, storeRc)
	case types.TCOMPLEX64:
		return storeTwoLoad(x, pos, b, source, mem, x.typs.Float32, x.typs.Float32, 0, storeOffset, loadRegOffset, storeRc)
	case types.TCOMPLEX128:
		return storeTwoLoad(x, pos, b, source, mem, x.typs.Float64, x.typs.Float64, 0, storeOffset, loadRegOffset, storeRc)
	case types.TSLICE:
		mem = storeOneLoad(x, pos, b, source, mem, x.typs.BytePtr, 0, storeOffset, loadRegOffset, storeRc.next(x.typs.BytePtr))
		return storeTwoLoad(x, pos, b, source, mem, x.typs.Int, x.typs.Int, x.ptrSize, storeOffset+x.ptrSize, loadRegOffset+RO_slice_len, storeRc)
	}
	return nil
}

// storeOneArg creates a decomposed (one step) arg that is then stored.
// pos and b locate the store instruction, source is the "base" of the value input,
// mem is the input mem, t is the type in question, and offArg and offStore are the offsets from the respective bases.
func storeOneArg(x *expandState, pos src.XPos, b *Block, locs []*LocalSlot, suffix string, source, mem *Value, t *types.Type, argOffset, storeOffset int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	if x.debug {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("storeOneArg(%s; %s; %s; aO=%d; sO=%d; lrO=%d; %s)\n", source.LongString(), mem.String(), t.String(), argOffset, storeOffset, loadRegOffset, storeRc.String())
	}
	w := x.commonArgs[selKey{source, argOffset, t.Size(), t}]
	if w == nil {
		w = x.newArgToMemOrRegs(source, w, argOffset, loadRegOffset, t, pos)
		x.splitSlotsIntoNames(locs, suffix, argOffset, t, w)
	}
	return x.storeArgOrLoad(pos, b, w, mem, t, storeOffset, loadRegOffset, storeRc)
}

// storeOneLoad creates a decomposed (one step) load that is then stored.
func storeOneLoad(x *expandState, pos src.XPos, b *Block, source, mem *Value, t *types.Type, offArg, offStore int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	from := x.offsetFrom(source.Block, source.Args[0], offArg, types.NewPtr(t))
	w := source.Block.NewValue2(source.Pos, OpLoad, t, from, mem)
	return x.storeArgOrLoad(pos, b, w, mem, t, offStore, loadRegOffset, storeRc)
}
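
// storeTwoArg creates a pair of decomposed (one step) args that are then stored.
// The second element is stored t1.Size() past the first, so (as with storeTwoLoad)
// the elements of the pair must not require any additional alignment.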
func storeTwoArg(x *expandState, pos src.XPos, b *Block, locs []*LocalSlot, suffix1 string, suffix2 string, source, mem *Value, t1, t2 *types.Type, offArg, offStore int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	mem = storeOneArg(x, pos, b, locs, suffix1, source, mem, t1, offArg, offStore, loadRegOffset, storeRc.next(t1))
	pos = pos.WithNotStmt()
	t1Size := t1.Size()
	return storeOneArg(x, pos, b, locs, suffix2, source, mem, t2, offArg+t1Size, offStore+t1Size, loadRegOffset+1, storeRc)
}

// storeTwoLoad creates a pair of decomposed (one step) loads that are then stored.
// the elements of the pair must not require any additional alignment.
func storeTwoLoad(x *expandState, pos src.XPos, b *Block, source, mem *Value, t1, t2 *types.Type, offArg, offStore int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	mem = storeOneLoad(x, pos, b, source, mem, t1, offArg, offStore, loadRegOffset, storeRc.next(t1))
	pos = pos.WithNotStmt()
	t1Size := t1.Size()
	return storeOneLoad(x, pos, b, source, mem, t2, offArg+t1Size, offStore+t1Size, loadRegOffset+1, storeRc)
}

// storeArgOrLoad converts stores of SSA-able potentially aggregatable arguments (passed to a call) into a series of primitive-typed
// stores of non-aggregate types. It recursively walks up a chain of selectors until it reaches a Load or an Arg.
// If it does not reach a Load or an Arg, nothing happens; this allows a little freedom in phase ordering.
func (x *expandState) storeArgOrLoad(pos src.XPos, b *Block, source, mem *Value, t *types.Type, storeOffset int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
	if x.debug {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("storeArgOrLoad(%s; %s; %s; %d; %s)\n", source.LongString(), mem.String(), t.String(), storeOffset, storeRc.String())
	}

	// Start with Opcodes that can be disassembled
	switch source.Op {
	case OpCopy:
		return x.storeArgOrLoad(pos, b, source.Args[0], mem, t, storeOffset, loadRegOffset, storeRc)

	case OpLoad, OpDereference:
		ret := x.decomposeLoad(pos, b, source, mem, t, storeOffset, loadRegOffset, storeRc)
		if ret != nil {
			return ret
		}

	case OpArg:
		ret := x.decomposeArg(pos, b, source, mem, t, storeOffset, loadRegOffset, storeRc)
		if ret != nil {
			return ret
		}

	case OpArrayMake0, OpStructMake0:
		// TODO(register args) is this correct for registers?
		return mem

	case OpStructMake1, OpStructMake2, OpStructMake3, OpStructMake4:
		for i := 0; i < t.NumFields(); i++ {
			fld := t.Field(i)
			mem = x.storeArgOrLoad(pos, b, source.Args[i], mem, fld.Type, storeOffset+fld.Offset, 0, storeRc.next(fld.Type))
			pos = pos.WithNotStmt()
		}
		return mem

	case OpArrayMake1:
		return x.storeArgOrLoad(pos, b, source.Args[0], mem, t.Elem(), storeOffset, 0, storeRc.at(t, 0))

	case OpInt64Make:
		tHi, tLo := x.intPairTypes(t.Kind())
		mem = x.storeArgOrLoad(pos, b, source.Args[0], mem, tHi, storeOffset+x.hiOffset, 0, storeRc.next(tHi))
		pos = pos.WithNotStmt()
		return x.storeArgOrLoad(pos, b, source.Args[1], mem, tLo, storeOffset+x.lowOffset, 0, storeRc)

	case OpComplexMake:
		tPart := x.typs.Float32
		wPart := t.Size() / 2
		if wPart == 8 {
			tPart = x.typs.Float64
		}
		mem = x.storeArgOrLoad(pos, b, source.Args[0], mem, tPart, storeOffset, 0, storeRc.next(tPart))
		pos = pos.WithNotStmt()
		return x.storeArgOrLoad(pos, b, source.Args[1], mem, tPart, storeOffset+wPart, 0, storeRc)

	case OpIMake:
		mem = x.storeArgOrLoad(pos, b, source.Args[0], mem, x.typs.Uintptr, storeOffset, 0, storeRc.next(x.typs.Uintptr))
		pos = pos.WithNotStmt()
		return x.storeArgOrLoad(pos, b, source.Args[1], mem, x.typs.BytePtr, storeOffset+x.ptrSize, 0, storeRc)

	case OpStringMake:
		mem = x.storeArgOrLoad(pos, b, source.Args[0], mem, x.typs.BytePtr, storeOffset, 0, storeRc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		return x.storeArgOrLoad(pos, b, source.Args[1], mem, x.typs.Int, storeOffset+x.ptrSize, 0, storeRc)

	case OpSliceMake:
		mem = x.storeArgOrLoad(pos, b, source.Args[0], mem, x.typs.BytePtr, storeOffset, 0, storeRc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		mem = x.storeArgOrLoad(pos, b, source.Args[1], mem, x.typs.Int, storeOffset+x.ptrSize, 0, storeRc.next(x.typs.Int))
		return x.storeArgOrLoad(pos, b, source.Args[2], mem, x.typs.Int, storeOffset+2*x.ptrSize, 0, storeRc)
	}

	// For nodes that cannot be taken apart -- OpSelectN, other structure selectors.
	switch t.Kind() {
	case types.TARRAY:
		elt := t.Elem()
		if source.Type != t && t.NumElem() == 1 && elt.Size() == t.Size() && t.Size() == x.regSize {
			t = removeTrivialWrapperTypes(t)
			// it could be a leaf type, but the "leaf" could be complex64 (for example)
			return x.storeArgOrLoad(pos, b, source, mem, t, storeOffset, loadRegOffset, storeRc)
		}
		eltRO := x.regWidth(elt)
		for i := int64(0); i < t.NumElem(); i++ {
			sel := source.Block.NewValue1I(pos, OpArraySelect, elt, i, source)
			mem = x.storeArgOrLoad(pos, b, sel, mem, elt, storeOffset+i*elt.Size(), loadRegOffset, storeRc.at(t, 0))
			loadRegOffset += eltRO
			pos = pos.WithNotStmt()
		}
		return mem

	case types.TSTRUCT:
		if source.Type != t && t.NumFields() == 1 && t.Field(0).Type.Size() == t.Size() && t.Size() == x.regSize {
			// This peculiar test deals with accesses to immediate interface data.
			// It works okay because everything is the same size.
			// Example code that triggers this can be found in go/constant/value.go, function ToComplex
			// v119 (+881) = IData <intVal> v6
			// v121 (+882) = StaticLECall <floatVal,mem> {AuxCall{"".itof([intVal,0])[floatVal,8]}} [16] v119 v1
			// This corresponds to the generic rewrite rule "(StructSelect [0] (IData x)) => (IData x)"
			// Guard against "struct{struct{*foo}}"
			// Other rewriting phases create minor glitches when they transform IData, for instance the
			// interface-typed Arg "x" of ToFloat in go/constant/value.go
			// v6 (858) = Arg <Value> {x} (x[Value], x[Value])
			// is rewritten by decomposeArgs into
			// v141 (858) = Arg <uintptr> {x}
			// v139 (858) = Arg <*uint8> {x} [8]
			// because of a type case clause on line 862 of go/constant/value.go
			// case intVal:
			// return itof(x)
			// v139 is later stored as an intVal == struct{val *big.Int} which naively requires the fields of
			// of a *uint8, which does not succeed.
			t = removeTrivialWrapperTypes(t)
			// it could be a leaf type, but the "leaf" could be complex64 (for example)
			return x.storeArgOrLoad(pos, b, source, mem, t, storeOffset, loadRegOffset, storeRc)
		}

		for i := 0; i < t.NumFields(); i++ {
			fld := t.Field(i)
			sel := source.Block.NewValue1I(pos, OpStructSelect, fld.Type, int64(i), source)
			mem = x.storeArgOrLoad(pos, b, sel, mem, fld.Type, storeOffset+fld.Offset, loadRegOffset, storeRc.next(fld.Type))
			loadRegOffset += x.regWidth(fld.Type)
			pos = pos.WithNotStmt()
		}
		return mem

	case types.TINT64, types.TUINT64:
		if t.Size() == x.regSize {
			break
		}
		tHi, tLo := x.intPairTypes(t.Kind())
		sel := source.Block.NewValue1(pos, OpInt64Hi, tHi, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, tHi, storeOffset+x.hiOffset, loadRegOffset+x.hiRo, storeRc.plus(x.hiRo))
		pos = pos.WithNotStmt()
		sel = source.Block.NewValue1(pos, OpInt64Lo, tLo, source)
		return x.storeArgOrLoad(pos, b, sel, mem, tLo, storeOffset+x.lowOffset, loadRegOffset+x.loRo, storeRc.plus(x.hiRo))

	case types.TINTER:
		sel := source.Block.NewValue1(pos, OpITab, x.typs.BytePtr, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, x.typs.BytePtr, storeOffset, loadRegOffset, storeRc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		sel = source.Block.NewValue1(pos, OpIData, x.typs.BytePtr, source)
		return x.storeArgOrLoad(pos, b, sel, mem, x.typs.BytePtr, storeOffset+x.ptrSize, loadRegOffset+RO_iface_data, storeRc)

	case types.TSTRING:
		sel := source.Block.NewValue1(pos, OpStringPtr, x.typs.BytePtr, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, x.typs.BytePtr, storeOffset, loadRegOffset, storeRc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		sel = source.Block.NewValue1(pos, OpStringLen, x.typs.Int, source)
		return x.storeArgOrLoad(pos, b, sel, mem, x.typs.Int, storeOffset+x.ptrSize, loadRegOffset+RO_string_len, storeRc)

	case types.TSLICE:
		et := types.NewPtr(t.Elem())
		sel := source.Block.NewValue1(pos, OpSlicePtr, et, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, et, storeOffset, loadRegOffset, storeRc.next(et))
		pos = pos.WithNotStmt()
		sel = source.Block.NewValue1(pos, OpSliceLen, x.typs.Int, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, x.typs.Int, storeOffset+x.ptrSize, loadRegOffset+RO_slice_len, storeRc.next(x.typs.Int))
		sel = source.Block.NewValue1(pos, OpSliceCap, x.typs.Int, source)
		return x.storeArgOrLoad(pos, b, sel, mem, x.typs.Int, storeOffset+2*x.ptrSize, loadRegOffset+RO_slice_cap, storeRc)

	case types.TCOMPLEX64:
		sel := source.Block.NewValue1(pos, OpComplexReal, x.typs.Float32, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, x.typs.Float32, storeOffset, loadRegOffset, storeRc.next(x.typs.Float32))
		pos = pos.WithNotStmt()
		sel = source.Block.NewValue1(pos, OpComplexImag, x.typs.Float32, source)
		return x.storeArgOrLoad(pos, b, sel, mem, x.typs.Float32, storeOffset+4, loadRegOffset+RO_complex_imag, storeRc)

	case types.TCOMPLEX128:
		sel := source.Block.NewValue1(pos, OpComplexReal, x.typs.Float64, source)
		mem = x.storeArgOrLoad(pos, b, sel, mem, x.typs.Float64, storeOffset, loadRegOffset, storeRc.next(x.typs.Float64))
		pos = pos.WithNotStmt()
		sel = source.Block.NewValue1(pos, OpComplexImag, x.typs.Float64, source)
		return x.storeArgOrLoad(pos, b, sel, mem, x.typs.Float64, storeOffset+8, loadRegOffset+RO_complex_imag, storeRc)
	}

	s := mem
	if source.Op == OpDereference {
		source.Op = OpLoad // For purposes of parameter passing expansion, a Dereference is a Load.
	}
	if storeRc.hasRegs() {
		storeRc.addArg(source)
	} else {
		dst := x.offsetFrom(b, storeRc.storeDest, storeOffset, types.NewPtr(t))
		s = b.NewValue3A(pos, OpStore, types.TypeMem, t, dst, source, mem)
	}
	if x.debug {
		x.Printf("-->storeArg returns %s, storeRc=%s\n", s.LongString(), storeRc.String())
	}
	return s
}
2020-12-29 22:44:30 -05:00
2021-04-05 22:20:41 -04:00
// rewriteArgs replaces all the call-parameter Args of a call with their register translation (if any).
// Preceding parameters (code pointers, closure pointer) are preserved, and the memory input is modified
// to account for any parameter stores required.
// Any of the old Args whose use count falls to zero are marked OpInvalid.
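//
// Illustrative sketch only (SSA shapes are approximate and depend on the target ABI):
// for a call whose two parameters are both stack-assigned,
//
//	v = StaticLECall {AuxCall: f(a T1, b T2)} arg0 arg1 mem0
//
// rewriteArgs threads stores for the stack-assigned parameters through the memory chain,
//
//	s1 = Store {T1} (OffPtr [off0] SP) arg0 mem0
//	s2 = Store {T2} (OffPtr [off1] SP) arg1 s1
//	v  = StaticLECall {AuxCall: f(a T1, b T2)} s2
//
// while register-assigned parameters are instead appended to the call as direct SSA arguments.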
func (x *expandState) rewriteArgs(v *Value, firstArg int) {
	if x.debug {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("rewriteArgs(%s; %d)\n", v.LongString(), firstArg)
	}
	// Thread the stores on the memory arg
	aux := v.Aux.(*AuxCall)
	pos := v.Pos.WithNotStmt()
	m0 := v.MemoryArg()
	mem := m0
	newArgs := []*Value{}
	oldArgs := []*Value{}
	for i, a := range v.Args[firstArg : len(v.Args)-1] { // skip leading non-parameter SSA Args and trailing mem SSA Arg.
		oldArgs = append(oldArgs, a)
		auxI := int64(i)
		aRegs := aux.RegsOfArg(auxI)
		aType := aux.TypeOfArg(auxI)
		if len(aRegs) == 0 && a.Op == OpDereference {
			aOffset := aux.OffsetOfArg(auxI)
			if a.MemoryArg() != m0 {
				x.f.Fatalf("Op...LECall and OpDereference have mismatched mem, %s and %s", v.LongString(), a.LongString())
			}
			// "Dereference" of addressed (probably not-SSA-eligible) value becomes Move
			// TODO(register args) this will be more complicated with registers in the picture.
			mem = x.rewriteDereference(v.Block, x.sp, a, mem, aOffset, aux.SizeOfArg(auxI), aType, pos)
		} else {
			var rc registerCursor
			var result *[]*Value
			var aOffset int64
			if len(aRegs) > 0 {
				result = &newArgs
			} else {
				aOffset = aux.OffsetOfArg(auxI)
			}
			if x.debug {
				x.Printf("...storeArg %s, %v, %d\n", a.LongString(), aType, aOffset)
			}
			rc.init(aRegs, aux.abiInfo, result, x.sp)
			mem = x.storeArgOrLoad(pos, v.Block, a, mem, aType, aOffset, 0, rc)
		}
	}
	var preArgStore [2]*Value
	preArgs := append(preArgStore[:0], v.Args[0:firstArg]...)
	v.resetArgs()
	v.AddArgs(preArgs...)
	v.AddArgs(newArgs...)
	v.AddArg(mem)
	for _, a := range oldArgs {
		if a.Uses == 0 {
			if x.debug {
				x.Printf("...marking %v unused\n", a.LongString())
			}
			a.invalidateRecursively()
		}
	}
}

// expandCalls converts LE (Late Expansion) calls that act like they receive value args into a lower-level form
// that is more oriented to a platform's ABI. The SelectN operations that extract results are rewritten into
// more appropriate forms, and any StructMake or ArrayMake inputs are decomposed until non-struct values are
// reached. On the callee side, OpArg nodes are not decomposed until this phase is run.
// TODO results should not be lowered until this phase.
func expandCalls(f *Func) {
	// Calls that need lowering have some number of inputs, including a memory input,
	// and produce a tuple of (value1, value2, ..., mem) where valueK may or may not be SSA-able.
	// With the current ABI those inputs need to be converted into stores to memory,
	// rethreading the call's memory input to the first store and passing the last store's
	// memory output to the new call.
	// With the current ABI, the outputs need to be converted to loads, which will all use the call's
	// memory output as their input.
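	//
	// Illustrative sketch only (SSA shapes are approximate): a stack-assigned result read as
	//
	//	r = SelectN <T> [0] call
	//
	// is rewritten (via rewriteSelect, below) into a load of the result's stack slot off SP,
	// threaded on the call's memory projection, while a register-assigned result remains a
	// SelectN that later phases map onto its register.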
	sp, _ := f.spSb()
	x := &expandState{
		f:                  f,
		abi1:               f.ABI1,
		debug:              f.pass.debug > 0,
		canSSAType:         f.fe.CanSSA,
		regSize:            f.Config.RegSize,
		sp:                 sp,
		typs:               &f.Config.Types,
		ptrSize:            f.Config.PtrSize,
		namedSelects:       make(map[*Value][]namedVal),
		sdom:               f.Sdom(),
		commonArgs:         make(map[selKey]*Value),
		memForCall:         make(map[ID]*Value),
		transformedSelects: make(map[ID]bool),
	}

	// For 32-bit, need to deal with decomposition of 64-bit integers, which depends on endianness.
	if f.Config.BigEndian {
		x.lowOffset, x.hiOffset = 4, 0
		x.loRo, x.hiRo = 1, 0
	} else {
		x.lowOffset, x.hiOffset = 0, 4
		x.loRo, x.hiRo = 0, 1
	}
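	// For example (illustrative): an int64 stored at stack offset 0 on a 32-bit big-endian
	// target keeps its high half at offset 0 and its low half at offset 4; little-endian is
	// the reverse. loRo and hiRo are the corresponding register-index offsets for register pairs.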
	if x.debug {
		x.Printf("\nexpandCalls(%s)\n", f.Name)
	}

	for i, name := range f.Names {
		t := name.Type
		if x.isAlreadyExpandedAggregateType(t) {
			for j, v := range f.NamedValues[*name] {
				if v.Op == OpSelectN || v.Op == OpArg && x.isAlreadyExpandedAggregateType(v.Type) {
					ns := x.namedSelects[v]
					x.namedSelects[v] = append(ns, namedVal{locIndex: i, valIndex: j})
				}
			}
		}
	}

	// TODO if too slow, whole program iteration can be replaced w/ slices of appropriate values, accumulated in first loop here.

	// Step 0: rewrite the calls to convert args to calls into stores/register movement.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			firstArg := 0
			switch v.Op {
			case OpStaticLECall:
			case OpInterLECall:
				firstArg = 1
			case OpClosureLECall:
				firstArg = 2
			default:
				continue
			}
			x.rewriteArgs(v, firstArg)
		}
		if isBlockMultiValueExit(b) {
			x.indent(3)
			// Very similar to code in rewriteArgs, but results instead of args.
			v := b.Controls[0]
			m0 := v.MemoryArg()
			mem := m0
			aux := f.OwnAux
			pos := v.Pos.WithNotStmt()
			allResults := []*Value{}
			if x.debug {
				x.Printf("multiValueExit rewriting %s\n", v.LongString())
			}
			var oldArgs []*Value
			for j, a := range v.Args[:len(v.Args)-1] {
				oldArgs = append(oldArgs, a)
				i := int64(j)
				auxType := aux.TypeOfResult(i)
				auxBase := b.NewValue2A(v.Pos, OpLocalAddr, types.NewPtr(auxType), aux.NameOfResult(i), x.sp, mem)
				auxOffset := int64(0)
				auxSize := aux.SizeOfResult(i)
				aRegs := aux.RegsOfResult(int64(j))
				if len(aRegs) == 0 && a.Op == OpDereference {
					// Avoid a self-move, and if one is detected try to remove the already-inserted VarDef for the assignment that won't happen.
					if dAddr, dMem := a.Args[0], a.Args[1]; dAddr.Op == OpLocalAddr && dAddr.Args[0].Op == OpSP &&
						dAddr.Args[1] == dMem && dAddr.Aux == aux.NameOfResult(i) {
						if dMem.Op == OpVarDef && dMem.Aux == dAddr.Aux {
							dMem.copyOf(dMem.MemoryArg()) // elide the VarDef
						}
						continue
					}
					mem = x.rewriteDereference(v.Block, auxBase, a, mem, auxOffset, auxSize, auxType, pos)
				} else {
					if a.Op == OpLoad && a.Args[0].Op == OpLocalAddr {
						addr := a.Args[0] // This is a self-move. // TODO(register args) do what here for registers?
						if addr.MemoryArg() == a.MemoryArg() && addr.Aux == aux.NameOfResult(i) {
							continue
						}
					}
					var rc registerCursor
					var result *[]*Value
					if len(aRegs) > 0 {
						result = &allResults
					}
					rc.init(aRegs, aux.abiInfo, result, auxBase)
					mem = x.storeArgOrLoad(v.Pos, b, a, mem, aux.TypeOfResult(i), auxOffset, 0, rc)
				}
			}
			v.resetArgs()
			v.AddArgs(allResults...)
			v.AddArg(mem)
			v.Type = types.NewResults(append(abi.RegisterTypes(aux.abiInfo.OutParams()), types.TypeMem))
			b.SetControl(v)
			for _, a := range oldArgs {
				if a.Uses == 0 {
					if x.debug {
						x.Printf("...marking %v unused\n", a.LongString())
					}
					a.invalidateRecursively()
				}
			}
			if x.debug {
				x.Printf("...multiValueExit new result %s\n", v.LongString())
			}
			x.indent(-3)
		}
	}

	// Step 1: any stores of aggregates remaining are believed to be sourced from call results or args.
	// Decompose those stores into a series of smaller stores, adding selection ops as necessary.
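	//
	// Illustrative sketch only (value shapes are approximate): a remaining store of a string,
	//
	//	mem1 = Store {string} dst s mem0
	//
	// is decomposed by storeArgOrLoad into stores of its fields,
	//
	//	m    = Store {*uint8} dst (StringPtr s) mem0
	//	mem1 = Store {int} (OffPtr [ptrSize] dst) (StringLen s) m
	//
	// with the selector ops (StringPtr/StringLen here) inserted as needed.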
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if v.Op == OpStore {
				t := v.Aux.(*types.Type)
				source := v.Args[1]
				tSrc := source.Type
				iAEATt := x.isAlreadyExpandedAggregateType(t)

				if !iAEATt {
					// guarding against store immediate struct into interface data field -- store type is *uint8
					// TODO can this happen recursively?
					iAEATt = x.isAlreadyExpandedAggregateType(tSrc)
					if iAEATt {
						t = tSrc
					}
				}
				dst, mem := v.Args[0], v.Args[2]
				mem = x.storeArgOrLoad(v.Pos, b, source, mem, t, 0, 0, registerCursor{storeDest: dst})
				v.copyOf(mem)
			}
		}
	}

	val2Preds := make(map[*Value]int32) // Used to accumulate dependency graph of selection operations for topological ordering.

	// Step 2: transform or accumulate selection operations for rewrite in topological order.
	//
	// Aggregate types that have already (in earlier phases) been transformed must be lowered comprehensively to finish
	// the transformation (user-defined structs and arrays, slices, strings, interfaces, complex, 64-bit on 32-bit architectures).
	//
	// Any select-for-addressing applied to call results can be transformed directly.
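	//
	// For example (illustrative), with a call result of type struct{ s string },
	//
	//	a = SelectN [0] call
	//	b = StructSelect [0] a
	//	c = StringLen b
	//
	// forms a dependency chain c -> b -> a that is recorded in val2Preds and then ordered
	// (outermost selectors first, the call result last) for the processing in Step 3.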
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			// Accumulate chains of selectors for processing in topological order
			switch v.Op {
			case OpStructSelect, OpArraySelect,
				OpIData, OpITab,
				OpStringPtr, OpStringLen,
				OpSlicePtr, OpSliceLen, OpSliceCap, OpSlicePtrUnchecked,
				OpComplexReal, OpComplexImag,
				OpInt64Hi, OpInt64Lo:
				w := v.Args[0]
				switch w.Op {
				case OpStructSelect, OpArraySelect, OpSelectN, OpArg:
					val2Preds[w] += 1
					if x.debug {
						x.Printf("v2p[%s] = %d\n", w.LongString(), val2Preds[w])
					}
				}
				fallthrough

			case OpSelectN:
				if _, ok := val2Preds[v]; !ok {
					val2Preds[v] = 0
					if x.debug {
						x.Printf("v2p[%s] = %d\n", v.LongString(), val2Preds[v])
					}
				}

			case OpArg:
				if !x.isAlreadyExpandedAggregateType(v.Type) {
					continue
				}
				if _, ok := val2Preds[v]; !ok {
					val2Preds[v] = 0
					if x.debug {
						x.Printf("v2p[%s] = %d\n", v.LongString(), val2Preds[v])
					}
				}

			case OpSelectNAddr:
				// Do these directly, there are no chains of selectors.
				call := v.Args[0]
				which := v.AuxInt
				aux := call.Aux.(*AuxCall)
				pt := v.Type
				off := x.offsetFrom(x.f.Entry, x.sp, aux.OffsetOfResult(which), pt)
				v.copyOf(off)
			}
		}
	}

	// Step 3: Compute topological order of selectors,
	// then process it in reverse to eliminate duplicates,
	// then forwards to rewrite selectors.
	//
	// All chains of selectors end up in the same block as the call.
	// Compilation must be deterministic, so sort after extracting first zeroes from map.
	// Sorting allows dominators-last order within each batch,
	// so that the backwards scan for duplicates will most often find copies from dominating blocks (it is best-effort).
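	//
	// Consequence (informal): within a batch, values in dominating blocks sort toward the end,
	// so the backwards duplicate scan below encounters them first and installs their selectors
	// in commonSelectors before the dominated values are considered.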
	var toProcess []*Value
	less := func(i, j int) bool {
		vi, vj := toProcess[i], toProcess[j]
		bi, bj := vi.Block, vj.Block
		if bi == bj {
			return vi.ID < vj.ID
		}
		return x.sdom.domorder(bi) > x.sdom.domorder(bj) // reverse the order to put dominators last.
	}

	// Accumulate order in allOrdered
	var allOrdered []*Value
	for v, n := range val2Preds {
		if n == 0 {
			allOrdered = append(allOrdered, v)
		}
	}
	last := 0 // allOrdered[0:last] has been top-sorted and processed
	for len(val2Preds) > 0 {
		toProcess = allOrdered[last:]
		last = len(allOrdered)
		sort.SliceStable(toProcess, less)
		for _, v := range toProcess {
			delete(val2Preds, v)
			if v.Op == OpArg {
				continue // no Args[0], hence done.
			}
			w := v.Args[0]
			n, ok := val2Preds[w]
			if !ok {
				continue
			}
			if n == 1 {
				allOrdered = append(allOrdered, w)
				delete(val2Preds, w)
				continue
			}
			val2Preds[w] = n - 1
		}
	}

	x.commonSelectors = make(map[selKey]*Value)
	// Rewrite duplicate selectors as copies where possible.
	for i := len(allOrdered) - 1; i >= 0; i-- {
		v := allOrdered[i]
		if v.Op == OpArg {
			continue
		}
		w := v.Args[0]
		if w.Op == OpCopy {
			for w.Op == OpCopy {
				w = w.Args[0]
			}
			v.SetArg(0, w)
		}
		typ := v.Type
		if typ.IsMemory() {
			continue // handled elsewhere, not an indexable result
		}
		size := typ.Size()
		offset := int64(0)
		switch v.Op {
		case OpStructSelect:
			if w.Type.Kind() == types.TSTRUCT {
				offset = w.Type.FieldOff(int(v.AuxInt))
			} else { // Immediate interface data artifact, offset is zero.
				f.Fatalf("Expand calls interface data problem, func %s, v=%s, w=%s\n", f.Name, v.LongString(), w.LongString())
			}
		case OpArraySelect:
			offset = size * v.AuxInt
		case OpSelectN:
			offset = v.AuxInt // offset is just a key, really.
		case OpInt64Hi:
			offset = x.hiOffset
		case OpInt64Lo:
			offset = x.lowOffset
		case OpStringLen, OpSliceLen, OpIData:
			offset = x.ptrSize
		case OpSliceCap:
			offset = 2 * x.ptrSize
		case OpComplexImag:
			offset = size
		}
		sk := selKey{from: w, size: size, offsetOrIndex: offset, typ: typ}
		dupe := x.commonSelectors[sk]
		if dupe == nil {
			x.commonSelectors[sk] = v
		} else if x.sdom.IsAncestorEq(dupe.Block, v.Block) {
			if x.debug {
				x.Printf("Duplicate, make %s copy of %s\n", v, dupe)
			}
			v.copyOf(dupe)
		} else {
			// Because values are processed in dominator order, the old commonSelectors[sk] will never dominate after a miss is seen.
			// Installing the new value might match some future values.
			x.commonSelectors[sk] = v
		}
	}

	// Indices of entries in f.Names that need to be deleted.
	var toDelete []namedVal
	// Rewrite selectors.
	for i, v := range allOrdered {
		if x.debug {
			b := v.Block
			x.Printf("allOrdered[%d] = b%d, %s, uses=%d\n", i, b.ID, v.LongString(), v.Uses)
		}
		if v.Uses == 0 {
			v.invalidateRecursively()
			continue
		}
		if v.Op == OpCopy {
			continue
		}
		locs := x.rewriteSelect(v, v, 0, 0)
		// Install new names.
		if v.Type.IsMemory() {
			continue
		}
		// Leaf types may have debug locations
		if !x.isAlreadyExpandedAggregateType(v.Type) {
			for _, l := range locs {
				if _, ok := f.NamedValues[*l]; !ok {
					f.Names = append(f.Names, l)
				}
				f.NamedValues[*l] = append(f.NamedValues[*l], v)
			}
			continue
		}
		if ns, ok := x.namedSelects[v]; ok {
			// Non-leaf types that had debug locations need to lose them.
			toDelete = append(toDelete, ns...)
		}
	}
	deleteNamedVals(f, toDelete)

	// Step 4: rewrite the calls themselves, correcting the type.
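	//
	// Illustrative sketch only: after this step a lowered call such as
	//
	//	v = StaticLECall <T1,T2,mem> {AuxCall} ... mem
	//
	// becomes
	//
	//	v = StaticCall <regTypes...,mem> {AuxCall} ... mem
	//
	// where the new result tuple lists the register-assigned result types followed by memory.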
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpArg:
				x.rewriteArgToMemOrRegs(v)
			case OpStaticLECall:
				v.Op = OpStaticCall
				rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
				v.Type = types.NewResults(append(rts, types.TypeMem))
			case OpClosureLECall:
				v.Op = OpClosureCall
				rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
				v.Type = types.NewResults(append(rts, types.TypeMem))
			case OpInterLECall:
				v.Op = OpInterCall
				rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
				v.Type = types.NewResults(append(rts, types.TypeMem))
			}
		}
	}

	// Step 5: dedup OpArgXXXReg values. Mostly they are already deduplicated by commonArgs,
	// but there are cases where we have the same OpArgXXXReg values with different types.
	// E.g. a string is sometimes decomposed as { *int8, int }, sometimes as { unsafe.Pointer, uintptr }.
	// (Can we avoid that?)
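	//
	// Illustrative duplicate (shapes approximate): the data word of a string argument can appear
	// both as an ArgIntReg of type *uint8 and as an ArgIntReg of type unsafe.Pointer for the same
	// register index. The later value is rewritten to a copy of the first, and an unsafe.Pointer
	// type is upgraded to the concrete pointer type when that type is known.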
	var IArg, FArg [32]*Value
	for _, v := range f.Entry.Values {
		switch v.Op {
		case OpArgIntReg:
			i := v.AuxInt
			if w := IArg[i]; w != nil {
				if w.Type.Size() != v.Type.Size() {
					f.Fatalf("incompatible OpArgIntReg [%d]: %s and %s", i, v.LongString(), w.LongString())
				}
				if w.Type.IsUnsafePtr() && !v.Type.IsUnsafePtr() {
					// Update unsafe.Pointer type if we know the actual pointer type.
					w.Type = v.Type
				}
				// TODO: don't dedup pointer and scalar? Rewrite to OpConvert? Can it happen?
				v.copyOf(w)
			} else {
				IArg[i] = v
			}
		case OpArgFloatReg:
			i := v.AuxInt
			if w := FArg[i]; w != nil {
				if w.Type.Size() != v.Type.Size() {
					f.Fatalf("incompatible OpArgFloatReg [%d]: %v and %v", i, v, w)
				}
				v.copyOf(w)
			} else {
				FArg[i] = v
			}
		}
	}

	// Step 6: elide any copies introduced.
	// Update named values.
	for _, name := range f.Names {
		values := f.NamedValues[*name]
		for i, v := range values {
			if v.Op == OpCopy {
				a := v.Args[0]
				for a.Op == OpCopy {
					a = a.Args[0]
				}
				values[i] = a
			}
		}
	}
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			for i, a := range v.Args {
				if a.Op != OpCopy {
					continue
				}
				aa := copySource(a)
				v.SetArg(i, aa)
				for a.Uses == 0 {
					b := a.Args[0]
					a.invalidateRecursively()
					a = b
				}
			}
		}
	}

	// Rewriting can attach lines to values that are unlikely to survive code generation, so move them to a use.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			for _, a := range v.Args {
				if a.Pos.IsStmt() != src.PosIsStmt {
					continue
				}
				if a.Type.IsMemory() {
					continue
				}
				if a.Pos.Line() != v.Pos.Line() {
					continue
				}
				if !a.Pos.SameFile(v.Pos) {
					continue
				}
				switch a.Op {
				case OpArgIntReg, OpArgFloatReg, OpSelectN:
					v.Pos = v.Pos.WithIsStmt()
					a.Pos = a.Pos.WithDefaultStmt()
				}
			}
		}
	}
}

// rewriteArgToMemOrRegs converts OpArg v in-place into the register version of v,
// if that is appropriate.
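//
// Illustrative only (shapes approximate): an int-typed OpArg assigned to one integer register
// becomes an OpArgIntReg whose AuxInt is the register index and whose Aux records the name at
// offset 0; a stack-assigned OpArg is left as-is after its frame offset is checked for consistency.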
func (x *expandState) rewriteArgToMemOrRegs(v *Value) *Value {
	if x.debug {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("rewriteArgToMemOrRegs(%s)\n", v.LongString())
	}
	pa := x.prAssignForArg(v)
	switch len(pa.Registers) {
	case 0:
		frameOff := v.Aux.(*ir.Name).FrameOffset()
		if pa.Offset() != int32(frameOff+x.f.ABISelf.LocalsOffset()) {
			panic(fmt.Errorf("Parameter assignment %d and OpArg.Aux frameOffset %d disagree, op=%s",
				pa.Offset(), frameOff, v.LongString()))
		}
	case 1:
		t := v.Type
		key := selKey{v, 0, t.Size(), t}
		w := x.commonArgs[key]
		if w != nil {
			v.copyOf(w)
			break
		}
		r := pa.Registers[0]
		var i int64
		v.Op, i = ArgOpAndRegisterFor(r, x.f.ABISelf)
		v.Aux = &AuxNameOffset{v.Aux.(*ir.Name), 0}
		v.AuxInt = i
		x.commonArgs[key] = v

	default:
		panic(badVal("Saw unexpanded OpArg", v))
	}
	if x.debug {
		x.Printf("-->%s\n", v.LongString())
	}
	return v
}

// newArgToMemOrRegs either rewrites toReplace into an OpArg referencing memory or into an OpArgXXXReg to a register,
// or rewrites it into a copy of the appropriate OpArgXXX. The actual OpArgXXX is determined by combining baseArg (an OpArg)
// with offset, regOffset, and t to determine which portion of it to reference (either all or a part, in memory or in registers).
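//
// Illustrative use only (parameter shapes approximate): for a string parameter passed in two
// integer registers, rewriteSelect can ask for its pieces as
//
//	newArgToMemOrRegs(arg, sel, 0, 0, x.typs.BytePtr, pos)                  // data pointer
//	newArgToMemOrRegs(arg, sel, x.ptrSize, RO_string_len, x.typs.Int, pos)  // length
//
// each of which yields (or reuses, via commonArgs) the OpArgIntReg for the corresponding register.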
func (x *expandState) newArgToMemOrRegs(baseArg, toReplace *Value, offset int64, regOffset Abi1RO, t *types.Type, pos src.XPos) *Value {
	if x.debug {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("newArgToMemOrRegs(base=%s; toReplace=%s; t=%s; memOff=%d; regOff=%d)\n", baseArg.String(), toReplace.LongString(), t.String(), offset, regOffset)
	}
	key := selKey{baseArg, offset, t.Size(), t}
	w := x.commonArgs[key]
	if w != nil {
		if toReplace != nil {
			toReplace.copyOf(w)
		}
		return w
	}
	pa := x.prAssignForArg(baseArg)
	if len(pa.Registers) == 0 { // Arg is on stack
		frameOff := baseArg.Aux.(*ir.Name).FrameOffset()
		if pa.Offset() != int32(frameOff+x.f.ABISelf.LocalsOffset()) {
			panic(fmt.Errorf("Parameter assignment %d and OpArg.Aux frameOffset %d disagree, op=%s",
				pa.Offset(), frameOff, baseArg.LongString()))
		}
		aux := baseArg.Aux
		auxInt := baseArg.AuxInt + offset
		if toReplace != nil && toReplace.Block == baseArg.Block {
			toReplace.reset(OpArg)
			toReplace.Aux = aux
			toReplace.AuxInt = auxInt
			toReplace.Type = t
			w = toReplace
		} else {
			w = baseArg.Block.NewValue0IA(pos, OpArg, t, auxInt, aux)
		}
		x.commonArgs[key] = w
		if toReplace != nil {
			toReplace.copyOf(w)
		}
		if x.debug {
			x.Printf("-->%s\n", w.LongString())
		}
		return w
	}
	// Arg is in registers
	r := pa.Registers[regOffset]
	op, auxInt := ArgOpAndRegisterFor(r, x.f.ABISelf)
	if op == OpArgIntReg && t.IsFloat() || op == OpArgFloatReg && t.IsInteger() {
		fmt.Printf("pa=%v\nx.f.OwnAux.abiInfo=%s\n",
			pa.ToString(x.f.ABISelf, true),
			x.f.OwnAux.abiInfo.String())
		panic(fmt.Errorf("Op/Type mismatch, op=%s, type=%s", op.String(), t.String()))
	}
	if baseArg.AuxInt != 0 {
		base.Fatalf("BaseArg %s bound to registers has non-zero AuxInt", baseArg.LongString())
	}
	aux := &AuxNameOffset{baseArg.Aux.(*ir.Name), offset}
	if toReplace != nil && toReplace.Block == baseArg.Block {
		toReplace.reset(op)
		toReplace.Aux = aux
		toReplace.AuxInt = auxInt
		toReplace.Type = t
		w = toReplace
	} else {
		w = baseArg.Block.NewValue0IA(pos, op, t, auxInt, aux)
	}
	x.commonArgs[key] = w
	if toReplace != nil {
		toReplace.copyOf(w)
	}
	if x.debug {
		x.Printf("-->%s\n", w.LongString())
	}
	return w
}

// ArgOpAndRegisterFor converts an abi register index into an ssa Op and corresponding
// arg register index.
func ArgOpAndRegisterFor(r abi.RegIndex, abiConfig *abi.ABIConfig) (Op, int64) {
	i := abiConfig.FloatIndexFor(r)
	if i >= 0 { // float parameter register
		return OpArgFloatReg, i
	}
	return OpArgIntReg, int64(r)
}