cmd/compile: use type information in Aux for Store size

Remove the size AuxInt in Store, and the alignment in Move/Zero. We
still pass the size AuxInt to Move/Zero, as it is used for partial
Move/Zero lowering (e.g. cmd/compile/internal/ssa/gen/386.rules:288).
SizeAndAlign is gone.
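
To illustrate the shape of the change, a minimal sketch distilled from
the diffs below (t, addr, and val stand for any store's type, address,
and value):

	// Before: the width lived in AuxInt and the type was attached
	// to the value afterwards.
	store := s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, val, s.mem())
	store.Aux = t
	s.vars[&memVar] = store

	// After: the type itself is the aux value; consumers recover
	// the width with t.(Type).Size().
	s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, t, addr, val, s.mem())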

Passes "toolstash -cmp" on std.

Change-Id: I1ca34652b65dd30de886940e789fcf41d521475d
Reviewed-on: https://go-review.googlesource.com/38150
Run-TryBot: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
Cherry Zhang 2017-03-13 21:51:08 -04:00
parent d75925d6ba
commit c8f38b3398
41 changed files with 2007 additions and 2108 deletions

src/cmd/compile/fmt_test.go

@@ -640,7 +640,6 @@ var knownFormats = map[string]string{
 	"cmd/compile/internal/ssa.Location %v": "",
 	"cmd/compile/internal/ssa.Op %s":       "",
 	"cmd/compile/internal/ssa.Op %v":       "",
-	"cmd/compile/internal/ssa.SizeAndAlign %s": "",
 	"cmd/compile/internal/ssa.Type %s":      "",
 	"cmd/compile/internal/ssa.Type %v":      "",
 	"cmd/compile/internal/ssa.ValAndOff %s": "",

src/cmd/compile/internal/gc/ssa.go

@@ -369,6 +369,11 @@ func (s *state) newValue3I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1, arg2 *s
 	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
 }
 
+// newValue3A adds a new value with three arguments and an aux value to the current block.
+func (s *state) newValue3A(op ssa.Op, t ssa.Type, aux interface{}, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
+	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
+}
+
 // newValue4 adds a new value with four arguments to the current block.
 func (s *state) newValue4(op ssa.Op, t ssa.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
 	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
@@ -885,9 +890,7 @@ func (s *state) exit() *ssa.Block {
 			addr := s.decladdrs[n]
 			val := s.variable(n, n.Type)
 			s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, n, s.mem())
-			store := s.newValue3I(ssa.OpStore, ssa.TypeMem, n.Type.Size(), addr, val, s.mem())
-			store.Aux = n.Type
-			s.vars[&memVar] = store
+			s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, n.Type, addr, val, s.mem())
 			// TODO: if val is ever spilled, we'd like to use the
 			// PPARAMOUT slot for spilling it. That won't happen
 			// currently.
@@ -2120,12 +2123,8 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
 			s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, sn, s.mem())
 		}
 		capaddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), int64(array_cap), addr)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capaddr, r[2], s.mem())
-		store.Aux = Types[TINT]
-		s.vars[&memVar] = store
-		store = s.newValue3I(ssa.OpStore, ssa.TypeMem, pt.Size(), addr, r[0], s.mem())
-		store.Aux = pt
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TINT], capaddr, r[2], s.mem())
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, pt, addr, r[0], s.mem())
 		// load the value we just stored to avoid having to spill it
 		s.vars[&ptrVar] = s.newValue2(ssa.OpLoad, pt, addr, s.mem())
 		s.vars[&lenVar] = r[1] // avoid a spill in the fast path
@@ -2145,9 +2144,7 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
 		l = s.variable(&lenVar, Types[TINT]) // generates phi for len
 		nl = s.newValue2(s.ssaOp(OADD, Types[TINT]), Types[TINT], l, s.constInt(Types[TINT], nargs))
 		lenaddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), int64(array_nel), addr)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenaddr, nl, s.mem())
-		store.Aux = Types[TINT]
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TINT], lenaddr, nl, s.mem())
 	}
 
 	// Evaluate args
@@ -2178,7 +2175,7 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
 		if arg.store {
 			s.storeType(et, addr, arg.v, 0)
 		} else {
-			store := s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(et), addr, arg.v, s.mem())
+			store := s.newValue3I(ssa.OpMove, ssa.TypeMem, et.Size(), addr, arg.v, s.mem())
 			store.Aux = et
 			s.vars[&memVar] = store
 		}
@@ -2343,9 +2340,9 @@ func (s *state) assign(left *Node, right *ssa.Value, deref bool, skip skipMask)
 		// Treat as a mem->mem move.
 		var store *ssa.Value
 		if right == nil {
-			store = s.newValue2I(ssa.OpZero, ssa.TypeMem, sizeAlignAuxInt(t), addr, s.mem())
+			store = s.newValue2I(ssa.OpZero, ssa.TypeMem, t.Size(), addr, s.mem())
 		} else {
-			store = s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(t), addr, right, s.mem())
+			store = s.newValue3I(ssa.OpMove, ssa.TypeMem, t.Size(), addr, right, s.mem())
 		}
 		store.Aux = t
 		s.vars[&memVar] = store
@@ -2928,9 +2925,7 @@ func (s *state) call(n *Node, k callKind) *ssa.Value {
 			argStart += int64(2 * Widthptr)
 		}
 		addr := s.constOffPtrSP(ptrto(Types[TUINTPTR]), argStart)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, rcvr, s.mem())
-		store.Aux = Types[TUINTPTR]
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TUINTPTR], addr, rcvr, s.mem())
 	}
 
 	// Defer/go args
@@ -2939,13 +2934,9 @@ func (s *state) call(n *Node, k callKind) *ssa.Value {
 		argStart := Ctxt.FixedFrameSize()
 		argsize := s.constInt32(Types[TUINT32], int32(stksize))
 		addr := s.constOffPtrSP(ptrto(Types[TUINT32]), argStart)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, 4, addr, argsize, s.mem())
-		store.Aux = Types[TUINT32]
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TUINT32], addr, argsize, s.mem())
 		addr = s.constOffPtrSP(ptrto(Types[TUINTPTR]), argStart+int64(Widthptr))
-		store = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, closure, s.mem())
-		store.Aux = Types[TUINTPTR]
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TUINTPTR], addr, closure, s.mem())
 		stksize += 2 * int64(Widthptr)
 	}
@@ -3308,9 +3299,7 @@ func (s *state) rtcall(fn *obj.LSym, returns bool, results []*Type, args ...*ssa
 		off = Rnd(off, t.Alignment())
 		ptr := s.constOffPtrSP(t.PtrTo(), off)
 		size := t.Size()
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, size, ptr, arg, s.mem())
-		store.Aux = t
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, t, ptr, arg, s.mem())
 		off += size
 	}
 	off = Rnd(off, int64(Widthptr))
@@ -3366,9 +3355,7 @@ func (s *state) storeType(t *Type, left, right *ssa.Value, skip skipMask) {
 func (s *state) storeTypeScalars(t *Type, left, right *ssa.Value, skip skipMask) {
 	switch {
 	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), left, right, s.mem())
-		store.Aux = t
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, t, left, right, s.mem())
 	case t.IsPtrShaped():
 		// no scalar fields.
 	case t.IsString():
@@ -3377,30 +3364,22 @@ func (s *state) storeTypeScalars(t *Type, left, right *ssa.Value, skip skipMask)
 		}
 		len := s.newValue1(ssa.OpStringLen, Types[TINT], right)
 		lenAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), s.config.IntSize, left)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenAddr, len, s.mem())
-		store.Aux = Types[TINT]
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TINT], lenAddr, len, s.mem())
 	case t.IsSlice():
 		if skip&skipLen == 0 {
 			len := s.newValue1(ssa.OpSliceLen, Types[TINT], right)
 			lenAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), s.config.IntSize, left)
-			store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenAddr, len, s.mem())
-			store.Aux = Types[TINT]
-			s.vars[&memVar] = store
+			s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TINT], lenAddr, len, s.mem())
 		}
 		if skip&skipCap == 0 {
 			cap := s.newValue1(ssa.OpSliceCap, Types[TINT], right)
 			capAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), 2*s.config.IntSize, left)
-			store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capAddr, cap, s.mem())
-			store.Aux = Types[TINT]
-			s.vars[&memVar] = store
+			s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TINT], capAddr, cap, s.mem())
 		}
 	case t.IsInterface():
 		// itab field doesn't need a write barrier (even though it is a pointer).
 		itab := s.newValue1(ssa.OpITab, ptrto(Types[TUINT8]), right)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, left, itab, s.mem())
-		store.Aux = Types[TUINTPTR]
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, Types[TUINTPTR], left, itab, s.mem())
 	case t.IsStruct():
 		n := t.NumFields()
 		for i := 0; i < n; i++ {
@@ -3422,26 +3401,18 @@ func (s *state) storeTypeScalars(t *Type, left, right *ssa.Value, skip skipMask)
 func (s *state) storeTypePtrs(t *Type, left, right *ssa.Value) {
 	switch {
 	case t.IsPtrShaped():
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, right, s.mem())
-		store.Aux = t
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, t, left, right, s.mem())
 	case t.IsString():
 		ptr := s.newValue1(ssa.OpStringPtr, ptrto(Types[TUINT8]), right)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem())
-		store.Aux = ptrto(Types[TUINT8])
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, ptrto(Types[TUINT8]), left, ptr, s.mem())
 	case t.IsSlice():
 		ptr := s.newValue1(ssa.OpSlicePtr, ptrto(Types[TUINT8]), right)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem())
-		store.Aux = ptrto(Types[TUINT8])
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, ptrto(Types[TUINT8]), left, ptr, s.mem())
 	case t.IsInterface():
 		// itab field is treated as a scalar.
 		idata := s.newValue1(ssa.OpIData, ptrto(Types[TUINT8]), right)
 		idataAddr := s.newValue1I(ssa.OpOffPtr, ptrto(ptrto(Types[TUINT8])), s.config.PtrSize, left)
-		store := s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, idataAddr, idata, s.mem())
-		store.Aux = ptrto(Types[TUINT8])
-		s.vars[&memVar] = store
+		s.vars[&memVar] = s.newValue3A(ssa.OpStore, ssa.TypeMem, ptrto(Types[TUINT8]), idataAddr, idata, s.mem())
 	case t.IsStruct():
 		n := t.NumFields()
 		for i := 0; i < n; i++ {
@@ -4042,7 +4013,7 @@ func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) {
 			}
 		} else {
 			p := s.newValue1(ssa.OpIData, ptrto(n.Type), iface)
-			store := s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(n.Type), addr, p, s.mem())
+			store := s.newValue3I(ssa.OpMove, ssa.TypeMem, n.Type.Size(), addr, p, s.mem())
 			store.Aux = n.Type
 			s.vars[&memVar] = store
 		}
@@ -4055,7 +4026,7 @@ func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) {
 		if tmp == nil {
 			s.vars[valVar] = s.zeroVal(n.Type)
 		} else {
-			store := s.newValue2I(ssa.OpZero, ssa.TypeMem, sizeAlignAuxInt(n.Type), addr, s.mem())
+			store := s.newValue2I(ssa.OpZero, ssa.TypeMem, n.Type.Size(), addr, s.mem())
 			store.Aux = n.Type
 			s.vars[&memVar] = store
 		}
@@ -4406,11 +4377,6 @@ func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
 	}
 }
 
-// sizeAlignAuxInt returns an AuxInt encoding the size and alignment of type t.
-func sizeAlignAuxInt(t *Type) int64 {
-	return ssa.MakeSizeAndAlign(t.Size(), t.Alignment()).Int64()
-}
-
 // extendIndex extends v to a full int width.
 // panic using the given function if v does not fit in an int (only on 32-bit archs).
 func (s *state) extendIndex(v *ssa.Value, panicfn *obj.LSym) *ssa.Value {

src/cmd/compile/internal/ssa/check.go

@@ -145,11 +145,9 @@ func checkFunc(f *Func) {
 			if !isExactFloat32(v) {
 				f.Fatalf("value %v has an AuxInt value that is not an exact float32", v)
 			}
-		case auxSizeAndAlign:
-			canHaveAuxInt = true
-		case auxString, auxSym:
+		case auxString, auxSym, auxTyp:
 			canHaveAux = true
-		case auxSymOff, auxSymValAndOff, auxSymSizeAndAlign:
+		case auxSymOff, auxSymValAndOff, auxTypSize:
 			canHaveAuxInt = true
 			canHaveAux = true
 		case auxSymInt32:

src/cmd/compile/internal/ssa/cse_test.go

@@ -38,7 +38,7 @@ func TestCSEAuxPartitionBug(t *testing.T) {
 			Valu("r3", OpAdd64, TypeInt64, 0, nil, "arg1", "arg2"),
 			Valu("r5", OpAdd64, TypeInt64, 0, nil, "r2", "r3"),
 			Valu("r10", OpAdd64, TypeInt64, 0, nil, "r6", "r9"),
-			Valu("rstore", OpStore, TypeMem, 8, nil, "raddr", "r10", "raddrdef"),
+			Valu("rstore", OpStore, TypeMem, 0, TypeInt64, "raddr", "r10", "raddrdef"),
 			Goto("exit")),
 		Bloc("exit",
 			Exit("rstore")))
@@ -104,7 +104,7 @@ func TestZCSE(t *testing.T) {
 			Valu("r3", OpAdd64, TypeInt64, 0, nil, "r1", "r2"),
 			Valu("raddr", OpAddr, TypeInt64Ptr, 0, nil, "sp"),
 			Valu("raddrdef", OpVarDef, TypeMem, 0, nil, "start"),
-			Valu("rstore", OpStore, TypeMem, 8, nil, "raddr", "r3", "raddrdef"),
+			Valu("rstore", OpStore, TypeMem, 0, TypeInt64, "raddr", "r3", "raddrdef"),
 			Goto("exit")),
 		Bloc("exit",
 			Exit("rstore")))

src/cmd/compile/internal/ssa/deadstore.go

@@ -88,9 +88,9 @@ func dse(f *Func) {
 		if v.Op == OpStore || v.Op == OpZero {
 			var sz int64
 			if v.Op == OpStore {
-				sz = v.AuxInt
+				sz = v.Aux.(Type).Size()
 			} else { // OpZero
-				sz = SizeAndAlign(v.AuxInt).Size()
+				sz = v.AuxInt
 			}
 			if shadowedSize := int64(shadowed.get(v.Args[0].ID)); shadowedSize != -1 && shadowedSize >= sz {
 				// Modify store into a copy

src/cmd/compile/internal/ssa/deadstore_test.go

@@ -18,11 +18,11 @@ func TestDeadStore(t *testing.T) {
 			Valu("addr1", OpAddr, ptrType, 0, nil, "sb"),
 			Valu("addr2", OpAddr, ptrType, 0, nil, "sb"),
 			Valu("addr3", OpAddr, ptrType, 0, nil, "sb"),
-			Valu("zero1", OpZero, TypeMem, 1, nil, "addr3", "start"),
-			Valu("store1", OpStore, TypeMem, 1, nil, "addr1", "v", "zero1"),
-			Valu("store2", OpStore, TypeMem, 1, nil, "addr2", "v", "store1"),
-			Valu("store3", OpStore, TypeMem, 1, nil, "addr1", "v", "store2"),
-			Valu("store4", OpStore, TypeMem, 1, nil, "addr3", "v", "store3"),
+			Valu("zero1", OpZero, TypeMem, 1, TypeBool, "addr3", "start"),
+			Valu("store1", OpStore, TypeMem, 0, TypeBool, "addr1", "v", "zero1"),
+			Valu("store2", OpStore, TypeMem, 0, TypeBool, "addr2", "v", "store1"),
+			Valu("store3", OpStore, TypeMem, 0, TypeBool, "addr1", "v", "store2"),
+			Valu("store4", OpStore, TypeMem, 0, TypeBool, "addr3", "v", "store3"),
 			Goto("exit")),
 		Bloc("exit",
 			Exit("store3")))
@@ -54,7 +54,7 @@ func TestDeadStorePhi(t *testing.T) {
 			Goto("loop")),
 		Bloc("loop",
 			Valu("phi", OpPhi, TypeMem, 0, nil, "start", "store"),
-			Valu("store", OpStore, TypeMem, 1, nil, "addr", "v", "phi"),
+			Valu("store", OpStore, TypeMem, 0, TypeBool, "addr", "v", "phi"),
 			If("v", "loop", "exit")),
 		Bloc("exit",
 			Exit("store")))
@@ -79,8 +79,8 @@ func TestDeadStoreTypes(t *testing.T) {
 			Valu("v", OpConstBool, TypeBool, 1, nil),
 			Valu("addr1", OpAddr, t1, 0, nil, "sb"),
 			Valu("addr2", OpAddr, t2, 0, nil, "sb"),
-			Valu("store1", OpStore, TypeMem, 1, nil, "addr1", "v", "start"),
-			Valu("store2", OpStore, TypeMem, 1, nil, "addr2", "v", "store1"),
+			Valu("store1", OpStore, TypeMem, 0, TypeBool, "addr1", "v", "start"),
+			Valu("store2", OpStore, TypeMem, 0, TypeBool, "addr2", "v", "store1"),
 			Goto("exit")),
 		Bloc("exit",
 			Exit("store2")))
@@ -108,8 +108,8 @@ func TestDeadStoreUnsafe(t *testing.T) {
 			Valu("sb", OpSB, TypeInvalid, 0, nil),
 			Valu("v", OpConstBool, TypeBool, 1, nil),
 			Valu("addr1", OpAddr, ptrType, 0, nil, "sb"),
-			Valu("store1", OpStore, TypeMem, 8, nil, "addr1", "v", "start"),  // store 8 bytes
-			Valu("store2", OpStore, TypeMem, 1, nil, "addr1", "v", "store1"), // store 1 byte
+			Valu("store1", OpStore, TypeMem, 0, TypeInt64, "addr1", "v", "start"), // store 8 bytes
+			Valu("store2", OpStore, TypeMem, 0, TypeBool, "addr1", "v", "store1"), // store 1 byte
 			Goto("exit")),
 		Bloc("exit",
 			Exit("store2")))

src/cmd/compile/internal/ssa/block.go

@@ -352,6 +352,21 @@ func (b *Block) NewValue3I(pos src.XPos, op Op, t Type, auxint int64, arg0, arg1
 	return v
 }
 
+// NewValue3A returns a new value in the block with three arguments and an aux value.
+func (b *Block) NewValue3A(pos src.XPos, op Op, t Type, aux interface{}, arg0, arg1, arg2 *Value) *Value {
+	v := b.Func.newValue(op, t, b, pos)
+	v.AuxInt = 0
+	v.Aux = aux
+	v.Args = v.argstorage[:3]
+	v.argstorage[0] = arg0
+	v.argstorage[1] = arg1
+	v.argstorage[2] = arg2
+	arg0.Uses++
+	arg1.Uses++
+	arg2.Uses++
+	return v
+}
+
 // NewValue4 returns a new value in the block with four arguments and zero aux values.
 func (b *Block) NewValue4(pos src.XPos, op Op, t Type, arg0, arg1, arg2, arg3 *Value) *Value {
 	v := b.Func.newValue(op, t, b, pos)
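
A hypothetical call site for the new constructor (a sketch only; pos, t,
ptr, val, and mem are placeholders for a real position, type, and values):

	// Build a Store whose Aux carries the stored type; AuxInt stays 0.
	v := b.NewValue3A(pos, OpStore, TypeMem, t, ptr, val, mem)
	sz := v.Aux.(Type).Size() // consumers derive the width from the type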

src/cmd/compile/internal/ssa/gen/386.rules

@@ -256,47 +256,47 @@
 // Lowering stores
 // These more-specific FP versions of Store pattern should come first.
-(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (MOVSDstore ptr val mem)
-(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (MOVSSstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (MOVSDstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (MOVSSstore ptr val mem)
 
-(Store [4] ptr val mem) -> (MOVLstore ptr val mem)
-(Store [2] ptr val mem) -> (MOVWstore ptr val mem)
-(Store [1] ptr val mem) -> (MOVBstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 -> (MOVLstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVWstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
 
 // Lowering moves
-(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (MOVWstore dst (MOVWload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (MOVLstore dst (MOVLload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 ->
+(Move [0] _ _ mem) -> mem
+(Move [1] dst src mem) -> (MOVBstore dst (MOVBload src mem) mem)
+(Move [2] dst src mem) -> (MOVWstore dst (MOVWload src mem) mem)
+(Move [4] dst src mem) -> (MOVLstore dst (MOVLload src mem) mem)
+(Move [3] dst src mem) ->
 	(MOVBstore [2] dst (MOVBload [2] src mem)
 		(MOVWstore dst (MOVWload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 5 ->
+(Move [5] dst src mem) ->
 	(MOVBstore [4] dst (MOVBload [4] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 6 ->
+(Move [6] dst src mem) ->
 	(MOVWstore [4] dst (MOVWload [4] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 7 ->
+(Move [7] dst src mem) ->
 	(MOVLstore [3] dst (MOVLload [3] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 ->
+(Move [8] dst src mem) ->
 	(MOVLstore [4] dst (MOVLload [4] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
 
 // Adjust moves to be a multiple of 4 bytes.
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size()%4 != 0 ->
-	(Move [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%4]
-		(ADDLconst <dst.Type> dst [SizeAndAlign(s).Size()%4])
-		(ADDLconst <src.Type> src [SizeAndAlign(s).Size()%4])
+	&& s > 8 && s%4 != 0 ->
+	(Move [s-s%4]
+		(ADDLconst <dst.Type> dst [s%4])
+		(ADDLconst <src.Type> src [s%4])
 		(MOVLstore dst (MOVLload src mem) mem))
 
 // Medium copying uses a duff device.
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size() <= 4*128 && SizeAndAlign(s).Size()%4 == 0
+	&& s > 8 && s <= 4*128 && s%4 == 0
 	&& !config.noDuffDevice ->
-	(DUFFCOPY [10*(128-SizeAndAlign(s).Size()/4)] dst src mem)
+	(DUFFCOPY [10*(128-s/4)] dst src mem)
 // 10 and 128 are magic constants. 10 is the number of bytes to encode:
 //	MOVL	(SI), CX
 //	ADDL	$4, SI
@@ -305,42 +305,42 @@
 // and 128 is the number of such blocks. See src/runtime/duff_386.s:duffcopy.
 
 // Large copying uses REP MOVSL.
-(Move [s] dst src mem) && (SizeAndAlign(s).Size() > 4*128 || config.noDuffDevice) && SizeAndAlign(s).Size()%4 == 0 ->
-	(REPMOVSL dst src (MOVLconst [SizeAndAlign(s).Size()/4]) mem)
+(Move [s] dst src mem) && (s > 4*128 || config.noDuffDevice) && s%4 == 0 ->
+	(REPMOVSL dst src (MOVLconst [s/4]) mem)
 
 // Lowering Zero instructions
-(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstoreconst [0] destptr mem)
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 -> (MOVWstoreconst [0] destptr mem)
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 -> (MOVLstoreconst [0] destptr mem)
+(Zero [0] _ mem) -> mem
+(Zero [1] destptr mem) -> (MOVBstoreconst [0] destptr mem)
+(Zero [2] destptr mem) -> (MOVWstoreconst [0] destptr mem)
+(Zero [4] destptr mem) -> (MOVLstoreconst [0] destptr mem)
 
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 3 ->
+(Zero [3] destptr mem) ->
 	(MOVBstoreconst [makeValAndOff(0,2)] destptr
 		(MOVWstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 5 ->
+(Zero [5] destptr mem) ->
 	(MOVBstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 6 ->
+(Zero [6] destptr mem) ->
 	(MOVWstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 7 ->
+(Zero [7] destptr mem) ->
 	(MOVLstoreconst [makeValAndOff(0,3)] destptr
 		(MOVLstoreconst [0] destptr mem))
 
 // Strip off any fractional word zeroing.
-(Zero [s] destptr mem) && SizeAndAlign(s).Size()%4 != 0 && SizeAndAlign(s).Size() > 4 ->
-	(Zero [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%4] (ADDLconst destptr [SizeAndAlign(s).Size()%4])
+(Zero [s] destptr mem) && s%4 != 0 && s > 4 ->
+	(Zero [s-s%4] (ADDLconst destptr [s%4])
 		(MOVLstoreconst [0] destptr mem))
 
 // Zero small numbers of words directly.
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 ->
+(Zero [8] destptr mem) ->
 	(MOVLstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 12 ->
+(Zero [12] destptr mem) ->
 	(MOVLstoreconst [makeValAndOff(0,8)] destptr
 		(MOVLstoreconst [makeValAndOff(0,4)] destptr
 			(MOVLstoreconst [0] destptr mem)))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 16 ->
+(Zero [16] destptr mem) ->
 	(MOVLstoreconst [makeValAndOff(0,12)] destptr
 		(MOVLstoreconst [makeValAndOff(0,8)] destptr
 			(MOVLstoreconst [makeValAndOff(0,4)] destptr
@@ -348,20 +348,18 @@
 // Medium zeroing uses a duff device.
 (Zero [s] destptr mem)
-	&& SizeAndAlign(s).Size() > 16
-	&& SizeAndAlign(s).Size() <= 4*128
-	&& SizeAndAlign(s).Size()%4 == 0
+	&& s > 16 && s <= 4*128 && s%4 == 0
 	&& !config.noDuffDevice ->
-	(DUFFZERO [1*(128-SizeAndAlign(s).Size()/4)] destptr (MOVLconst [0]) mem)
+	(DUFFZERO [1*(128-s/4)] destptr (MOVLconst [0]) mem)
 // 1 and 128 are magic constants. 1 is the number of bytes to encode STOSL.
 // 128 is the number of STOSL instructions in duffzero.
 // See src/runtime/duff_386.s:duffzero.
 
 // Large zeroing uses REP STOSQ.
 (Zero [s] destptr mem)
-	&& (SizeAndAlign(s).Size() > 4*128 || (config.noDuffDevice && SizeAndAlign(s).Size() > 16))
-	&& SizeAndAlign(s).Size()%4 == 0 ->
-	(REPSTOSL destptr (MOVLconst [SizeAndAlign(s).Size()/4]) (MOVLconst [0]) mem)
+	&& (s > 4*128 || (config.noDuffDevice && s > 16))
+	&& s%4 == 0 ->
+	(REPSTOSL destptr (MOVLconst [s/4]) (MOVLconst [0]) mem)
 
 // Lowering constants
 (Const8 [val]) -> (MOVLconst [val])

src/cmd/compile/internal/ssa/gen/AMD64.rules

@@ -297,56 +297,56 @@
 // Lowering stores
 // These more-specific FP versions of Store pattern should come first.
-(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (MOVSDstore ptr val mem)
-(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (MOVSSstore ptr val mem)
-(Store [8] ptr val mem) -> (MOVQstore ptr val mem)
-(Store [4] ptr val mem) -> (MOVLstore ptr val mem)
-(Store [2] ptr val mem) -> (MOVWstore ptr val mem)
-(Store [1] ptr val mem) -> (MOVBstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (MOVSDstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (MOVSSstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 8 -> (MOVQstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 -> (MOVLstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVWstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
 
 // Lowering moves
-(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (MOVWstore dst (MOVWload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (MOVLstore dst (MOVLload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 -> (MOVQstore dst (MOVQload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 16 -> (MOVOstore dst (MOVOload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 ->
+(Move [0] _ _ mem) -> mem
+(Move [1] dst src mem) -> (MOVBstore dst (MOVBload src mem) mem)
+(Move [2] dst src mem) -> (MOVWstore dst (MOVWload src mem) mem)
+(Move [4] dst src mem) -> (MOVLstore dst (MOVLload src mem) mem)
+(Move [8] dst src mem) -> (MOVQstore dst (MOVQload src mem) mem)
+(Move [16] dst src mem) -> (MOVOstore dst (MOVOload src mem) mem)
+(Move [3] dst src mem) ->
 	(MOVBstore [2] dst (MOVBload [2] src mem)
 		(MOVWstore dst (MOVWload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 5 ->
+(Move [5] dst src mem) ->
 	(MOVBstore [4] dst (MOVBload [4] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 6 ->
+(Move [6] dst src mem) ->
 	(MOVWstore [4] dst (MOVWload [4] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 7 ->
+(Move [7] dst src mem) ->
 	(MOVLstore [3] dst (MOVLload [3] src mem)
 		(MOVLstore dst (MOVLload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size() < 16 ->
-	(MOVQstore [SizeAndAlign(s).Size()-8] dst (MOVQload [SizeAndAlign(s).Size()-8] src mem)
+(Move [s] dst src mem) && s > 8 && s < 16 ->
+	(MOVQstore [s-8] dst (MOVQload [s-8] src mem)
 		(MOVQstore dst (MOVQload src mem) mem))
 
 // Adjust moves to be a multiple of 16 bytes.
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size()%16 != 0 && SizeAndAlign(s).Size()%16 <= 8 ->
-	(Move [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%16]
-		(OffPtr <dst.Type> dst [SizeAndAlign(s).Size()%16])
-		(OffPtr <src.Type> src [SizeAndAlign(s).Size()%16])
+	&& s > 16 && s%16 != 0 && s%16 <= 8 ->
+	(Move [s-s%16]
+		(OffPtr <dst.Type> dst [s%16])
+		(OffPtr <src.Type> src [s%16])
 		(MOVQstore dst (MOVQload src mem) mem))
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size()%16 != 0 && SizeAndAlign(s).Size()%16 > 8 ->
-	(Move [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%16]
-		(OffPtr <dst.Type> dst [SizeAndAlign(s).Size()%16])
-		(OffPtr <src.Type> src [SizeAndAlign(s).Size()%16])
+	&& s > 16 && s%16 != 0 && s%16 > 8 ->
+	(Move [s-s%16]
+		(OffPtr <dst.Type> dst [s%16])
+		(OffPtr <src.Type> src [s%16])
 		(MOVOstore dst (MOVOload src mem) mem))
 
 // Medium copying uses a duff device.
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size() >= 32 && SizeAndAlign(s).Size() <= 16*64 && SizeAndAlign(s).Size()%16 == 0
+	&& s >= 32 && s <= 16*64 && s%16 == 0
 	&& !config.noDuffDevice ->
-	(DUFFCOPY [14*(64-SizeAndAlign(s).Size()/16)] dst src mem)
+	(DUFFCOPY [14*(64-s/16)] dst src mem)
 // 14 and 64 are magic constants. 14 is the number of bytes to encode:
 //	MOVUPS	(SI), X0
 //	ADDQ	$16, SI
@@ -355,43 +355,43 @@
 // and 64 is the number of such blocks. See src/runtime/duff_amd64.s:duffcopy.
 
 // Large copying uses REP MOVSQ.
-(Move [s] dst src mem) && (SizeAndAlign(s).Size() > 16*64 || config.noDuffDevice) && SizeAndAlign(s).Size()%8 == 0 ->
-	(REPMOVSQ dst src (MOVQconst [SizeAndAlign(s).Size()/8]) mem)
+(Move [s] dst src mem) && (s > 16*64 || config.noDuffDevice) && s%8 == 0 ->
+	(REPMOVSQ dst src (MOVQconst [s/8]) mem)
 
 // Lowering Zero instructions
-(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstoreconst [0] destptr mem)
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 -> (MOVWstoreconst [0] destptr mem)
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 -> (MOVLstoreconst [0] destptr mem)
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 -> (MOVQstoreconst [0] destptr mem)
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 3 ->
+(Zero [0] _ mem) -> mem
+(Zero [1] destptr mem) -> (MOVBstoreconst [0] destptr mem)
+(Zero [2] destptr mem) -> (MOVWstoreconst [0] destptr mem)
+(Zero [4] destptr mem) -> (MOVLstoreconst [0] destptr mem)
+(Zero [8] destptr mem) -> (MOVQstoreconst [0] destptr mem)
+(Zero [3] destptr mem) ->
 	(MOVBstoreconst [makeValAndOff(0,2)] destptr
 		(MOVWstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 5 ->
+(Zero [5] destptr mem) ->
 	(MOVBstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 6 ->
+(Zero [6] destptr mem) ->
 	(MOVWstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 7 ->
+(Zero [7] destptr mem) ->
 	(MOVLstoreconst [makeValAndOff(0,3)] destptr
 		(MOVLstoreconst [0] destptr mem))
 
 // Strip off any fractional word zeroing.
-(Zero [s] destptr mem) && SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 ->
-	(Zero [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8] (OffPtr <destptr.Type> destptr [SizeAndAlign(s).Size()%8])
+(Zero [s] destptr mem) && s%8 != 0 && s > 8 ->
	(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
 		(MOVQstoreconst [0] destptr mem))
 
 // Zero small numbers of words directly.
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 16 ->
+(Zero [16] destptr mem) ->
 	(MOVQstoreconst [makeValAndOff(0,8)] destptr
 		(MOVQstoreconst [0] destptr mem))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 24 ->
+(Zero [24] destptr mem) ->
 	(MOVQstoreconst [makeValAndOff(0,16)] destptr
 		(MOVQstoreconst [makeValAndOff(0,8)] destptr
 			(MOVQstoreconst [0] destptr mem)))
-(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 32 ->
+(Zero [32] destptr mem) ->
 	(MOVQstoreconst [makeValAndOff(0,24)] destptr
 		(MOVQstoreconst [makeValAndOff(0,16)] destptr
 			(MOVQstoreconst [makeValAndOff(0,8)] destptr
@@ -399,18 +399,18 @@
 // Medium zeroing uses a duff device.
 (Zero [s] destptr mem)
-	&& SizeAndAlign(s).Size() <= 1024 && SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size()%16 != 0
+	&& s <= 1024 && s%8 == 0 && s%16 != 0
 	&& !config.noDuffDevice ->
-	(Zero [SizeAndAlign(s).Size()-8] (OffPtr <destptr.Type> [8] destptr) (MOVQstore destptr (MOVQconst [0]) mem))
+	(Zero [s-8] (OffPtr <destptr.Type> [8] destptr) (MOVQstore destptr (MOVQconst [0]) mem))
 (Zero [s] destptr mem)
-	&& SizeAndAlign(s).Size() <= 1024 && SizeAndAlign(s).Size()%16 == 0 && !config.noDuffDevice ->
-	(DUFFZERO [SizeAndAlign(s).Size()] destptr (MOVOconst [0]) mem)
+	&& s <= 1024 && s%16 == 0 && !config.noDuffDevice ->
+	(DUFFZERO [s] destptr (MOVOconst [0]) mem)
 
 // Large zeroing uses REP STOSQ.
 (Zero [s] destptr mem)
-	&& (SizeAndAlign(s).Size() > 1024 || (config.noDuffDevice && SizeAndAlign(s).Size() > 32))
-	&& SizeAndAlign(s).Size()%8 == 0 ->
-	(REPSTOSQ destptr (MOVQconst [SizeAndAlign(s).Size()/8]) (MOVQconst [0]) mem)
+	&& (s > 1024 || (config.noDuffDevice && s > 32))
+	&& s%8 == 0 ->
+	(REPSTOSQ destptr (MOVQconst [s/8]) (MOVQconst [0]) mem)
 
 // Lowering constants
 (Const8 [val]) -> (MOVLconst [val])

src/cmd/compile/internal/ssa/gen/ARM.rules

@@ -290,90 +290,90 @@
 (Load <t> ptr mem) && is64BitFloat(t) -> (MOVDload ptr mem)
 
 // stores
-(Store [1] ptr val mem) -> (MOVBstore ptr val mem)
-(Store [2] ptr val mem) -> (MOVHstore ptr val mem)
-(Store [4] ptr val mem) && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
-(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (MOVFstore ptr val mem)
-(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVHstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (MOVFstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
 
 // zero instructions
-(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore ptr (MOVWconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 ->
+(Zero [0] _ mem) -> mem
+(Zero [1] ptr mem) -> (MOVBstore ptr (MOVWconst [0]) mem)
+(Zero [2] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
 	(MOVHstore ptr (MOVWconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 ->
+(Zero [2] ptr mem) ->
 	(MOVBstore [1] ptr (MOVWconst [0])
 		(MOVBstore [0] ptr (MOVWconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 ->
+(Zero [4] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
 	(MOVWstore ptr (MOVWconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 ->
+(Zero [4] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
 	(MOVHstore [2] ptr (MOVWconst [0])
 		(MOVHstore [0] ptr (MOVWconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 ->
+(Zero [4] ptr mem) ->
 	(MOVBstore [3] ptr (MOVWconst [0])
 		(MOVBstore [2] ptr (MOVWconst [0])
 			(MOVBstore [1] ptr (MOVWconst [0])
 				(MOVBstore [0] ptr (MOVWconst [0]) mem))))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 3 ->
+(Zero [3] ptr mem) ->
 	(MOVBstore [2] ptr (MOVWconst [0])
 		(MOVBstore [1] ptr (MOVWconst [0])
 			(MOVBstore [0] ptr (MOVWconst [0]) mem)))
 
 // Medium zeroing uses a duff device
 // 4 and 128 are magic constants, see runtime/mkduff.go
-(Zero [s] ptr mem)
-	&& SizeAndAlign(s).Size()%4 == 0 && SizeAndAlign(s).Size() > 4 && SizeAndAlign(s).Size() <= 512
-	&& SizeAndAlign(s).Align()%4 == 0 && !config.noDuffDevice ->
-	(DUFFZERO [4 * (128 - int64(SizeAndAlign(s).Size()/4))] ptr (MOVWconst [0]) mem)
+(Zero [s] {t} ptr mem)
+	&& s%4 == 0 && s > 4 && s <= 512
+	&& t.(Type).Alignment()%4 == 0 && !config.noDuffDevice ->
+	(DUFFZERO [4 * (128 - int64(s/4))] ptr (MOVWconst [0]) mem)
 
 // Large zeroing uses a loop
-(Zero [s] ptr mem)
-	&& (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%4 != 0 ->
-	(LoweredZero [SizeAndAlign(s).Align()]
+(Zero [s] {t} ptr mem)
+	&& (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%4 != 0 ->
+	(LoweredZero [t.(Type).Alignment()]
 		ptr
-		(ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)])
+		(ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)])
 		(MOVWconst [0])
 		mem)
 
 // moves
-(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBUload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 ->
+(Move [0] _ _ mem) -> mem
+(Move [1] dst src mem) -> (MOVBstore dst (MOVBUload src mem) mem)
+(Move [2] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
 	(MOVHstore dst (MOVHUload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 ->
+(Move [2] dst src mem) ->
 	(MOVBstore [1] dst (MOVBUload [1] src mem)
 		(MOVBstore dst (MOVBUload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 ->
+(Move [4] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
 	(MOVWstore dst (MOVWload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 ->
+(Move [4] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
 	(MOVHstore [2] dst (MOVHUload [2] src mem)
 		(MOVHstore dst (MOVHUload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 ->
+(Move [4] dst src mem) ->
 	(MOVBstore [3] dst (MOVBUload [3] src mem)
 		(MOVBstore [2] dst (MOVBUload [2] src mem)
 			(MOVBstore [1] dst (MOVBUload [1] src mem)
 				(MOVBstore dst (MOVBUload src mem) mem))))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 ->
+(Move [3] dst src mem) ->
 	(MOVBstore [2] dst (MOVBUload [2] src mem)
 		(MOVBstore [1] dst (MOVBUload [1] src mem)
 			(MOVBstore dst (MOVBUload src mem) mem)))
 
 // Medium move uses a duff device
 // 8 and 128 are magic constants, see runtime/mkduff.go
-(Move [s] dst src mem)
-	&& SizeAndAlign(s).Size()%4 == 0 && SizeAndAlign(s).Size() > 4 && SizeAndAlign(s).Size() <= 512
-	&& SizeAndAlign(s).Align()%4 == 0 && !config.noDuffDevice ->
-	(DUFFCOPY [8 * (128 - int64(SizeAndAlign(s).Size()/4))] dst src mem)
+(Move [s] {t} dst src mem)
+	&& s%4 == 0 && s > 4 && s <= 512
+	&& t.(Type).Alignment()%4 == 0 && !config.noDuffDevice ->
+	(DUFFCOPY [8 * (128 - int64(s/4))] dst src mem)
 
 // Large move uses a loop
-(Move [s] dst src mem)
-	&& (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%4 != 0 ->
-	(LoweredMove [SizeAndAlign(s).Align()]
+(Move [s] {t} dst src mem)
+	&& (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%4 != 0 ->
+	(LoweredMove [t.(Type).Alignment()]
 		dst
 		src
-		(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)])
+		(ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)])
 		mem)
 
 // calls

src/cmd/compile/internal/ssa/gen/ARM64.rules

@@ -331,117 +331,117 @@
 (Load <t> ptr mem) && is64BitFloat(t) -> (FMOVDload ptr mem)
 
 // stores
-(Store [1] ptr val mem) -> (MOVBstore ptr val mem)
-(Store [2] ptr val mem) -> (MOVHstore ptr val mem)
-(Store [4] ptr val mem) && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
-(Store [8] ptr val mem) && !is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
-(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem)
-(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVHstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 8 && !is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem)
+(Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem)
 
 // zeroing
-(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore ptr (MOVDconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 -> (MOVHstore ptr (MOVDconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 -> (MOVWstore ptr (MOVDconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 8 -> (MOVDstore ptr (MOVDconst [0]) mem)
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 3 ->
+(Zero [0] _ mem) -> mem
+(Zero [1] ptr mem) -> (MOVBstore ptr (MOVDconst [0]) mem)
+(Zero [2] ptr mem) -> (MOVHstore ptr (MOVDconst [0]) mem)
+(Zero [4] ptr mem) -> (MOVWstore ptr (MOVDconst [0]) mem)
+(Zero [8] ptr mem) -> (MOVDstore ptr (MOVDconst [0]) mem)
+(Zero [3] ptr mem) ->
 	(MOVBstore [2] ptr (MOVDconst [0])
 		(MOVHstore ptr (MOVDconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 5 ->
+(Zero [5] ptr mem) ->
 	(MOVBstore [4] ptr (MOVDconst [0])
 		(MOVWstore ptr (MOVDconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 6 ->
+(Zero [6] ptr mem) ->
 	(MOVHstore [4] ptr (MOVDconst [0])
 		(MOVWstore ptr (MOVDconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 7 ->
+(Zero [7] ptr mem) ->
 	(MOVBstore [6] ptr (MOVDconst [0])
 		(MOVHstore [4] ptr (MOVDconst [0])
 			(MOVWstore ptr (MOVDconst [0]) mem)))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 12 ->
+(Zero [12] ptr mem) ->
 	(MOVWstore [8] ptr (MOVDconst [0])
 		(MOVDstore ptr (MOVDconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 16 ->
+(Zero [16] ptr mem) ->
 	(MOVDstore [8] ptr (MOVDconst [0])
 		(MOVDstore ptr (MOVDconst [0]) mem))
-(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 24 ->
+(Zero [24] ptr mem) ->
 	(MOVDstore [16] ptr (MOVDconst [0])
 		(MOVDstore [8] ptr (MOVDconst [0])
 			(MOVDstore ptr (MOVDconst [0]) mem)))
 
 // strip off fractional word zeroing
-(Zero [s] ptr mem) && SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 ->
-	(Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()]
-		(OffPtr <ptr.Type> ptr [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8])
-		(Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] ptr mem))
+(Zero [s] ptr mem) && s%8 != 0 && s > 8 ->
+	(Zero [s%8]
+		(OffPtr <ptr.Type> ptr [s-s%8])
+		(Zero [s-s%8] ptr mem))
 
 // medium zeroing uses a duff device
 // 4, 8, and 128 are magic constants, see runtime/mkduff.go
 (Zero [s] ptr mem)
-	&& SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128
+	&& s%8 == 0 && s > 24 && s <= 8*128
 	&& !config.noDuffDevice ->
-	(DUFFZERO [4 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem)
+	(DUFFZERO [4 * (128 - int64(s/8))] ptr mem)
 
 // large zeroing uses a loop
 (Zero [s] ptr mem)
-	&& SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) ->
+	&& s%8 == 0 && (s > 8*128 || config.noDuffDevice) ->
 	(LoweredZero
 		ptr
-		(ADDconst <ptr.Type> [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)] ptr)
+		(ADDconst <ptr.Type> [s-8] ptr)
 		mem)
 
 // moves
-(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBUload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (MOVHstore dst (MOVHUload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (MOVWstore dst (MOVWUload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 -> (MOVDstore dst (MOVDload src mem) mem)
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 ->
+(Move [0] _ _ mem) -> mem
+(Move [1] dst src mem) -> (MOVBstore dst (MOVBUload src mem) mem)
+(Move [2] dst src mem) -> (MOVHstore dst (MOVHUload src mem) mem)
+(Move [4] dst src mem) -> (MOVWstore dst (MOVWUload src mem) mem)
+(Move [8] dst src mem) -> (MOVDstore dst (MOVDload src mem) mem)
+(Move [3] dst src mem) ->
 	(MOVBstore [2] dst (MOVBUload [2] src mem)
 		(MOVHstore dst (MOVHUload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 5 ->
+(Move [5] dst src mem) ->
 	(MOVBstore [4] dst (MOVBUload [4] src mem)
 		(MOVWstore dst (MOVWUload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 6 ->
+(Move [6] dst src mem) ->
 	(MOVHstore [4] dst (MOVHUload [4] src mem)
 		(MOVWstore dst (MOVWUload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 7 ->
+(Move [7] dst src mem) ->
 	(MOVBstore [6] dst (MOVBUload [6] src mem)
 		(MOVHstore [4] dst (MOVHUload [4] src mem)
 			(MOVWstore dst (MOVWUload src mem) mem)))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 12 ->
+(Move [12] dst src mem) ->
 	(MOVWstore [8] dst (MOVWUload [8] src mem)
 		(MOVDstore dst (MOVDload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 16 ->
+(Move [16] dst src mem) ->
 	(MOVDstore [8] dst (MOVDload [8] src mem)
 		(MOVDstore dst (MOVDload src mem) mem))
-(Move [s] dst src mem) && SizeAndAlign(s).Size() == 24 ->
+(Move [24] dst src mem) ->
 	(MOVDstore [16] dst (MOVDload [16] src mem)
 		(MOVDstore [8] dst (MOVDload [8] src mem)
 			(MOVDstore dst (MOVDload src mem) mem)))
 
 // strip off fractional word move
-(Move [s] dst src mem) && SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 ->
-	(Move [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()]
-		(OffPtr <dst.Type> dst [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8])
-		(OffPtr <src.Type> src [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8])
-		(Move [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] dst src mem))
+(Move [s] dst src mem) && s%8 != 0 && s > 8 ->
+	(Move [s%8]
+		(OffPtr <dst.Type> dst [s-s%8])
+		(OffPtr <src.Type> src [s-s%8])
+		(Move [s-s%8] dst src mem))
 
 // medium move uses a duff device
 // 8 and 128 are magic constants, see runtime/mkduff.go
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128
+	&& s%8 == 0 && s > 24 && s <= 8*128
 	&& !config.noDuffDevice ->
-	(DUFFCOPY [8 * (128 - int64(SizeAndAlign(s).Size()/8))] dst src mem)
+	(DUFFCOPY [8 * (128 - int64(s/8))] dst src mem)
 
 // large move uses a loop
 (Move [s] dst src mem)
-	&& SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0 ->
+	&& s > 24 && s%8 == 0 ->
 	(LoweredMove
 		dst
 		src
-		(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)])
+		(ADDconst <src.Type> src [s-8])
 		mem)
 
 // calls

View file

@@ -264,99 +264,98 @@
(Load <t> ptr mem) && is64BitFloat(t) -> (MOVDload ptr mem) (Load <t> ptr mem) && is64BitFloat(t) -> (MOVDload ptr mem)
// stores // stores
(Store [1] ptr val mem) -> (MOVBstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
(Store [2] ptr val mem) -> (MOVHstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVHstore ptr val mem)
(Store [4] ptr val mem) && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
(Store [8] ptr val mem) && !is64BitFloat(val.Type) -> (MOVWstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (MOVFstore ptr val mem)
(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (MOVFstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
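
The rewritten Store rules follow one pattern across all architectures: the width check moves from the AuxInt into a condition on the aux type. A self-contained sketch of that dispatch, using a stand-in for the compiler's ssa.Type interface (names here are illustrative, not the real API):

	package main

	import "fmt"

	// Type stands in for the interface the rules query via t.(Type);
	// the real one lives in cmd/compile/internal/ssa.
	type Type interface {
		Size() int64
		Alignment() int64
	}

	type scalar struct{ size, align int64 }

	func (t scalar) Size() int64      { return t.size }
	func (t scalar) Alignment() int64 { return t.align }

	// lowerStore picks a store op the way the rewritten rules do:
	// by asking the aux type for its size.
	func lowerStore(aux interface{}, isFloat bool) string {
		switch sz := aux.(Type).Size(); {
		case sz == 1:
			return "MOVBstore"
		case sz == 2:
			return "MOVHstore"
		case sz == 4 && !isFloat:
			return "MOVWstore"
		case sz == 4 && isFloat:
			return "MOVFstore"
		}
		return "unhandled"
	}

	func main() {
		fmt.Println(lowerStore(scalar{4, 4}, false)) // MOVWstore
	}
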
// zero instructions // zero instructions
(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Zero [0] _ mem) -> mem
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore ptr (MOVWconst [0]) mem) (Zero [1] ptr mem) -> (MOVBstore ptr (MOVWconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [2] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore ptr (MOVWconst [0]) mem) (MOVHstore ptr (MOVWconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 -> (Zero [2] ptr mem) ->
(MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0])
(MOVBstore [0] ptr (MOVWconst [0]) mem)) (MOVBstore [0] ptr (MOVWconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [4] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore ptr (MOVWconst [0]) mem) (MOVWstore ptr (MOVWconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [4] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0])
(MOVHstore [0] ptr (MOVWconst [0]) mem)) (MOVHstore [0] ptr (MOVWconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 -> (Zero [4] ptr mem) ->
(MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [3] ptr (MOVWconst [0])
(MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0])
(MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0])
(MOVBstore [0] ptr (MOVWconst [0]) mem)))) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 3 -> (Zero [3] ptr mem) ->
(MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0])
(MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0])
(MOVBstore [0] ptr (MOVWconst [0]) mem))) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [6] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [4] ptr (MOVWconst [0])
(MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0])
(MOVHstore [0] ptr (MOVWconst [0]) mem))) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [8] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0])
(MOVWstore [0] ptr (MOVWconst [0]) mem)) (MOVWstore [0] ptr (MOVWconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [12] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0])
(MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0])
(MOVWstore [0] ptr (MOVWconst [0]) mem))) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [16] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [12] ptr (MOVWconst [0])
(MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0])
(MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0])
(MOVWstore [0] ptr (MOVWconst [0]) mem)))) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
// large or unaligned zeroing uses a loop // large or unaligned zeroing uses a loop
(Zero [s] ptr mem) (Zero [s] {t} ptr mem)
&& (SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) -> && (s > 16 || s%4 != 0) ->
(LoweredZero [SizeAndAlign(s).Align()] (LoweredZero [t.(Type).Alignment()]
ptr ptr
(ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)])
mem) mem)
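
moveSize, referenced above, is an existing helper in the rewrite package; it picks the widest store width the alignment permits, so the ADDconst computes the address of the loop's last element. An approximate sketch of its logic (the real helper takes a *Config; this stand-in takes the integer size directly):

	package main

	import "fmt"

	// moveSizeSketch approximates the compiler's moveSize helper:
	// the widest aligned load/store a zero/move loop can use.
	// intSize stands in for config.IntSize.
	func moveSizeSketch(align, intSize int64) int64 {
		switch {
		case align%8 == 0 && intSize == 8:
			return 8
		case align%4 == 0:
			return 4
		case align%2 == 0:
			return 2
		}
		return 1
	}

	func main() {
		// The ADDconst above points at the last element cleared:
		// for a 40-byte, 4-aligned Zero on 32-bit MIPS, ptr+36.
		fmt.Println(int64(40) - moveSizeSketch(4, 4)) // 36
	}
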
// moves // moves
(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Move [0] _ _ mem) -> mem
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBUload src mem) mem) (Move [1] dst src mem) -> (MOVBstore dst (MOVBUload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 -> (Move [2] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore dst (MOVHUload src mem) mem) (MOVHstore dst (MOVHUload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (Move [2] dst src mem) ->
(MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem)
(MOVBstore dst (MOVBUload src mem) mem)) (MOVBstore dst (MOVBUload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 -> (Move [4] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore dst (MOVWload src mem) mem) (MOVWstore dst (MOVWload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 -> (Move [4] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore [2] dst (MOVHUload [2] src mem)
(MOVHstore dst (MOVHUload src mem) mem)) (MOVHstore dst (MOVHUload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (Move [4] dst src mem) ->
(MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [3] dst (MOVBUload [3] src mem)
(MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem)
(MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem)
(MOVBstore dst (MOVBUload src mem) mem)))) (MOVBstore dst (MOVBUload src mem) mem))))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 -> (Move [3] dst src mem) ->
(MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem)
(MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem)
(MOVBstore dst (MOVBUload src mem) mem))) (MOVBstore dst (MOVBUload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore [4] dst (MOVWload [4] src mem)
(MOVWstore dst (MOVWload src mem) mem)) (MOVWstore dst (MOVWload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [6] dst (MOVHload [6] src mem)
(MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [4] dst (MOVHload [4] src mem)
(MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore [2] dst (MOVHload [2] src mem)
(MOVHstore dst (MOVHload src mem) mem)))) (MOVHstore dst (MOVHload src mem) mem))))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 -> (Move [6] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [4] dst (MOVHload [4] src mem)
(MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore [2] dst (MOVHload [2] src mem)
(MOVHstore dst (MOVHload src mem) mem))) (MOVHstore dst (MOVHload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 -> (Move [12] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [8] dst (MOVWload [8] src mem)
(MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore [4] dst (MOVWload [4] src mem)
(MOVWstore dst (MOVWload src mem) mem))) (MOVWstore dst (MOVWload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0 -> (Move [16] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [12] dst (MOVWload [12] src mem)
(MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [8] dst (MOVWload [8] src mem)
(MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore [4] dst (MOVWload [4] src mem)
@@ -364,12 +363,12 @@
// large or unaligned move uses a loop // large or unaligned move uses a loop
(Move [s] dst src mem) (Move [s] {t} dst src mem)
&& (SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) -> && (s > 16 || t.(Type).Alignment()%4 != 0) ->
(LoweredMove [SizeAndAlign(s).Align()] (LoweredMove [t.(Type).Alignment()]
dst dst
src src
(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)])
mem) mem)
// calls // calls

View file

@@ -285,133 +285,133 @@
(Load <t> ptr mem) && is64BitFloat(t) -> (MOVDload ptr mem) (Load <t> ptr mem) && is64BitFloat(t) -> (MOVDload ptr mem)
// stores // stores
(Store [1] ptr val mem) -> (MOVBstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
(Store [2] ptr val mem) -> (MOVHstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVHstore ptr val mem)
(Store [4] ptr val mem) && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && !is32BitFloat(val.Type) -> (MOVWstore ptr val mem)
(Store [8] ptr val mem) && !is64BitFloat(val.Type) -> (MOVVstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 && !is64BitFloat(val.Type) -> (MOVVstore ptr val mem)
(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (MOVFstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (MOVFstore ptr val mem)
(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (MOVDstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (MOVDstore ptr val mem)
// zeroing // zeroing
(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Zero [0] _ mem) -> mem
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore ptr (MOVVconst [0]) mem) (Zero [1] ptr mem) -> (MOVBstore ptr (MOVVconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [2] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore ptr (MOVVconst [0]) mem) (MOVHstore ptr (MOVVconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 2 -> (Zero [2] ptr mem) ->
(MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0])
(MOVBstore [0] ptr (MOVVconst [0]) mem)) (MOVBstore [0] ptr (MOVVconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [4] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore ptr (MOVVconst [0]) mem) (MOVWstore ptr (MOVVconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [4] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0])
(MOVHstore [0] ptr (MOVVconst [0]) mem)) (MOVHstore [0] ptr (MOVVconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 -> (Zero [4] ptr mem) ->
(MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [3] ptr (MOVVconst [0])
(MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0])
(MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0])
(MOVBstore [0] ptr (MOVVconst [0]) mem)))) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [8] {t} ptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVVstore ptr (MOVVconst [0]) mem) (MOVVstore ptr (MOVVconst [0]) mem)
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [8] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0])
(MOVWstore [0] ptr (MOVVconst [0]) mem)) (MOVWstore [0] ptr (MOVVconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 4 -> (Zero [8] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [6] ptr (MOVVconst [0])
(MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0])
(MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0])
(MOVHstore [0] ptr (MOVVconst [0]) mem)))) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 3 -> (Zero [3] ptr mem) ->
(MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0])
(MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0])
(MOVBstore [0] ptr (MOVVconst [0]) mem))) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [6] {t} ptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0])
(MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0])
(MOVHstore [0] ptr (MOVVconst [0]) mem))) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [12] {t} ptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [8] ptr (MOVVconst [0])
(MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0])
(MOVWstore [0] ptr (MOVVconst [0]) mem))) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [16] {t} ptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0])
(MOVVstore [0] ptr (MOVVconst [0]) mem)) (MOVVstore [0] ptr (MOVVconst [0]) mem))
(Zero [s] ptr mem) && SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [24] {t} ptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [16] ptr (MOVVconst [0])
(MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0])
(MOVVstore [0] ptr (MOVVconst [0]) mem))) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
// medium zeroing uses a duff device // medium zeroing uses a duff device
// 8, and 128 are magic constants, see runtime/mkduff.go // 8, and 128 are magic constants, see runtime/mkduff.go
(Zero [s] ptr mem) (Zero [s] {t} ptr mem)
&& SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && s%8 == 0 && s > 24 && s <= 8*128
&& SizeAndAlign(s).Align()%8 == 0 && !config.noDuffDevice -> && t.(Type).Alignment()%8 == 0 && !config.noDuffDevice ->
(DUFFZERO [8 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem) (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
// large or unaligned zeroing uses a loop // large or unaligned zeroing uses a loop
(Zero [s] ptr mem) (Zero [s] {t} ptr mem)
&& (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 -> && (s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0 ->
(LoweredZero [SizeAndAlign(s).Align()] (LoweredZero [t.(Type).Alignment()]
ptr ptr
(ADDVconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (ADDVconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)])
mem) mem)
// moves // moves
(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Move [0] _ _ mem) -> mem
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBload src mem) mem) (Move [1] dst src mem) -> (MOVBstore dst (MOVBload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 -> (Move [2] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore dst (MOVHload src mem) mem) (MOVHstore dst (MOVHload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (Move [2] dst src mem) ->
(MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore [1] dst (MOVBload [1] src mem)
(MOVBstore dst (MOVBload src mem) mem)) (MOVBstore dst (MOVBload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 -> (Move [4] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore dst (MOVWload src mem) mem) (MOVWstore dst (MOVWload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 -> (Move [4] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore [2] dst (MOVHload [2] src mem)
(MOVHstore dst (MOVHload src mem) mem)) (MOVHstore dst (MOVHload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (Move [4] dst src mem) ->
(MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [3] dst (MOVBload [3] src mem)
(MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [2] dst (MOVBload [2] src mem)
(MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore [1] dst (MOVBload [1] src mem)
(MOVBstore dst (MOVBload src mem) mem)))) (MOVBstore dst (MOVBload src mem) mem))))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%8 == 0 ->
(MOVVstore dst (MOVVload src mem) mem) (MOVVstore dst (MOVVload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore [4] dst (MOVWload [4] src mem)
(MOVWstore dst (MOVWload src mem) mem)) (MOVWstore dst (MOVWload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [6] dst (MOVHload [6] src mem)
(MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [4] dst (MOVHload [4] src mem)
(MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore [2] dst (MOVHload [2] src mem)
(MOVHstore dst (MOVHload src mem) mem)))) (MOVHstore dst (MOVHload src mem) mem))))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 -> (Move [3] dst src mem) ->
(MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [2] dst (MOVBload [2] src mem)
(MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore [1] dst (MOVBload [1] src mem)
(MOVBstore dst (MOVBload src mem) mem))) (MOVBstore dst (MOVBload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 -> (Move [6] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [4] dst (MOVHload [4] src mem)
(MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore [2] dst (MOVHload [2] src mem)
(MOVHstore dst (MOVHload src mem) mem))) (MOVHstore dst (MOVHload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 -> (Move [12] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [8] dst (MOVWload [8] src mem)
(MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore [4] dst (MOVWload [4] src mem)
(MOVWstore dst (MOVWload src mem) mem))) (MOVWstore dst (MOVWload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 -> (Move [16] {t} dst src mem) && t.(Type).Alignment()%8 == 0 ->
(MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore [8] dst (MOVVload [8] src mem)
(MOVVstore dst (MOVVload src mem) mem)) (MOVVstore dst (MOVVload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 -> (Move [24] {t} dst src mem) && t.(Type).Alignment()%8 == 0 ->
(MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [16] dst (MOVVload [16] src mem)
(MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore [8] dst (MOVVload [8] src mem)
(MOVVstore dst (MOVVload src mem) mem))) (MOVVstore dst (MOVVload src mem) mem)))
// large or unaligned move uses a loop // large or unaligned move uses a loop
(Move [s] dst src mem) (Move [s] {t} dst src mem)
&& SizeAndAlign(s).Size() > 24 || SizeAndAlign(s).Align()%8 != 0 -> && s > 24 || t.(Type).Alignment()%8 != 0 ->
(LoweredMove [SizeAndAlign(s).Align()] (LoweredMove [t.(Type).Alignment()]
dst dst
src src
(ADDVconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (ADDVconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)])
mem) mem)
// calls // calls

View file

@@ -477,111 +477,111 @@
(Load <t> ptr mem) && is32BitFloat(t) -> (FMOVSload ptr mem) (Load <t> ptr mem) && is32BitFloat(t) -> (FMOVSload ptr mem)
(Load <t> ptr mem) && is64BitFloat(t) -> (FMOVDload ptr mem) (Load <t> ptr mem) && is64BitFloat(t) -> (FMOVDload ptr mem)
(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem)
(Store [8] ptr val mem) && is32BitFloat(val.Type) -> (FMOVDstore ptr val mem) // glitch from (Cvt32Fto64F x) -> x -- type is wrong (Store {t} ptr val mem) && t.(Type).Size() == 8 && is32BitFloat(val.Type) -> (FMOVDstore ptr val mem) // glitch from (Cvt32Fto64F x) -> x -- type is wrong
(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem)
(Store [8] ptr val mem) && (is64BitInt(val.Type) || isPtr(val.Type)) -> (MOVDstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 && (is64BitInt(val.Type) || isPtr(val.Type)) -> (MOVDstore ptr val mem)
(Store [4] ptr val mem) && is32BitInt(val.Type) -> (MOVWstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitInt(val.Type) -> (MOVWstore ptr val mem)
(Store [2] ptr val mem) -> (MOVHstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVHstore ptr val mem)
(Store [1] ptr val mem) -> (MOVBstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Zero [0] _ mem) -> mem
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstorezero destptr mem) (Zero [1] destptr mem) -> (MOVBstorezero destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [2] {t} destptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstorezero destptr mem) (MOVHstorezero destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 -> (Zero [2] destptr mem) ->
(MOVBstorezero [1] destptr (MOVBstorezero [1] destptr
(MOVBstorezero [0] destptr mem)) (MOVBstorezero [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [4] {t} destptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstorezero destptr mem) (MOVWstorezero destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [4] {t} destptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstorezero [2] destptr (MOVHstorezero [2] destptr
(MOVHstorezero [0] destptr mem)) (MOVHstorezero [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 -> (Zero [4] destptr mem) ->
(MOVBstorezero [3] destptr (MOVBstorezero [3] destptr
(MOVBstorezero [2] destptr (MOVBstorezero [2] destptr
(MOVBstorezero [1] destptr (MOVBstorezero [1] destptr
(MOVBstorezero [0] destptr mem)))) (MOVBstorezero [0] destptr mem))))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [8] {t} destptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVDstorezero [0] destptr mem) (MOVDstorezero [0] destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 -> (Zero [8] {t} destptr mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstorezero [4] destptr (MOVWstorezero [4] destptr
(MOVWstorezero [0] destptr mem)) (MOVWstorezero [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 -> (Zero [8] {t} destptr mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstorezero [6] destptr (MOVHstorezero [6] destptr
(MOVHstorezero [4] destptr (MOVHstorezero [4] destptr
(MOVHstorezero [2] destptr (MOVHstorezero [2] destptr
(MOVHstorezero [0] destptr mem)))) (MOVHstorezero [0] destptr mem))))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 3 -> (Zero [3] destptr mem) ->
(MOVBstorezero [2] destptr (MOVBstorezero [2] destptr
(MOVBstorezero [1] destptr (MOVBstorezero [1] destptr
(MOVBstorezero [0] destptr mem))) (MOVBstorezero [0] destptr mem)))
// Zero small numbers of words directly. // Zero small numbers of words directly.
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [16] {t} destptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVDstorezero [8] destptr (MOVDstorezero [8] destptr
(MOVDstorezero [0] destptr mem)) (MOVDstorezero [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [24] {t} destptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVDstorezero [16] destptr (MOVDstorezero [16] destptr
(MOVDstorezero [8] destptr (MOVDstorezero [8] destptr
(MOVDstorezero [0] destptr mem))) (MOVDstorezero [0] destptr mem)))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 32 && SizeAndAlign(s).Align()%8 == 0 -> (Zero [32] {t} destptr mem) && t.(Type).Alignment()%8 == 0 ->
(MOVDstorezero [24] destptr (MOVDstorezero [24] destptr
(MOVDstorezero [16] destptr (MOVDstorezero [16] destptr
(MOVDstorezero [8] destptr (MOVDstorezero [8] destptr
(MOVDstorezero [0] destptr mem)))) (MOVDstorezero [0] destptr mem))))
// Large zeroing uses a loop // Large zeroing uses a loop
(Zero [s] ptr mem) (Zero [s] {t} ptr mem)
&& (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 -> && (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0 ->
(LoweredZero [SizeAndAlign(s).Align()] (LoweredZero [t.(Type).Alignment()]
ptr ptr
(ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)])
mem) mem)
// moves // moves
(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Move [0] _ _ mem) -> mem
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBZload src mem) mem) (Move [1] dst src mem) -> (MOVBstore dst (MOVBZload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 -> (Move [2] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore dst (MOVHZload src mem) mem) (MOVHstore dst (MOVHZload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (Move [2] dst src mem) ->
(MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem)
(MOVBstore dst (MOVBZload src mem) mem)) (MOVBstore dst (MOVBZload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 -> (Move [4] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore dst (MOVWload src mem) mem) (MOVWstore dst (MOVWload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 -> (Move [4] {t} dst src mem) && t.(Type).Alignment()%2 == 0 ->
(MOVHstore [2] dst (MOVHZload [2] src mem) (MOVHstore [2] dst (MOVHZload [2] src mem)
(MOVHstore dst (MOVHZload src mem) mem)) (MOVHstore dst (MOVHZload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (Move [4] dst src mem) ->
(MOVBstore [3] dst (MOVBZload [3] src mem) (MOVBstore [3] dst (MOVBZload [3] src mem)
(MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [2] dst (MOVBZload [2] src mem)
(MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem)
(MOVBstore dst (MOVBZload src mem) mem)))) (MOVBstore dst (MOVBZload src mem) mem))))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%8 == 0 ->
(MOVDstore dst (MOVDload src mem) mem) (MOVDstore dst (MOVDload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 -> (Move [8] {t} dst src mem) && t.(Type).Alignment()%4 == 0 ->
(MOVWstore [4] dst (MOVWZload [4] src mem) (MOVWstore [4] dst (MOVWZload [4] src mem)
(MOVWstore dst (MOVWZload src mem) mem)) (MOVWstore dst (MOVWZload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0-> (Move [8] {t} dst src mem) && t.(Type).Alignment()%2 == 0->
(MOVHstore [6] dst (MOVHZload [6] src mem) (MOVHstore [6] dst (MOVHZload [6] src mem)
(MOVHstore [4] dst (MOVHZload [4] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem)
(MOVHstore [2] dst (MOVHZload [2] src mem) (MOVHstore [2] dst (MOVHZload [2] src mem)
(MOVHstore dst (MOVHZload src mem) mem)))) (MOVHstore dst (MOVHZload src mem) mem))))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 -> (Move [3] dst src mem) ->
(MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [2] dst (MOVBZload [2] src mem)
(MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem)
(MOVBstore dst (MOVBZload src mem) mem))) (MOVBstore dst (MOVBZload src mem) mem)))
// Large move uses a loop // Large move uses a loop
(Move [s] dst src mem) (Move [s] {t} dst src mem)
&& (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 -> && (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0 ->
(LoweredMove [SizeAndAlign(s).Align()] (LoweredMove [t.(Type).Alignment()]
dst dst
src src
(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)])
mem) mem)
// Calls // Calls

View file

@@ -320,82 +320,82 @@
// Lowering stores // Lowering stores
// These more-specific FP versions of Store pattern should come first. // These more-specific FP versions of Store pattern should come first.
(Store [8] ptr val mem) && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 && is64BitFloat(val.Type) -> (FMOVDstore ptr val mem)
(Store [4] ptr val mem) && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 && is32BitFloat(val.Type) -> (FMOVSstore ptr val mem)
(Store [8] ptr val mem) -> (MOVDstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 8 -> (MOVDstore ptr val mem)
(Store [4] ptr val mem) -> (MOVWstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 4 -> (MOVWstore ptr val mem)
(Store [2] ptr val mem) -> (MOVHstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 2 -> (MOVHstore ptr val mem)
(Store [1] ptr val mem) -> (MOVBstore ptr val mem) (Store {t} ptr val mem) && t.(Type).Size() == 1 -> (MOVBstore ptr val mem)
// Lowering moves // Lowering moves
// Load and store for small copies. // Load and store for small copies.
(Move [s] _ _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Move [0] _ _ mem) -> mem
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstore dst (MOVBZload src mem) mem) (Move [1] dst src mem) -> (MOVBstore dst (MOVBZload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 2 -> (MOVHstore dst (MOVHZload src mem) mem) (Move [2] dst src mem) -> (MOVHstore dst (MOVHZload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 4 -> (MOVWstore dst (MOVWZload src mem) mem) (Move [4] dst src mem) -> (MOVWstore dst (MOVWZload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 8 -> (MOVDstore dst (MOVDload src mem) mem) (Move [8] dst src mem) -> (MOVDstore dst (MOVDload src mem) mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 16 -> (Move [16] dst src mem) ->
(MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore [8] dst (MOVDload [8] src mem)
(MOVDstore dst (MOVDload src mem) mem)) (MOVDstore dst (MOVDload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 24 -> (Move [24] dst src mem) ->
(MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [16] dst (MOVDload [16] src mem)
(MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore [8] dst (MOVDload [8] src mem)
(MOVDstore dst (MOVDload src mem) mem))) (MOVDstore dst (MOVDload src mem) mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 3 -> (Move [3] dst src mem) ->
(MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [2] dst (MOVBZload [2] src mem)
(MOVHstore dst (MOVHZload src mem) mem)) (MOVHstore dst (MOVHZload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 5 -> (Move [5] dst src mem) ->
(MOVBstore [4] dst (MOVBZload [4] src mem) (MOVBstore [4] dst (MOVBZload [4] src mem)
(MOVWstore dst (MOVWZload src mem) mem)) (MOVWstore dst (MOVWZload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 6 -> (Move [6] dst src mem) ->
(MOVHstore [4] dst (MOVHZload [4] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem)
(MOVWstore dst (MOVWZload src mem) mem)) (MOVWstore dst (MOVWZload src mem) mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() == 7 -> (Move [7] dst src mem) ->
(MOVBstore [6] dst (MOVBZload [6] src mem) (MOVBstore [6] dst (MOVBZload [6] src mem)
(MOVHstore [4] dst (MOVHZload [4] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem)
(MOVWstore dst (MOVWZload src mem) mem))) (MOVWstore dst (MOVWZload src mem) mem)))
// MVC for other moves. Use up to 4 instructions (sizes up to 1024 bytes). // MVC for other moves. Use up to 4 instructions (sizes up to 1024 bytes).
(Move [s] dst src mem) && SizeAndAlign(s).Size() > 0 && SizeAndAlign(s).Size() <= 256 -> (Move [s] dst src mem) && s > 0 && s <= 256 ->
(MVC [makeValAndOff(SizeAndAlign(s).Size(), 0)] dst src mem) (MVC [makeValAndOff(s, 0)] dst src mem)
(Move [s] dst src mem) && SizeAndAlign(s).Size() > 256 && SizeAndAlign(s).Size() <= 512 -> (Move [s] dst src mem) && s > 256 && s <= 512 ->
(MVC [makeValAndOff(SizeAndAlign(s).Size()-256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)) (MVC [makeValAndOff(s-256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))
(Move [s] dst src mem) && SizeAndAlign(s).Size() > 512 && SizeAndAlign(s).Size() <= 768 -> (Move [s] dst src mem) && s > 512 && s <= 768 ->
(MVC [makeValAndOff(SizeAndAlign(s).Size()-512, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))) (MVC [makeValAndOff(s-512, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)))
(Move [s] dst src mem) && SizeAndAlign(s).Size() > 768 && SizeAndAlign(s).Size() <= 1024 -> (Move [s] dst src mem) && s > 768 && s <= 1024 ->
(MVC [makeValAndOff(SizeAndAlign(s).Size()-768, 768)] dst src (MVC [makeValAndOff(256, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)))) (MVC [makeValAndOff(s-768, 768)] dst src (MVC [makeValAndOff(256, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))))
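
A worked example of the MVC chunking: a 700-byte Move matches the 512 < s <= 768 rule and expands to three MVCs, innermost first. The sketch below reproduces the (length, offset) pairs that makeValAndOff packs into each AuxInt:

	package main

	import "fmt"

	// mvcChunks lists the (length, offset) pairs the s390x rules emit
	// for an s-byte Move with 0 < s <= 1024, innermost MVC first.
	func mvcChunks(s int64) [][2]int64 {
		var chunks [][2]int64
		for off := int64(0); off < s; off += 256 {
			n := s - off
			if n > 256 {
				n = 256
			}
			chunks = append(chunks, [2]int64{n, off})
		}
		return chunks
	}

	func main() {
		fmt.Println(mvcChunks(700)) // [[256 0] [256 256] [188 512]]
	}
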
// Move more than 1024 bytes using a loop. // Move more than 1024 bytes using a loop.
(Move [s] dst src mem) && SizeAndAlign(s).Size() > 1024 -> (Move [s] dst src mem) && s > 1024 ->
(LoweredMove [SizeAndAlign(s).Size()%256] dst src (ADDconst <src.Type> src [(SizeAndAlign(s).Size()/256)*256]) mem) (LoweredMove [s%256] dst src (ADDconst <src.Type> src [(s/256)*256]) mem)
// Lowering Zero instructions // Lowering Zero instructions
(Zero [s] _ mem) && SizeAndAlign(s).Size() == 0 -> mem (Zero [0] _ mem) -> mem
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 1 -> (MOVBstoreconst [0] destptr mem) (Zero [1] destptr mem) -> (MOVBstoreconst [0] destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 2 -> (MOVHstoreconst [0] destptr mem) (Zero [2] destptr mem) -> (MOVHstoreconst [0] destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 4 -> (MOVWstoreconst [0] destptr mem) (Zero [4] destptr mem) -> (MOVWstoreconst [0] destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 8 -> (MOVDstoreconst [0] destptr mem) (Zero [8] destptr mem) -> (MOVDstoreconst [0] destptr mem)
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 3 -> (Zero [3] destptr mem) ->
(MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVBstoreconst [makeValAndOff(0,2)] destptr
(MOVHstoreconst [0] destptr mem)) (MOVHstoreconst [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 5 -> (Zero [5] destptr mem) ->
(MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVBstoreconst [makeValAndOff(0,4)] destptr
(MOVWstoreconst [0] destptr mem)) (MOVWstoreconst [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 6 -> (Zero [6] destptr mem) ->
(MOVHstoreconst [makeValAndOff(0,4)] destptr (MOVHstoreconst [makeValAndOff(0,4)] destptr
(MOVWstoreconst [0] destptr mem)) (MOVWstoreconst [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() == 7 -> (Zero [7] destptr mem) ->
(MOVWstoreconst [makeValAndOff(0,3)] destptr (MOVWstoreconst [makeValAndOff(0,3)] destptr
(MOVWstoreconst [0] destptr mem)) (MOVWstoreconst [0] destptr mem))
(Zero [s] destptr mem) && SizeAndAlign(s).Size() > 0 && SizeAndAlign(s).Size() <= 1024 -> (Zero [s] destptr mem) && s > 0 && s <= 1024 ->
(CLEAR [makeValAndOff(SizeAndAlign(s).Size(), 0)] destptr mem) (CLEAR [makeValAndOff(s, 0)] destptr mem)
// Move more than 1024 bytes using a loop. // Move more than 1024 bytes using a loop.
(Zero [s] destptr mem) && SizeAndAlign(s).Size() > 1024 -> (Zero [s] destptr mem) && s > 1024 ->
(LoweredZero [SizeAndAlign(s).Size()%256] destptr (ADDconst <destptr.Type> destptr [(SizeAndAlign(s).Size()/256)*256]) mem) (LoweredZero [s%256] destptr (ADDconst <destptr.Type> destptr [(s/256)*256]) mem)
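
For the s > 1024 loop form, the length splits into whole 256-byte iterations plus a remainder kept in AuxInt, and the ADDconst marks where the loop stops. A sketch of the arithmetic:

	package main

	import "fmt"

	func main() {
		s := int64(1500)
		loopBytes := (s / 256) * 256 // cleared by the loop body: 1280
		tail := s % 256              // trailing CLEAR length:    220
		fmt.Println(loopBytes, tail)
	}
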
// Lowering constants // Lowering constants
(Const8 [val]) -> (MOVDconst [val]) (Const8 [val]) -> (MOVDconst [val])

View file

@@ -18,11 +18,11 @@
(OffPtr <config.fe.TypeFloat32().PtrTo()> [4] ptr) (OffPtr <config.fe.TypeFloat32().PtrTo()> [4] ptr)
mem) mem)
) )
(Store [8] dst (ComplexMake real imag) mem) -> (Store {t} dst (ComplexMake real imag) mem) && t.(Type).Size() == 8 ->
(Store [4] {config.fe.TypeFloat32()} (Store {config.fe.TypeFloat32()}
(OffPtr <config.fe.TypeFloat32().PtrTo()> [4] dst) (OffPtr <config.fe.TypeFloat32().PtrTo()> [4] dst)
imag imag
(Store [4] {config.fe.TypeFloat32()} dst real mem)) (Store {config.fe.TypeFloat32()} dst real mem))
(Load <t> ptr mem) && t.IsComplex() && t.Size() == 16 -> (Load <t> ptr mem) && t.IsComplex() && t.Size() == 16 ->
(ComplexMake (ComplexMake
(Load <config.fe.TypeFloat64()> ptr mem) (Load <config.fe.TypeFloat64()> ptr mem)
@@ -30,11 +30,11 @@
(OffPtr <config.fe.TypeFloat64().PtrTo()> [8] ptr) (OffPtr <config.fe.TypeFloat64().PtrTo()> [8] ptr)
mem) mem)
) )
(Store [16] dst (ComplexMake real imag) mem) -> (Store {t} dst (ComplexMake real imag) mem) && t.(Type).Size() == 16 ->
(Store [8] {config.fe.TypeFloat64()} (Store {config.fe.TypeFloat64()}
(OffPtr <config.fe.TypeFloat64().PtrTo()> [8] dst) (OffPtr <config.fe.TypeFloat64().PtrTo()> [8] dst)
imag imag
(Store [8] {config.fe.TypeFloat64()} dst real mem)) (Store {config.fe.TypeFloat64()} dst real mem))
// string ops // string ops
(StringPtr (StringMake ptr _)) -> ptr (StringPtr (StringMake ptr _)) -> ptr
@@ -46,11 +46,11 @@
(Load <config.fe.TypeInt()> (Load <config.fe.TypeInt()>
(OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr) (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr)
mem)) mem))
(Store [2*config.PtrSize] dst (StringMake ptr len) mem) -> (Store dst (StringMake ptr len) mem) ->
(Store [config.PtrSize] {config.fe.TypeInt()} (Store {config.fe.TypeInt()}
(OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst)
len len
(Store [config.PtrSize] {config.fe.TypeBytePtr()} dst ptr mem)) (Store {config.fe.TypeBytePtr()} dst ptr mem))
// slice ops // slice ops
(SlicePtr (SliceMake ptr _ _ )) -> ptr (SlicePtr (SliceMake ptr _ _ )) -> ptr
@@ -66,14 +66,14 @@
(Load <config.fe.TypeInt()> (Load <config.fe.TypeInt()>
(OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] ptr) (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] ptr)
mem)) mem))
(Store [3*config.PtrSize] dst (SliceMake ptr len cap) mem) -> (Store dst (SliceMake ptr len cap) mem) ->
(Store [config.PtrSize] {config.fe.TypeInt()} (Store {config.fe.TypeInt()}
(OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst) (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst)
cap cap
(Store [config.PtrSize] {config.fe.TypeInt()} (Store {config.fe.TypeInt()}
(OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst)
len len
(Store [config.PtrSize] {config.fe.TypeBytePtr()} dst ptr mem))) (Store {config.fe.TypeBytePtr()} dst ptr mem)))
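
Because every component store now names its own type in Aux, the slice rule above no longer needs the 3*config.PtrSize size that the old pattern matched on. A sketch of the stores it emits, assuming an 8-byte pointer (strings and interfaces decompose the same way with two stores):

	package main

	import "fmt"

	// sliceStores lists the typed component stores the generic rule
	// produces for (Store dst (SliceMake ptr len cap) mem),
	// innermost first.
	func sliceStores(ptrSize int64) []string {
		return []string{
			"Store {BytePtr} dst+0 ptr",
			fmt.Sprintf("Store {Int} dst+%d len", ptrSize),
			fmt.Sprintf("Store {Int} dst+%d cap", 2*ptrSize),
		}
	}

	func main() {
		for _, s := range sliceStores(8) {
			fmt.Println(s)
		}
	}
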
// interface ops // interface ops
(ITab (IMake itab _)) -> itab (ITab (IMake itab _)) -> itab
@@ -85,8 +85,8 @@
(Load <config.fe.TypeBytePtr()> (Load <config.fe.TypeBytePtr()>
(OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] ptr) (OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] ptr)
mem)) mem))
(Store [2*config.PtrSize] dst (IMake itab data) mem) -> (Store dst (IMake itab data) mem) ->
(Store [config.PtrSize] {config.fe.TypeBytePtr()} (Store {config.fe.TypeBytePtr()}
(OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] dst) (OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] dst)
data data
(Store [config.PtrSize] {config.fe.TypeUintptr()} dst itab mem)) (Store {config.fe.TypeUintptr()} dst itab mem))

View file

@@ -30,17 +30,17 @@
(Load <config.fe.TypeUInt32()> ptr mem) (Load <config.fe.TypeUInt32()> ptr mem)
(Load <config.fe.TypeUInt32()> (OffPtr <config.fe.TypeUInt32().PtrTo()> [4] ptr) mem)) (Load <config.fe.TypeUInt32()> (OffPtr <config.fe.TypeUInt32().PtrTo()> [4] ptr) mem))
(Store [8] dst (Int64Make hi lo) mem) && !config.BigEndian -> (Store {t} dst (Int64Make hi lo) mem) && t.(Type).Size() == 8 && !config.BigEndian ->
(Store [4] {hi.Type} (Store {hi.Type}
(OffPtr <hi.Type.PtrTo()> [4] dst) (OffPtr <hi.Type.PtrTo()> [4] dst)
hi hi
(Store [4] {lo.Type} dst lo mem)) (Store {lo.Type} dst lo mem))
(Store [8] dst (Int64Make hi lo) mem) && config.BigEndian -> (Store {t} dst (Int64Make hi lo) mem) && t.(Type).Size() == 8 && config.BigEndian ->
(Store [4] {lo.Type} (Store {lo.Type}
(OffPtr <lo.Type.PtrTo()> [4] dst) (OffPtr <lo.Type.PtrTo()> [4] dst)
lo lo
(Store [4] {hi.Type} dst hi mem)) (Store {hi.Type} dst hi mem))
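
The two rules differ only in which 32-bit half lands at the low address. A sketch, assuming 4-byte words:

	package main

	import "fmt"

	// int64Halves returns which half goes at dst+0 and dst+4 for a
	// decomposed 8-byte store, per the two rules above.
	func int64Halves(bigEndian bool) (atZero, atFour string) {
		if bigEndian {
			return "hi", "lo"
		}
		return "lo", "hi"
	}

	func main() {
		fmt.Println(int64Halves(false)) // lo hi
		fmt.Println(int64Halves(true))  // hi lo
	}
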
(Arg {n} [off]) && is64BitInt(v.Type) && !config.BigEndian && v.Type.IsSigned() -> (Arg {n} [off]) && is64BitInt(v.Type) && !config.BigEndian && v.Type.IsSigned() ->
(Int64Make (Int64Make

View file

@@ -746,7 +746,7 @@
(NeqSlice x y) -> (NeqPtr (SlicePtr x) (SlicePtr y)) (NeqSlice x y) -> (NeqPtr (SlicePtr x) (SlicePtr y))
// Load of store of same address, with compatibly typed value and same size // Load of store of same address, with compatibly typed value and same size
(Load <t1> p1 (Store [w] p2 x _)) && isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() -> x (Load <t1> p1 (Store {t2} p2 x _)) && isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && t1.Size() == t2.(Type).Size() -> x
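
In Go-source terms, the rule above forwards a just-stored value to a load of the same address; the width comparison now reads the store's aux type instead of its old size AuxInt. Roughly (the SSA only sees this shape after earlier lowering):

	package main

	import "fmt"

	func main() {
		p := new(int64)
		*p = 42 // Store {int64} p 42 mem
		y := *p // Load <int64> p (Store ...): rewritten to y = 42
		fmt.Println(y)
	}
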
// Collapse OffPtr // Collapse OffPtr
(OffPtr (OffPtr p [b]) [a]) -> (OffPtr p [a+b]) (OffPtr (OffPtr p [b]) [a]) -> (OffPtr p [a+b])
@@ -795,35 +795,35 @@
(Store _ (StructMake0) mem) -> mem (Store _ (StructMake0) mem) -> mem
(Store dst (StructMake1 <t> f0) mem) -> (Store dst (StructMake1 <t> f0) mem) ->
(Store [t.FieldType(0).Size()] {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem) (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)
(Store dst (StructMake2 <t> f0 f1) mem) -> (Store dst (StructMake2 <t> f0 f1) mem) ->
(Store [t.FieldType(1).Size()] {t.FieldType(1)} (Store {t.FieldType(1)}
(OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
f1 f1
(Store [t.FieldType(0).Size()] {t.FieldType(0)} (Store {t.FieldType(0)}
(OffPtr <t.FieldType(0).PtrTo()> [0] dst) (OffPtr <t.FieldType(0).PtrTo()> [0] dst)
f0 mem)) f0 mem))
(Store dst (StructMake3 <t> f0 f1 f2) mem) -> (Store dst (StructMake3 <t> f0 f1 f2) mem) ->
(Store [t.FieldType(2).Size()] {t.FieldType(2)} (Store {t.FieldType(2)}
(OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
f2 f2
(Store [t.FieldType(1).Size()] {t.FieldType(1)} (Store {t.FieldType(1)}
(OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
f1 f1
(Store [t.FieldType(0).Size()] {t.FieldType(0)} (Store {t.FieldType(0)}
(OffPtr <t.FieldType(0).PtrTo()> [0] dst) (OffPtr <t.FieldType(0).PtrTo()> [0] dst)
f0 mem))) f0 mem)))
(Store dst (StructMake4 <t> f0 f1 f2 f3) mem) -> (Store dst (StructMake4 <t> f0 f1 f2 f3) mem) ->
(Store [t.FieldType(3).Size()] {t.FieldType(3)} (Store {t.FieldType(3)}
(OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst)
f3 f3
(Store [t.FieldType(2).Size()] {t.FieldType(2)} (Store {t.FieldType(2)}
(OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)
f2 f2
(Store [t.FieldType(1).Size()] {t.FieldType(1)} (Store {t.FieldType(1)}
(OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)
f1 f1
(Store [t.FieldType(0).Size()] {t.FieldType(0)} (Store {t.FieldType(0)}
(OffPtr <t.FieldType(0).PtrTo()> [0] dst) (OffPtr <t.FieldType(0).PtrTo()> [0] dst)
f0 mem)))) f0 mem))))
@@ -832,10 +832,10 @@
(StructSelect [0] x:(IData _)) -> x (StructSelect [0] x:(IData _)) -> x
// un-SSAable values use mem->mem copies // un-SSAable values use mem->mem copies
(Store [size] dst (Load <t> src mem) mem) && !config.fe.CanSSA(t) -> (Store {t} dst (Load src mem) mem) && !config.fe.CanSSA(t.(Type)) ->
(Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] {t} dst src mem) (Move {t} [t.(Type).Size()] dst src mem)
(Store [size] dst (Load <t> src mem) (VarDef {x} mem)) && !config.fe.CanSSA(t) -> (Store {t} dst (Load src mem) (VarDef {x} mem)) && !config.fe.CanSSA(t.(Type)) ->
(Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] {t} dst src (VarDef {x} mem)) (Move {t} [t.(Type).Size()] dst src (VarDef {x} mem))
// array ops // array ops
(ArraySelect (ArrayMake1 x)) -> x (ArraySelect (ArrayMake1 x)) -> x
@@ -847,7 +847,7 @@
(ArrayMake1 (Load <t.ElemType()> ptr mem)) (ArrayMake1 (Load <t.ElemType()> ptr mem))
(Store _ (ArrayMake0) mem) -> mem (Store _ (ArrayMake0) mem) -> mem
(Store [size] dst (ArrayMake1 e) mem) -> (Store [size] {e.Type} dst e mem) (Store dst (ArrayMake1 e) mem) -> (Store {e.Type} dst e mem)
(ArraySelect [0] (Load ptr mem)) -> (Load ptr mem) (ArraySelect [0] (Load ptr mem)) -> (Load ptr mem)

View file

@@ -287,15 +287,15 @@ var genericOps = []opData{
// Memory operations // Memory operations
{name: "Load", argLength: 2}, // Load from arg0. arg1=memory {name: "Load", argLength: 2}, // Load from arg0. arg1=memory
{name: "Store", argLength: 3, typ: "Mem", aux: "SymOff", symEffect: "None"}, // Store arg1 to arg0. arg2=memory, auxint=size, aux=type. Returns memory. {name: "Store", argLength: 3, typ: "Mem", aux: "Typ"}, // Store arg1 to arg0. arg2=memory, aux=type. Returns memory.
{name: "Move", argLength: 3, typ: "Mem", aux: "SymSizeAndAlign", symEffect: "None"}, // arg0=destptr, arg1=srcptr, arg2=mem, auxint=size+alignment, aux=type. Returns memory. {name: "Move", argLength: 3, typ: "Mem", aux: "TypSize"}, // arg0=destptr, arg1=srcptr, arg2=mem, auxint=size, aux=type. Returns memory.
{name: "Zero", argLength: 2, typ: "Mem", aux: "SymSizeAndAlign", symEffect: "None"}, // arg0=destptr, arg1=mem, auxint=size+alignment, aux=type. Returns memory. {name: "Zero", argLength: 2, typ: "Mem", aux: "TypSize"}, // arg0=destptr, arg1=mem, auxint=size, aux=type. Returns memory.
// Memory operations with write barriers. // Memory operations with write barriers.
// Expand to runtime calls. Write barrier will be removed if write on stack. // Expand to runtime calls. Write barrier will be removed if write on stack.
{name: "StoreWB", argLength: 3, typ: "Mem", aux: "SymOff", symEffect: "None"}, // Store arg1 to arg0. arg2=memory, auxint=size, aux=type. Returns memory. {name: "StoreWB", argLength: 3, typ: "Mem", aux: "Typ"}, // Store arg1 to arg0. arg2=memory, aux=type. Returns memory.
{name: "MoveWB", argLength: 3, typ: "Mem", aux: "SymSizeAndAlign", symEffect: "None"}, // arg0=destptr, arg1=srcptr, arg2=mem, auxint=size+alignment, aux=type. Returns memory. {name: "MoveWB", argLength: 3, typ: "Mem", aux: "TypSize"}, // arg0=destptr, arg1=srcptr, arg2=mem, auxint=size, aux=type. Returns memory.
{name: "ZeroWB", argLength: 2, typ: "Mem", aux: "SymSizeAndAlign", symEffect: "None"}, // arg0=destptr, arg1=mem, auxint=size+alignment, aux=type. Returns memory. {name: "ZeroWB", argLength: 2, typ: "Mem", aux: "TypSize"}, // arg0=destptr, arg1=mem, auxint=size, aux=type. Returns memory.
// Function calls. Arguments to the call have already been written to the stack. // Function calls. Arguments to the call have already been written to the stack.
// Return values appear on the stack. The method receiver, if any, is treated // Return values appear on the stack. The method receiver, if any, is treated

View file

@@ -657,14 +657,14 @@ func parseValue(val string, arch arch, loc string) (op opData, oparch string, ty
// Sanity check aux, auxint. // Sanity check aux, auxint.
if auxint != "" { if auxint != "" {
switch op.aux { switch op.aux {
case "Bool", "Int8", "Int16", "Int32", "Int64", "Int128", "Float32", "Float64", "SymOff", "SymValAndOff", "SymInt32", "SizeAndAlign", "SymSizeAndAlign": case "Bool", "Int8", "Int16", "Int32", "Int64", "Int128", "Float32", "Float64", "SymOff", "SymValAndOff", "SymInt32", "TypSize":
default: default:
log.Fatalf("%s: op %s %s can't have auxint", loc, op.name, op.aux) log.Fatalf("%s: op %s %s can't have auxint", loc, op.name, op.aux)
} }
} }
if aux != "" { if aux != "" {
switch op.aux { switch op.aux {
case "String", "Sym", "SymOff", "SymValAndOff", "SymInt32", "SymSizeAndAlign": case "String", "Sym", "SymOff", "SymValAndOff", "SymInt32", "Typ", "TypSize":
default: default:
log.Fatalf("%s: op %s %s can't have aux", loc, op.name, op.aux) log.Fatalf("%s: op %s %s can't have aux", loc, op.name, op.aux)
} }

View file

@@ -66,7 +66,7 @@ func TestLoopConditionS390X(t *testing.T) {
Goto("b1")), Goto("b1")),
Bloc("b3", Bloc("b3",
Valu("retdef", OpVarDef, TypeMem, 0, nil, "mem"), Valu("retdef", OpVarDef, TypeMem, 0, nil, "mem"),
Valu("store", OpStore, TypeMem, 8, TypeInt64, "ret", "phisum", "retdef"), Valu("store", OpStore, TypeMem, 0, TypeInt64, "ret", "phisum", "retdef"),
Exit("store"))) Exit("store")))
CheckFunc(fun.f) CheckFunc(fun.f)
Compile(fun.f) Compile(fun.f)

View file

@@ -405,7 +405,7 @@ func TestNilcheckBug(t *testing.T) {
If("bool2", "extra", "exit")), If("bool2", "extra", "exit")),
Bloc("extra", Bloc("extra",
// prevent fuse from eliminating this block // prevent fuse from eliminating this block
Valu("store", OpStore, TypeMem, 8, nil, "ptr1", "nilptr", "mem"), Valu("store", OpStore, TypeMem, 0, ptrType, "ptr1", "nilptr", "mem"),
Goto("exit")), Goto("exit")),
Bloc("exit", Bloc("exit",
Valu("phi", OpPhi, TypeMem, 0, nil, "mem", "store"), Valu("phi", OpPhi, TypeMem, 0, nil, "mem", "store"),

View file

@@ -66,12 +66,12 @@ const (
auxInt128 // auxInt represents a 128-bit integer. Always 0. auxInt128 // auxInt represents a 128-bit integer. Always 0.
auxFloat32 // auxInt is a float32 (encoded with math.Float64bits) auxFloat32 // auxInt is a float32 (encoded with math.Float64bits)
auxFloat64 // auxInt is a float64 (encoded with math.Float64bits) auxFloat64 // auxInt is a float64 (encoded with math.Float64bits)
auxSizeAndAlign // auxInt is a SizeAndAlign
auxString // aux is a string auxString // aux is a string
auxSym // aux is a symbol auxSym // aux is a symbol
auxSymOff // aux is a symbol, auxInt is an offset auxSymOff // aux is a symbol, auxInt is an offset
auxSymValAndOff // aux is a symbol, auxInt is a ValAndOff auxSymValAndOff // aux is a symbol, auxInt is a ValAndOff
auxSymSizeAndAlign // aux is a symbol, auxInt is a SizeAndAlign auxTyp // aux is a type
auxTypSize // aux is a type, auxInt is a size, must have Aux.(Type).Size() == AuxInt
auxSymInt32 // aux is a symbol, auxInt is a 32-bit integer auxSymInt32 // aux is a symbol, auxInt is a 32-bit integer
) )
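
The redundancy in auxTypSize is deliberate: rules that still need a byte count, such as the partial Move lowerings, read AuxInt, while alignment queries go through the type. A sketch of the stated invariant as a check (hypothetical helper, not the compiler's verifier):

	package main

	import "fmt"

	type Type interface{ Size() int64 }

	type sized int64

	func (t sized) Size() int64 { return int64(t) }

	// checkTypSize enforces the auxTypSize contract stated above:
	// Aux.(Type).Size() == AuxInt.
	func checkTypSize(auxInt int64, aux interface{}) error {
		if got := aux.(Type).Size(); got != auxInt {
			return fmt.Errorf("auxTypSize mismatch: AuxInt=%d, type size=%d", auxInt, got)
		}
		return nil
	}

	func main() {
		fmt.Println(checkTypSize(8, sized(8))) // <nil>
		fmt.Println(checkTypSize(8, sized(4))) // auxTypSize mismatch: ...
	}
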
@@ -154,31 +154,3 @@ func (x ValAndOff) add(off int64) int64 {
} }
return makeValAndOff(x.Val(), x.Off()+off) return makeValAndOff(x.Val(), x.Off()+off)
} }
// SizeAndAlign holds both the size and the alignment of a type,
// used in Zero and Move ops.
// The high 8 bits hold the alignment.
// The low 56 bits hold the size.
type SizeAndAlign int64
func (x SizeAndAlign) Size() int64 {
return int64(x) & (1<<56 - 1)
}
func (x SizeAndAlign) Align() int64 {
return int64(uint64(x) >> 56)
}
func (x SizeAndAlign) Int64() int64 {
return int64(x)
}
func (x SizeAndAlign) String() string {
return fmt.Sprintf("size=%d,align=%d", x.Size(), x.Align())
}
func MakeSizeAndAlign(size, align int64) SizeAndAlign {
if size&^(1<<56-1) != 0 {
panic("size too big in SizeAndAlign")
}
if align >= 1<<8 {
panic("alignment too big in SizeAndAlign")
}
return SizeAndAlign(size | align<<56)
}
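
For reference when reading older SSA dumps: the deleted encoding packed alignment into the top 8 bits of AuxInt and size into the low 56. A standalone sketch of that packing (the removed scheme, reproduced from the code above):

	package main

	import "fmt"

	// sizeAndAlign reproduces the deleted encoding: Move/Zero AuxInt
	// held size | align<<56 before this change; now AuxInt is the
	// plain size and alignment comes from the aux type.
	type sizeAndAlign int64

	func pack(size, align int64) sizeAndAlign { return sizeAndAlign(size | align<<56) }

	func (x sizeAndAlign) size() int64  { return int64(x) & (1<<56 - 1) }
	func (x sizeAndAlign) align() int64 { return int64(uint64(x) >> 56) }

	func main() {
		v := pack(24, 8)
		fmt.Println(v.size(), v.align()) // 24 8
	}
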

View file

@@ -21558,44 +21558,38 @@ var opcodeTable = [...]opInfo{
}, },
{ {
name: "Store", name: "Store",
auxType: auxSymOff, auxType: auxTyp,
argLen: 3, argLen: 3,
symEffect: SymNone,
generic: true, generic: true,
}, },
{ {
name: "Move", name: "Move",
auxType: auxSymSizeAndAlign, auxType: auxTypSize,
argLen: 3, argLen: 3,
symEffect: SymNone,
generic: true, generic: true,
}, },
{ {
name: "Zero", name: "Zero",
auxType: auxSymSizeAndAlign, auxType: auxTypSize,
argLen: 2, argLen: 2,
symEffect: SymNone,
generic: true, generic: true,
}, },
{ {
name: "StoreWB", name: "StoreWB",
auxType: auxSymOff, auxType: auxTyp,
argLen: 3, argLen: 3,
symEffect: SymNone,
generic: true, generic: true,
}, },
{ {
name: "MoveWB", name: "MoveWB",
auxType: auxSymSizeAndAlign, auxType: auxTypSize,
argLen: 3, argLen: 3,
symEffect: SymNone,
generic: true, generic: true,
}, },
{ {
name: "ZeroWB", name: "ZeroWB",
auxType: auxSymSizeAndAlign, auxType: auxTypSize,
argLen: 2, argLen: 2,
symEffect: SymNone,
generic: true, generic: true,
}, },
{ {

View file

@@ -77,15 +77,15 @@ func genFunction(size int) []bloc {
Valu(valn("addr", i, 1), OpAddr, ptrType, 0, nil, "sb"), Valu(valn("addr", i, 1), OpAddr, ptrType, 0, nil, "sb"),
Valu(valn("addr", i, 2), OpAddr, ptrType, 0, nil, "sb"), Valu(valn("addr", i, 2), OpAddr, ptrType, 0, nil, "sb"),
Valu(valn("addr", i, 3), OpAddr, ptrType, 0, nil, "sb"), Valu(valn("addr", i, 3), OpAddr, ptrType, 0, nil, "sb"),
Valu(valn("zero", i, 1), OpZero, TypeMem, 8, nil, valn("addr", i, 3), Valu(valn("zero", i, 1), OpZero, TypeMem, 8, elemType, valn("addr", i, 3),
valn("store", i-1, 4)), valn("store", i-1, 4)),
Valu(valn("store", i, 1), OpStore, TypeMem, 0, nil, valn("addr", i, 1), Valu(valn("store", i, 1), OpStore, TypeMem, 0, elemType, valn("addr", i, 1),
valn("v", i, 0), valn("zero", i, 1)), valn("v", i, 0), valn("zero", i, 1)),
Valu(valn("store", i, 2), OpStore, TypeMem, 0, nil, valn("addr", i, 2), Valu(valn("store", i, 2), OpStore, TypeMem, 0, elemType, valn("addr", i, 2),
valn("v", i, 0), valn("store", i, 1)), valn("v", i, 0), valn("store", i, 1)),
Valu(valn("store", i, 3), OpStore, TypeMem, 0, nil, valn("addr", i, 1), Valu(valn("store", i, 3), OpStore, TypeMem, 0, elemType, valn("addr", i, 1),
valn("v", i, 0), valn("store", i, 2)), valn("v", i, 0), valn("store", i, 2)),
Valu(valn("store", i, 4), OpStore, TypeMem, 0, nil, valn("addr", i, 3), Valu(valn("store", i, 4), OpStore, TypeMem, 0, elemType, valn("addr", i, 3),
valn("v", i, 0), valn("store", i, 3)), valn("v", i, 0), valn("store", i, 3)),
Goto(blockn(i+1)))) Goto(blockn(i+1))))
} }

View file

@@ -11405,31 +11405,29 @@ func rewriteValue386_OpMod8u(v *Value, config *Config) bool {
func rewriteValue386_OpMove(v *Value, config *Config) bool { func rewriteValue386_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBload src mem) mem) // result: (MOVBstore dst (MOVBload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(Op386MOVBstore) v.reset(Op386MOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, Op386MOVBload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, Op386MOVBload, config.fe.TypeUInt8())
@ -11439,17 +11437,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVWstore dst (MOVWload src mem) mem) // result: (MOVWstore dst (MOVWload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(Op386MOVWstore) v.reset(Op386MOVWstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, Op386MOVWload, config.fe.TypeUInt16()) v0 := b.NewValue0(v.Pos, Op386MOVWload, config.fe.TypeUInt16())
@ -11459,17 +11456,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVLstore dst (MOVLload src mem) mem) // result: (MOVLstore dst (MOVLload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(Op386MOVLstore) v.reset(Op386MOVLstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, Op386MOVLload, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, Op386MOVLload, config.fe.TypeUInt32())
@ -11479,17 +11475,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVWstore dst (MOVWload src mem) mem)) // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVWstore dst (MOVWload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(Op386MOVBstore) v.reset(Op386MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -11508,17 +11503,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [5] dst src mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstore [4] dst (MOVBload [4] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVBstore [4] dst (MOVBload [4] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 5) {
break
}
v.reset(Op386MOVBstore) v.reset(Op386MOVBstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -11537,17 +11531,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [6] dst src mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 6) {
break
}
v.reset(Op386MOVWstore) v.reset(Op386MOVWstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -11566,17 +11559,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [7] dst src mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVLstore [3] dst (MOVLload [3] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVLstore [3] dst (MOVLload [3] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 7) {
break
}
v.reset(Op386MOVLstore) v.reset(Op386MOVLstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(dst) v.AddArg(dst)
@ -11595,17 +11587,16 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] dst src mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVLstore [4] dst (MOVLload [4] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVLstore [4] dst (MOVLload [4] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8) {
break
}
v.reset(Op386MOVLstore) v.reset(Op386MOVLstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -11625,24 +11616,24 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size()%4 != 0 // cond: s > 8 && s%4 != 0
// result: (Move [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%4] (ADDLconst <dst.Type> dst [SizeAndAlign(s).Size()%4]) (ADDLconst <src.Type> src [SizeAndAlign(s).Size()%4]) (MOVLstore dst (MOVLload src mem) mem)) // result: (Move [s-s%4] (ADDLconst <dst.Type> dst [s%4]) (ADDLconst <src.Type> src [s%4]) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size()%4 != 0) { if !(s > 8 && s%4 != 0) {
break break
} }
v.reset(OpMove) v.reset(OpMove)
v.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%4 v.AuxInt = s - s%4
v0 := b.NewValue0(v.Pos, Op386ADDLconst, dst.Type) v0 := b.NewValue0(v.Pos, Op386ADDLconst, dst.Type)
v0.AuxInt = SizeAndAlign(s).Size() % 4 v0.AuxInt = s % 4
v0.AddArg(dst) v0.AddArg(dst)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, Op386ADDLconst, src.Type) v1 := b.NewValue0(v.Pos, Op386ADDLconst, src.Type)
v1.AuxInt = SizeAndAlign(s).Size() % 4 v1.AuxInt = s % 4
v1.AddArg(src) v1.AddArg(src)
v.AddArg(v1) v.AddArg(v1)
v2 := b.NewValue0(v.Pos, Op386MOVLstore, TypeMem) v2 := b.NewValue0(v.Pos, Op386MOVLstore, TypeMem)
@ -11656,39 +11647,39 @@ func rewriteValue386_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size() <= 4*128 && SizeAndAlign(s).Size()%4 == 0 && !config.noDuffDevice // cond: s > 8 && s <= 4*128 && s%4 == 0 && !config.noDuffDevice
// result: (DUFFCOPY [10*(128-SizeAndAlign(s).Size()/4)] dst src mem) // result: (DUFFCOPY [10*(128-s/4)] dst src mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size() <= 4*128 && SizeAndAlign(s).Size()%4 == 0 && !config.noDuffDevice) { if !(s > 8 && s <= 4*128 && s%4 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(Op386DUFFCOPY) v.reset(Op386DUFFCOPY)
v.AuxInt = 10 * (128 - SizeAndAlign(s).Size()/4) v.AuxInt = 10 * (128 - s/4)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: (SizeAndAlign(s).Size() > 4*128 || config.noDuffDevice) && SizeAndAlign(s).Size()%4 == 0 // cond: (s > 4*128 || config.noDuffDevice) && s%4 == 0
// result: (REPMOVSL dst src (MOVLconst [SizeAndAlign(s).Size()/4]) mem) // result: (REPMOVSL dst src (MOVLconst [s/4]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !((SizeAndAlign(s).Size() > 4*128 || config.noDuffDevice) && SizeAndAlign(s).Size()%4 == 0) { if !((s > 4*128 || config.noDuffDevice) && s%4 == 0) {
break break
} }
v.reset(Op386REPMOVSL) v.reset(Op386REPMOVSL)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32())
v0.AuxInt = SizeAndAlign(s).Size() / 4 v0.AuxInt = s / 4
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
return true return true
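Two effects of unpacking the AuxInt recur through these matchers: constant sizes migrate into the match pattern itself (Move [0], [1], [2], ...), letting the generated code reject on v.AuxInt before binding any variables, and the variable-size conditions shrink from SizeAndAlign(s).Size() to plain s. A sketch of one simplified predicate (moveNeedsTailFixup is a hypothetical name; the generated code inlines the test):

    // was: SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size()%4 != 0
    func moveNeedsTailFixup(s int64) bool {
        return s > 8 && s%4 != 0
    }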
@ -12941,17 +12932,15 @@ func rewriteValue386_OpStaticCall(v *Value, config *Config) bool {
func rewriteValue386_OpStore(v *Value, config *Config) bool { func rewriteValue386_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (MOVSDstore ptr val mem) // result: (MOVSDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(Op386MOVSDstore) v.reset(Op386MOVSDstore)
@ -12960,17 +12949,15 @@ func rewriteValue386_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (MOVSSstore ptr val mem) // result: (MOVSSstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(Op386MOVSSstore) v.reset(Op386MOVSSstore)
@ -12979,48 +12966,51 @@ func rewriteValue386_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 4
// result: (MOVLstore ptr val mem) // result: (MOVLstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 4) {
break
}
v.reset(Op386MOVLstore) v.reset(Op386MOVLstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(Op386MOVWstore) v.reset(Op386MOVWstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(Op386MOVBstore) v.reset(Op386MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
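Store lowering on 386 now keys entirely off the type carried in Aux. The rules above dispatch, in effect, like this sketch (inside package ssa; not the generated control flow):

    t := v.Aux.(Type)
    switch {
    case t.Size() == 8 && is64BitFloat(val.Type): // MOVSDstore
    case t.Size() == 4 && is32BitFloat(val.Type): // MOVSSstore
    case t.Size() == 4: // MOVLstore
    case t.Size() == 2: // MOVWstore
    case t.Size() == 1: // MOVBstore
    }

Rule order matters: the float tests come first, so a 4-byte float store selects MOVSSstore rather than MOVLstore, and an 8-byte store (necessarily a float here) matches at all.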
@ -13241,78 +13231,73 @@ func rewriteValue386_OpXor8(v *Value, config *Config) bool {
func rewriteValue386_OpZero(v *Value, config *Config) bool { func rewriteValue386_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [1] destptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstoreconst [0] destptr mem) // result: (MOVBstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVBstoreconst) v.reset(Op386MOVBstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [2] destptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVWstoreconst [0] destptr mem) // result: (MOVWstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVWstoreconst) v.reset(Op386MOVWstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [4] destptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVLstoreconst [0] destptr mem) // result: (MOVLstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVLstoreconst) v.reset(Op386MOVLstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [3] destptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVBstoreconst) v.reset(Op386MOVBstoreconst)
v.AuxInt = makeValAndOff(0, 2) v.AuxInt = makeValAndOff(0, 2)
v.AddArg(destptr) v.AddArg(destptr)
@ -13323,16 +13308,15 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [5] destptr mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 5) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVBstoreconst) v.reset(Op386MOVBstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -13343,16 +13327,15 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [6] destptr mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 6) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVWstoreconst) v.reset(Op386MOVWstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -13363,16 +13346,15 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [7] destptr mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 7) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVLstoreconst) v.reset(Op386MOVLstoreconst)
v.AuxInt = makeValAndOff(0, 3) v.AuxInt = makeValAndOff(0, 3)
v.AddArg(destptr) v.AddArg(destptr)
@ -13384,19 +13366,19 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size()%4 != 0 && SizeAndAlign(s).Size() > 4 // cond: s%4 != 0 && s > 4
// result: (Zero [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%4] (ADDLconst destptr [SizeAndAlign(s).Size()%4]) (MOVLstoreconst [0] destptr mem)) // result: (Zero [s-s%4] (ADDLconst destptr [s%4]) (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%4 != 0 && SizeAndAlign(s).Size() > 4) { if !(s%4 != 0 && s > 4) {
break break
} }
v.reset(OpZero) v.reset(OpZero)
v.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%4 v.AuxInt = s - s%4
v0 := b.NewValue0(v.Pos, Op386ADDLconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, Op386ADDLconst, config.fe.TypeUInt32())
v0.AuxInt = SizeAndAlign(s).Size() % 4 v0.AuxInt = s % 4
v0.AddArg(destptr) v0.AddArg(destptr)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, Op386MOVLstoreconst, TypeMem) v1 := b.NewValue0(v.Pos, Op386MOVLstoreconst, TypeMem)
@ -13406,16 +13388,15 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [8] destptr mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVLstoreconst) v.reset(Op386MOVLstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -13426,16 +13407,15 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [12] destptr mem)
// cond: SizeAndAlign(s).Size() == 12 // cond:
// result: (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))) // result: (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 12) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVLstoreconst) v.reset(Op386MOVLstoreconst)
v.AuxInt = makeValAndOff(0, 8) v.AuxInt = makeValAndOff(0, 8)
v.AddArg(destptr) v.AddArg(destptr)
@ -13450,16 +13430,15 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [16] destptr mem)
// cond: SizeAndAlign(s).Size() == 16 // cond:
// result: (MOVLstoreconst [makeValAndOff(0,12)] destptr (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)))) // result: (MOVLstoreconst [makeValAndOff(0,12)] destptr (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 16) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(Op386MOVLstoreconst) v.reset(Op386MOVLstoreconst)
v.AuxInt = makeValAndOff(0, 12) v.AuxInt = makeValAndOff(0, 12)
v.AddArg(destptr) v.AddArg(destptr)
@ -13479,17 +13458,17 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size() <= 4*128 && SizeAndAlign(s).Size()%4 == 0 && !config.noDuffDevice // cond: s > 16 && s <= 4*128 && s%4 == 0 && !config.noDuffDevice
// result: (DUFFZERO [1*(128-SizeAndAlign(s).Size()/4)] destptr (MOVLconst [0]) mem) // result: (DUFFZERO [1*(128-s/4)] destptr (MOVLconst [0]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size() <= 4*128 && SizeAndAlign(s).Size()%4 == 0 && !config.noDuffDevice) { if !(s > 16 && s <= 4*128 && s%4 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(Op386DUFFZERO) v.reset(Op386DUFFZERO)
v.AuxInt = 1 * (128 - SizeAndAlign(s).Size()/4) v.AuxInt = 1 * (128 - s/4)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32())
v0.AuxInt = 0 v0.AuxInt = 0
@ -13498,19 +13477,19 @@ func rewriteValue386_OpZero(v *Value, config *Config) bool {
return true return true
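Worked numbers for the DUFFZERO offset above: the condition s <= 4*128 reflects a device of 128 four-byte clearing steps, so zeroing s bytes needs s/4 of them and the AuxInt skips the first 128 - s/4 entries:

    s = 64:      AuxInt = 1 * (128 - 64/4)  = 112
    s = 4*128:   AuxInt = 1 * (128 - 512/4) = 0   // run the whole device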
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: (SizeAndAlign(s).Size() > 4*128 || (config.noDuffDevice && SizeAndAlign(s).Size() > 16)) && SizeAndAlign(s).Size()%4 == 0 // cond: (s > 4*128 || (config.noDuffDevice && s > 16)) && s%4 == 0
// result: (REPSTOSL destptr (MOVLconst [SizeAndAlign(s).Size()/4]) (MOVLconst [0]) mem) // result: (REPSTOSL destptr (MOVLconst [s/4]) (MOVLconst [0]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !((SizeAndAlign(s).Size() > 4*128 || (config.noDuffDevice && SizeAndAlign(s).Size() > 16)) && SizeAndAlign(s).Size()%4 == 0) { if !((s > 4*128 || (config.noDuffDevice && s > 16)) && s%4 == 0) {
break break
} }
v.reset(Op386REPSTOSL) v.reset(Op386REPSTOSL)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32())
v0.AuxInt = SizeAndAlign(s).Size() / 4 v0.AuxInt = s / 4
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32()) v1 := b.NewValue0(v.Pos, Op386MOVLconst, config.fe.TypeUInt32())
v1.AuxInt = 0 v1.AuxInt = 0


@ -20000,31 +20000,29 @@ func rewriteValueAMD64_OpMod8u(v *Value, config *Config) bool {
func rewriteValueAMD64_OpMove(v *Value, config *Config) bool { func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBload src mem) mem) // result: (MOVBstore dst (MOVBload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpAMD64MOVBstore) v.reset(OpAMD64MOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, config.fe.TypeUInt8())
@ -20034,17 +20032,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVWstore dst (MOVWload src mem) mem) // result: (MOVWstore dst (MOVWload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpAMD64MOVWstore) v.reset(OpAMD64MOVWstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, config.fe.TypeUInt16()) v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, config.fe.TypeUInt16())
@ -20054,17 +20051,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVLstore dst (MOVLload src mem) mem) // result: (MOVLstore dst (MOVLload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpAMD64MOVLstore) v.reset(OpAMD64MOVLstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, config.fe.TypeUInt32())
@ -20074,17 +20070,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] dst src mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVQstore dst (MOVQload src mem) mem) // result: (MOVQstore dst (MOVQload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8) {
break
}
v.reset(OpAMD64MOVQstore) v.reset(OpAMD64MOVQstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, config.fe.TypeUInt64())
@ -20094,17 +20089,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [16] dst src mem)
// cond: SizeAndAlign(s).Size() == 16 // cond:
// result: (MOVOstore dst (MOVOload src mem) mem) // result: (MOVOstore dst (MOVOload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 16) {
break
}
v.reset(OpAMD64MOVOstore) v.reset(OpAMD64MOVOstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpAMD64MOVOload, TypeInt128) v0 := b.NewValue0(v.Pos, OpAMD64MOVOload, TypeInt128)
@ -20114,17 +20108,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVWstore dst (MOVWload src mem) mem)) // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVWstore dst (MOVWload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpAMD64MOVBstore) v.reset(OpAMD64MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -20143,17 +20136,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [5] dst src mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstore [4] dst (MOVBload [4] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVBstore [4] dst (MOVBload [4] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 5) {
break
}
v.reset(OpAMD64MOVBstore) v.reset(OpAMD64MOVBstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -20172,17 +20164,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [6] dst src mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 6) {
break
}
v.reset(OpAMD64MOVWstore) v.reset(OpAMD64MOVWstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -20201,17 +20192,16 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [7] dst src mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVLstore [3] dst (MOVLload [3] src mem) (MOVLstore dst (MOVLload src mem) mem)) // result: (MOVLstore [3] dst (MOVLload [3] src mem) (MOVLstore dst (MOVLload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 7) {
break
}
v.reset(OpAMD64MOVLstore) v.reset(OpAMD64MOVLstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(dst) v.AddArg(dst)
@ -20231,21 +20221,21 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size() < 16 // cond: s > 8 && s < 16
// result: (MOVQstore [SizeAndAlign(s).Size()-8] dst (MOVQload [SizeAndAlign(s).Size()-8] src mem) (MOVQstore dst (MOVQload src mem) mem)) // result: (MOVQstore [s-8] dst (MOVQload [s-8] src mem) (MOVQstore dst (MOVQload src mem) mem))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 8 && SizeAndAlign(s).Size() < 16) { if !(s > 8 && s < 16) {
break break
} }
v.reset(OpAMD64MOVQstore) v.reset(OpAMD64MOVQstore)
v.AuxInt = SizeAndAlign(s).Size() - 8 v.AuxInt = s - 8
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, config.fe.TypeUInt64())
v0.AuxInt = SizeAndAlign(s).Size() - 8 v0.AuxInt = s - 8
v0.AddArg(src) v0.AddArg(src)
v0.AddArg(mem) v0.AddArg(mem)
v.AddArg(v0) v.AddArg(v0)
@ -20260,24 +20250,24 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
return true return true
} }
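The 9-to-15-byte rule above bridges the gap between one and two 8-byte moves with a deliberate overlap: the second MOVQstore lands at offset s-8, so it overlaps the first store's tail instead of running past the buffer. For s = 12:

    offset 0:        MOVQload/MOVQstore covers bytes 0..7
    offset s-8 = 4:  MOVQload/MOVQstore covers bytes 4..11
    // bytes 4..7 are written twice with identical data; all 12 are covered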
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size()%16 != 0 && SizeAndAlign(s).Size()%16 <= 8 // cond: s > 16 && s%16 != 0 && s%16 <= 8
// result: (Move [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%16] (OffPtr <dst.Type> dst [SizeAndAlign(s).Size()%16]) (OffPtr <src.Type> src [SizeAndAlign(s).Size()%16]) (MOVQstore dst (MOVQload src mem) mem)) // result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (MOVQstore dst (MOVQload src mem) mem))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size()%16 != 0 && SizeAndAlign(s).Size()%16 <= 8) { if !(s > 16 && s%16 != 0 && s%16 <= 8) {
break break
} }
v.reset(OpMove) v.reset(OpMove)
v.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%16 v.AuxInt = s - s%16
v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
v0.AuxInt = SizeAndAlign(s).Size() % 16 v0.AuxInt = s % 16
v0.AddArg(dst) v0.AddArg(dst)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
v1.AuxInt = SizeAndAlign(s).Size() % 16 v1.AuxInt = s % 16
v1.AddArg(src) v1.AddArg(src)
v.AddArg(v1) v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpAMD64MOVQstore, TypeMem) v2 := b.NewValue0(v.Pos, OpAMD64MOVQstore, TypeMem)
@ -20291,24 +20281,24 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size()%16 != 0 && SizeAndAlign(s).Size()%16 > 8 // cond: s > 16 && s%16 != 0 && s%16 > 8
// result: (Move [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%16] (OffPtr <dst.Type> dst [SizeAndAlign(s).Size()%16]) (OffPtr <src.Type> src [SizeAndAlign(s).Size()%16]) (MOVOstore dst (MOVOload src mem) mem)) // result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (MOVOstore dst (MOVOload src mem) mem))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 16 && SizeAndAlign(s).Size()%16 != 0 && SizeAndAlign(s).Size()%16 > 8) { if !(s > 16 && s%16 != 0 && s%16 > 8) {
break break
} }
v.reset(OpMove) v.reset(OpMove)
v.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%16 v.AuxInt = s - s%16
v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
v0.AuxInt = SizeAndAlign(s).Size() % 16 v0.AuxInt = s % 16
v0.AddArg(dst) v0.AddArg(dst)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
v1.AuxInt = SizeAndAlign(s).Size() % 16 v1.AuxInt = s % 16
v1.AddArg(src) v1.AddArg(src)
v.AddArg(v1) v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpAMD64MOVOstore, TypeMem) v2 := b.NewValue0(v.Pos, OpAMD64MOVOstore, TypeMem)
@ -20322,39 +20312,39 @@ func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
return true return true
} }
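The two rules above peel a misaligned head so the bulk copy can proceed in whole 16-byte units: when s%16 fits in 8 bytes the head is a MOVQ pair, otherwise a MOVO pair, and either way the recursive Move gets size s - s%16 with both pointers advanced by s%16 via OffPtr. For s = 44:

    s%16 = 12 (> 8):  MOVOstore head covers bytes 0..15
    Move [32] at offset 12 covers bytes 12..43
    // bytes 12..15 are written twice with identical data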
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() >= 32 && SizeAndAlign(s).Size() <= 16*64 && SizeAndAlign(s).Size()%16 == 0 && !config.noDuffDevice // cond: s >= 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice
// result: (DUFFCOPY [14*(64-SizeAndAlign(s).Size()/16)] dst src mem) // result: (DUFFCOPY [14*(64-s/16)] dst src mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() >= 32 && SizeAndAlign(s).Size() <= 16*64 && SizeAndAlign(s).Size()%16 == 0 && !config.noDuffDevice) { if !(s >= 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(OpAMD64DUFFCOPY) v.reset(OpAMD64DUFFCOPY)
v.AuxInt = 14 * (64 - SizeAndAlign(s).Size()/16) v.AuxInt = 14 * (64 - s/16)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: (SizeAndAlign(s).Size() > 16*64 || config.noDuffDevice) && SizeAndAlign(s).Size()%8 == 0 // cond: (s > 16*64 || config.noDuffDevice) && s%8 == 0
// result: (REPMOVSQ dst src (MOVQconst [SizeAndAlign(s).Size()/8]) mem) // result: (REPMOVSQ dst src (MOVQconst [s/8]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !((SizeAndAlign(s).Size() > 16*64 || config.noDuffDevice) && SizeAndAlign(s).Size()%8 == 0) { if !((s > 16*64 || config.noDuffDevice) && s%8 == 0) {
break break
} }
v.reset(OpAMD64REPMOVSQ) v.reset(OpAMD64REPMOVSQ)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpAMD64MOVQconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpAMD64MOVQconst, config.fe.TypeUInt64())
v0.AuxInt = SizeAndAlign(s).Size() / 8 v0.AuxInt = s / 8
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
return true return true
@ -21903,17 +21893,15 @@ func rewriteValueAMD64_OpStaticCall(v *Value, config *Config) bool {
func rewriteValueAMD64_OpStore(v *Value, config *Config) bool { func rewriteValueAMD64_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (MOVSDstore ptr val mem) // result: (MOVSDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpAMD64MOVSDstore) v.reset(OpAMD64MOVSDstore)
@ -21922,17 +21910,15 @@ func rewriteValueAMD64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (MOVSSstore ptr val mem) // result: (MOVSSstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpAMD64MOVSSstore) v.reset(OpAMD64MOVSSstore)
@ -21941,64 +21927,68 @@ func rewriteValueAMD64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 8
// result: (MOVQstore ptr val mem) // result: (MOVQstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 8) {
break
}
v.reset(OpAMD64MOVQstore) v.reset(OpAMD64MOVQstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 4
// result: (MOVLstore ptr val mem) // result: (MOVLstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 4) {
break
}
v.reset(OpAMD64MOVLstore) v.reset(OpAMD64MOVLstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpAMD64MOVWstore) v.reset(OpAMD64MOVWstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpAMD64MOVBstore) v.reset(OpAMD64MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
@ -22277,94 +22267,88 @@ func rewriteValueAMD64_OpXor8(v *Value, config *Config) bool {
func rewriteValueAMD64_OpZero(v *Value, config *Config) bool { func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [1] destptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstoreconst [0] destptr mem) // result: (MOVBstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVBstoreconst) v.reset(OpAMD64MOVBstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [2] destptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVWstoreconst [0] destptr mem) // result: (MOVWstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVWstoreconst) v.reset(OpAMD64MOVWstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [4] destptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVLstoreconst [0] destptr mem) // result: (MOVLstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVLstoreconst) v.reset(OpAMD64MOVLstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [8] destptr mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVQstoreconst [0] destptr mem) // result: (MOVQstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVQstoreconst) v.reset(OpAMD64MOVQstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [3] destptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVBstoreconst) v.reset(OpAMD64MOVBstoreconst)
v.AuxInt = makeValAndOff(0, 2) v.AuxInt = makeValAndOff(0, 2)
v.AddArg(destptr) v.AddArg(destptr)
@ -22375,16 +22359,15 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [5] destptr mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 5) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVBstoreconst) v.reset(OpAMD64MOVBstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -22395,16 +22378,15 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [6] destptr mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 6) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVWstoreconst) v.reset(OpAMD64MOVWstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -22415,16 +22397,15 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [7] destptr mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 7) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVLstoreconst) v.reset(OpAMD64MOVLstoreconst)
v.AuxInt = makeValAndOff(0, 3) v.AuxInt = makeValAndOff(0, 3)
v.AddArg(destptr) v.AddArg(destptr)
@ -22436,19 +22417,19 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 // cond: s%8 != 0 && s > 8
// result: (Zero [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8] (OffPtr <destptr.Type> destptr [SizeAndAlign(s).Size()%8]) (MOVQstoreconst [0] destptr mem)) // result: (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8]) (MOVQstoreconst [0] destptr mem))
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) { if !(s%8 != 0 && s > 8) {
break break
} }
v.reset(OpZero) v.reset(OpZero)
v.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 v.AuxInt = s - s%8
v0 := b.NewValue0(v.Pos, OpOffPtr, destptr.Type) v0 := b.NewValue0(v.Pos, OpOffPtr, destptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() % 8 v0.AuxInt = s % 8
v0.AddArg(destptr) v0.AddArg(destptr)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, TypeMem) v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, TypeMem)
@ -22458,16 +22439,15 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [16] destptr mem)
// cond: SizeAndAlign(s).Size() == 16 // cond:
// result: (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem)) // result: (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 16) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVQstoreconst) v.reset(OpAMD64MOVQstoreconst)
v.AuxInt = makeValAndOff(0, 8) v.AuxInt = makeValAndOff(0, 8)
v.AddArg(destptr) v.AddArg(destptr)
@ -22478,16 +22458,15 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [24] destptr mem)
// cond: SizeAndAlign(s).Size() == 24 // cond:
// result: (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem))) // result: (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 24) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVQstoreconst) v.reset(OpAMD64MOVQstoreconst)
v.AuxInt = makeValAndOff(0, 16) v.AuxInt = makeValAndOff(0, 16)
v.AddArg(destptr) v.AddArg(destptr)
@ -22502,16 +22481,15 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [32] destptr mem)
// cond: SizeAndAlign(s).Size() == 32 // cond:
// result: (MOVQstoreconst [makeValAndOff(0,24)] destptr (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem)))) // result: (MOVQstoreconst [makeValAndOff(0,24)] destptr (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem))))
for { for {
s := v.AuxInt if v.AuxInt != 32 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 32) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpAMD64MOVQstoreconst) v.reset(OpAMD64MOVQstoreconst)
v.AuxInt = makeValAndOff(0, 24) v.AuxInt = makeValAndOff(0, 24)
v.AddArg(destptr) v.AddArg(destptr)
@ -22531,17 +22509,17 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size() <= 1024 && SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size()%16 != 0 && !config.noDuffDevice // cond: s <= 1024 && s%8 == 0 && s%16 != 0 && !config.noDuffDevice
// result: (Zero [SizeAndAlign(s).Size()-8] (OffPtr <destptr.Type> [8] destptr) (MOVQstore destptr (MOVQconst [0]) mem)) // result: (Zero [s-8] (OffPtr <destptr.Type> [8] destptr) (MOVQstore destptr (MOVQconst [0]) mem))
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() <= 1024 && SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size()%16 != 0 && !config.noDuffDevice) { if !(s <= 1024 && s%8 == 0 && s%16 != 0 && !config.noDuffDevice) {
break break
} }
v.reset(OpZero) v.reset(OpZero)
v.AuxInt = SizeAndAlign(s).Size() - 8 v.AuxInt = s - 8
v0 := b.NewValue0(v.Pos, OpOffPtr, destptr.Type) v0 := b.NewValue0(v.Pos, OpOffPtr, destptr.Type)
v0.AuxInt = 8 v0.AuxInt = 8
v0.AddArg(destptr) v0.AddArg(destptr)
@ -22556,17 +22534,17 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size() <= 1024 && SizeAndAlign(s).Size()%16 == 0 && !config.noDuffDevice // cond: s <= 1024 && s%16 == 0 && !config.noDuffDevice
// result: (DUFFZERO [SizeAndAlign(s).Size()] destptr (MOVOconst [0]) mem) // result: (DUFFZERO [s] destptr (MOVOconst [0]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() <= 1024 && SizeAndAlign(s).Size()%16 == 0 && !config.noDuffDevice) { if !(s <= 1024 && s%16 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(OpAMD64DUFFZERO) v.reset(OpAMD64DUFFZERO)
v.AuxInt = SizeAndAlign(s).Size() v.AuxInt = s
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Pos, OpAMD64MOVOconst, TypeInt128) v0 := b.NewValue0(v.Pos, OpAMD64MOVOconst, TypeInt128)
v0.AuxInt = 0 v0.AuxInt = 0
@ -22575,19 +22553,19 @@ func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
return true return true
} }
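Unlike 386's entry arithmetic, the AMD64 rule passes the byte count straight through as the DUFFZERO AuxInt and zeroes in 16-byte MOVO units, which is why the preceding rule peels 8 bytes off sizes that are multiples of 8 but not of 16. For s = 40:

    40%16 = 8:   MOVQstore zeroes bytes 0..7, then Zero [32] at destptr+8
    32%16 = 0:   Zero [32] becomes DUFFZERO [32]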
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: (SizeAndAlign(s).Size() > 1024 || (config.noDuffDevice && SizeAndAlign(s).Size() > 32)) && SizeAndAlign(s).Size()%8 == 0 // cond: (s > 1024 || (config.noDuffDevice && s > 32)) && s%8 == 0
// result: (REPSTOSQ destptr (MOVQconst [SizeAndAlign(s).Size()/8]) (MOVQconst [0]) mem) // result: (REPSTOSQ destptr (MOVQconst [s/8]) (MOVQconst [0]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !((SizeAndAlign(s).Size() > 1024 || (config.noDuffDevice && SizeAndAlign(s).Size() > 32)) && SizeAndAlign(s).Size()%8 == 0) { if !((s > 1024 || (config.noDuffDevice && s > 32)) && s%8 == 0) {
break break
} }
v.reset(OpAMD64REPSTOSQ) v.reset(OpAMD64REPSTOSQ)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Pos, OpAMD64MOVQconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpAMD64MOVQconst, config.fe.TypeUInt64())
v0.AuxInt = SizeAndAlign(s).Size() / 8 v0.AuxInt = s / 8
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpAMD64MOVQconst, config.fe.TypeUInt64()) v1 := b.NewValue0(v.Pos, OpAMD64MOVQconst, config.fe.TypeUInt64())
v1.AuxInt = 0 v1.AuxInt = 0


@ -14990,31 +14990,29 @@ func rewriteValueARM_OpMod8u(v *Value, config *Config) bool {
func rewriteValueARM_OpMove(v *Value, config *Config) bool { func rewriteValueARM_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBUload src mem) mem) // result: (MOVBstore dst (MOVBUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpARMMOVBUload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, OpARMMOVBUload, config.fe.TypeUInt8())
@ -15024,15 +15022,18 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore dst (MOVHUload src mem) mem) // result: (MOVHstore dst (MOVHUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpARMMOVHstore) v.reset(OpARMMOVHstore)
@ -15044,17 +15045,16 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)) // result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(dst) v.AddArg(dst)
@ -15073,15 +15073,18 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore dst (MOVWload src mem) mem) // result: (MOVWstore dst (MOVWload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpARMMOVWstore) v.reset(OpARMMOVWstore)
@ -15093,15 +15096,18 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) // result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpARMMOVHstore) v.reset(OpARMMOVHstore)
@ -15122,17 +15128,16 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))) // result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(dst) v.AddArg(dst)
@ -15169,17 +15174,16 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))) // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -15207,41 +15211,43 @@ func rewriteValueARM_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] {t} dst src mem)
// cond: SizeAndAlign(s).Size()%4 == 0 && SizeAndAlign(s).Size() > 4 && SizeAndAlign(s).Size() <= 512 && SizeAndAlign(s).Align()%4 == 0 && !config.noDuffDevice // cond: s%4 == 0 && s > 4 && s <= 512 && t.(Type).Alignment()%4 == 0 && !config.noDuffDevice
// result: (DUFFCOPY [8 * (128 - int64(SizeAndAlign(s).Size()/4))] dst src mem) // result: (DUFFCOPY [8 * (128 - int64(s/4))] dst src mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size()%4 == 0 && SizeAndAlign(s).Size() > 4 && SizeAndAlign(s).Size() <= 512 && SizeAndAlign(s).Align()%4 == 0 && !config.noDuffDevice) { if !(s%4 == 0 && s > 4 && s <= 512 && t.(Type).Alignment()%4 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(OpARMDUFFCOPY) v.reset(OpARMDUFFCOPY)
v.AuxInt = 8 * (128 - int64(SizeAndAlign(s).Size()/4)) v.AuxInt = 8 * (128 - int64(s/4))
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] {t} dst src mem)
// cond: (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%4 != 0 // cond: (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%4 != 0
// result: (LoweredMove [SizeAndAlign(s).Align()] dst src (ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredMove [t.(Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !((SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%4 != 0) { if !((s > 512 || config.noDuffDevice) || t.(Type).Alignment()%4 != 0) {
break break
} }
v.reset(OpARMLoweredMove) v.reset(OpARMLoweredMove)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpARMADDconst, src.Type) v0 := b.NewValue0(v.Pos, OpARMADDconst, src.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(src) v0.AddArg(src)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
@ -16574,49 +16580,49 @@ func rewriteValueARM_OpStaticCall(v *Value, config *Config) bool {
func rewriteValueARM_OpStore(v *Value, config *Config) bool { func rewriteValueARM_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVHstore ptr val mem) // result: (MOVHstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpARMMOVHstore) v.reset(OpARMMOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
break break
} }
v.reset(OpARMMOVWstore) v.reset(OpARMMOVWstore)
@ -16625,17 +16631,15 @@ func rewriteValueARM_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (MOVFstore ptr val mem) // result: (MOVFstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpARMMOVFstore) v.reset(OpARMMOVFstore)
@ -16644,17 +16648,15 @@ func rewriteValueARM_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (MOVDstore ptr val mem) // result: (MOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpARMMOVDstore) v.reset(OpARMMOVDstore)
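
The Store rules above now dispatch on the type held in Aux instead of a size AuxInt. A self-contained sketch of that dispatch shape, under the assumption of a pared-down value-type record (opFor and valType are illustrative names, not compiler API):

package main

import "fmt"

type valType struct {
	size  int64
	float bool
}

// opFor picks an ARM store op the way the rules above do: by the size of
// the type in Aux, with float-ness breaking the 4- and 8-byte ties.
func opFor(t valType) string {
	switch {
	case t.size == 1:
		return "MOVBstore"
	case t.size == 2:
		return "MOVHstore"
	case t.size == 4 && !t.float:
		return "MOVWstore"
	case t.size == 4 && t.float:
		return "MOVFstore"
	case t.size == 8 && t.float:
		return "MOVDstore"
	}
	return "" // no match; some other rule must lower the store
}

func main() {
	fmt.Println(opFor(valType{size: 4, float: true})) // MOVFstore
}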
@ -16877,30 +16879,28 @@ func rewriteValueARM_OpXor8(v *Value, config *Config) bool {
func rewriteValueARM_OpZero(v *Value, config *Config) bool { func rewriteValueARM_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [1] ptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore ptr (MOVWconst [0]) mem) // result: (MOVBstore ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARMMOVWconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, OpARMMOVWconst, config.fe.TypeUInt32())
@ -16909,14 +16909,17 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore ptr (MOVWconst [0]) mem) // result: (MOVHstore ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpARMMOVHstore) v.reset(OpARMMOVHstore)
@ -16927,16 +16930,15 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] ptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)) // result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(ptr) v.AddArg(ptr)
@ -16953,14 +16955,17 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore ptr (MOVWconst [0]) mem) // result: (MOVWstore ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpARMMOVWstore) v.reset(OpARMMOVWstore)
@ -16971,14 +16976,17 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)) // result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpARMMOVHstore) v.reset(OpARMMOVHstore)
@ -16997,16 +17005,15 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] ptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))) // result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(ptr) v.AddArg(ptr)
@ -17037,16 +17044,15 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [3] ptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))) // result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARMMOVBstore) v.reset(OpARMMOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(ptr) v.AddArg(ptr)
@ -17070,18 +17076,19 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] {t} ptr mem)
// cond: SizeAndAlign(s).Size()%4 == 0 && SizeAndAlign(s).Size() > 4 && SizeAndAlign(s).Size() <= 512 && SizeAndAlign(s).Align()%4 == 0 && !config.noDuffDevice // cond: s%4 == 0 && s > 4 && s <= 512 && t.(Type).Alignment()%4 == 0 && !config.noDuffDevice
// result: (DUFFZERO [4 * (128 - int64(SizeAndAlign(s).Size()/4))] ptr (MOVWconst [0]) mem) // result: (DUFFZERO [4 * (128 - int64(s/4))] ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%4 == 0 && SizeAndAlign(s).Size() > 4 && SizeAndAlign(s).Size() <= 512 && SizeAndAlign(s).Align()%4 == 0 && !config.noDuffDevice) { if !(s%4 == 0 && s > 4 && s <= 512 && t.(Type).Alignment()%4 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(OpARMDUFFZERO) v.reset(OpARMDUFFZERO)
v.AuxInt = 4 * (128 - int64(SizeAndAlign(s).Size()/4)) v.AuxInt = 4 * (128 - int64(s/4))
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARMMOVWconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, OpARMMOVWconst, config.fe.TypeUInt32())
v0.AuxInt = 0 v0.AuxInt = 0
@ -17089,21 +17096,22 @@ func rewriteValueARM_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] {t} ptr mem)
// cond: (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%4 != 0 // cond: (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%4 != 0
// result: (LoweredZero [SizeAndAlign(s).Align()] ptr (ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) (MOVWconst [0]) mem) // result: (LoweredZero [t.(Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) (MOVWconst [0]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !((SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%4 != 0) { if !((s > 512 || config.noDuffDevice) || t.(Type).Alignment()%4 != 0) {
break break
} }
v.reset(OpARMLoweredZero) v.reset(OpARMLoweredZero)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARMADDconst, ptr.Type) v0 := b.NewValue0(v.Pos, OpARMADDconst, ptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(ptr) v0.AddArg(ptr)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARMMOVWconst, config.fe.TypeUInt32()) v1 := b.NewValue0(v.Pos, OpARMMOVWconst, config.fe.TypeUInt32())
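
For Zero and Move the alignment likewise comes from the {t} Aux value while the size stays in AuxInt. A sketch of the alignment check the ARM rules repeat, with Type pared down to the one method used here (illustrative, not the full ssa.Type interface):

package main

import "fmt"

type Type interface{ Alignment() int64 }

type wordAligned struct{}

func (wordAligned) Alignment() int64 { return 4 }

// canUseWordStores mirrors conditions like
//	s%4 == 0 && t.(Type).Alignment()%4 == 0
// from the DUFFZERO/DUFFCOPY rules above.
func canUseWordStores(s int64, t Type) bool {
	return s%4 == 0 && t.Alignment()%4 == 0
}

func main() {
	fmt.Println(canUseWordStores(8, wordAligned{})) // true
}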


@ -12216,31 +12216,29 @@ func rewriteValueARM64_OpMod8u(v *Value, config *Config) bool {
func rewriteValueARM64_OpMove(v *Value, config *Config) bool { func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBUload src mem) mem) // result: (MOVBstore dst (MOVBUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, config.fe.TypeUInt8())
@ -12250,17 +12248,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVHstore dst (MOVHUload src mem) mem) // result: (MOVHstore dst (MOVHUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpARM64MOVHstore) v.reset(OpARM64MOVHstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, config.fe.TypeUInt16()) v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, config.fe.TypeUInt16())
@ -12270,17 +12267,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVWstore dst (MOVWUload src mem) mem) // result: (MOVWstore dst (MOVWUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpARM64MOVWstore) v.reset(OpARM64MOVWstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, config.fe.TypeUInt32())
@ -12290,17 +12286,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] dst src mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVDstore dst (MOVDload src mem) mem) // result: (MOVDstore dst (MOVDload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8) {
break
}
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpARM64MOVDload, config.fe.TypeUInt64())
@ -12310,17 +12305,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -12339,17 +12333,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [5] dst src mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 5) {
break
}
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -12368,17 +12361,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [6] dst src mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 6) {
break
}
v.reset(OpARM64MOVHstore) v.reset(OpARM64MOVHstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -12397,17 +12389,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [7] dst src mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 7) {
break
}
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AuxInt = 6 v.AuxInt = 6
v.AddArg(dst) v.AddArg(dst)
@ -12435,17 +12426,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [12] dst src mem)
// cond: SizeAndAlign(s).Size() == 12 // cond:
// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 12) {
break
}
v.reset(OpARM64MOVWstore) v.reset(OpARM64MOVWstore)
v.AuxInt = 8 v.AuxInt = 8
v.AddArg(dst) v.AddArg(dst)
@ -12464,17 +12454,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [16] dst src mem)
// cond: SizeAndAlign(s).Size() == 16 // cond:
// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 16) {
break
}
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
v.AuxInt = 8 v.AuxInt = 8
v.AddArg(dst) v.AddArg(dst)
@ -12493,17 +12482,16 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [24] dst src mem)
// cond: SizeAndAlign(s).Size() == 24 // cond:
// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 24) {
break
}
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
v.AuxInt = 16 v.AuxInt = 16
v.AddArg(dst) v.AddArg(dst)
@ -12532,28 +12520,28 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 // cond: s%8 != 0 && s > 8
// result: (Move [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()] (OffPtr <dst.Type> dst [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) (OffPtr <src.Type> src [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) (Move [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] dst src mem)) // result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) { if !(s%8 != 0 && s > 8) {
break break
} }
v.reset(OpMove) v.reset(OpMove)
v.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64() v.AuxInt = s % 8
v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
v0.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 v0.AuxInt = s - s%8
v0.AddArg(dst) v0.AddArg(dst)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
v1.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 v1.AuxInt = s - s%8
v1.AddArg(src) v1.AddArg(src)
v.AddArg(v1) v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpMove, TypeMem) v2 := b.NewValue0(v.Pos, OpMove, TypeMem)
v2.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64() v2.AuxInt = s - s%8
v2.AddArg(dst) v2.AddArg(dst)
v2.AddArg(src) v2.AddArg(src)
v2.AddArg(mem) v2.AddArg(mem)
@ -12561,39 +12549,39 @@ func rewriteValueARM64_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice // cond: s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice
// result: (DUFFCOPY [8 * (128 - int64(SizeAndAlign(s).Size()/8))] dst src mem) // result: (DUFFCOPY [8 * (128 - int64(s/8))] dst src mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice) { if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
break break
} }
v.reset(OpARM64DUFFCOPY) v.reset(OpARM64DUFFCOPY)
v.AuxInt = 8 * (128 - int64(SizeAndAlign(s).Size()/8)) v.AuxInt = 8 * (128 - int64(s/8))
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0 // cond: s > 24 && s%8 == 0
// result: (LoweredMove dst src (ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size()%8 == 0) { if !(s > 24 && s%8 == 0) {
break break
} }
v.reset(OpARM64LoweredMove) v.reset(OpARM64LoweredMove)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - 8
v0.AddArg(src) v0.AddArg(src)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
@ -14428,49 +14416,49 @@ func rewriteValueARM64_OpStaticCall(v *Value, config *Config) bool {
func rewriteValueARM64_OpStore(v *Value, config *Config) bool { func rewriteValueARM64_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVHstore ptr val mem) // result: (MOVHstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpARM64MOVHstore) v.reset(OpARM64MOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
break break
} }
v.reset(OpARM64MOVWstore) v.reset(OpARM64MOVWstore)
@ -14479,17 +14467,15 @@ func rewriteValueARM64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && !is64BitFloat(val.Type)
// result: (MOVDstore ptr val mem) // result: (MOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && !is64BitFloat(val.Type)) {
break break
} }
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
@ -14498,17 +14484,15 @@ func rewriteValueARM64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (FMOVSstore ptr val mem) // result: (FMOVSstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpARM64FMOVSstore) v.reset(OpARM64FMOVSstore)
@ -14517,17 +14501,15 @@ func rewriteValueARM64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (FMOVDstore ptr val mem) // result: (FMOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpARM64FMOVDstore) v.reset(OpARM64FMOVDstore)
@ -14790,30 +14772,28 @@ func rewriteValueARM64_OpXor8(v *Value, config *Config) bool {
func rewriteValueARM64_OpZero(v *Value, config *Config) bool { func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [1] ptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore ptr (MOVDconst [0]) mem) // result: (MOVBstore ptr (MOVDconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64())
@ -14822,16 +14802,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] ptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVHstore ptr (MOVDconst [0]) mem) // result: (MOVHstore ptr (MOVDconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVHstore) v.reset(OpARM64MOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64())
@ -14840,16 +14819,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] ptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVWstore ptr (MOVDconst [0]) mem) // result: (MOVWstore ptr (MOVDconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVWstore) v.reset(OpARM64MOVWstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64())
@ -14858,16 +14836,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [8] ptr mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVDstore ptr (MOVDconst [0]) mem) // result: (MOVDstore ptr (MOVDconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, config.fe.TypeUInt64())
@ -14876,16 +14853,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [3] ptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(ptr) v.AddArg(ptr)
@ -14901,16 +14877,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [5] ptr mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 5) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(ptr) v.AddArg(ptr)
@ -14926,16 +14901,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [6] ptr mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 6) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVHstore) v.reset(OpARM64MOVHstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(ptr) v.AddArg(ptr)
@ -14951,16 +14925,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [7] ptr mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 7) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVBstore) v.reset(OpARM64MOVBstore)
v.AuxInt = 6 v.AuxInt = 6
v.AddArg(ptr) v.AddArg(ptr)
@ -14983,16 +14956,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [12] ptr mem)
// cond: SizeAndAlign(s).Size() == 12 // cond:
// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 12) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVWstore) v.reset(OpARM64MOVWstore)
v.AuxInt = 8 v.AuxInt = 8
v.AddArg(ptr) v.AddArg(ptr)
@ -15008,16 +14980,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [16] ptr mem)
// cond: SizeAndAlign(s).Size() == 16 // cond:
// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) // result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 16) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
v.AuxInt = 8 v.AuxInt = 8
v.AddArg(ptr) v.AddArg(ptr)
@ -15033,16 +15004,15 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [24] ptr mem)
// cond: SizeAndAlign(s).Size() == 24 // cond:
// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) // result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 24) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpARM64MOVDstore) v.reset(OpARM64MOVDstore)
v.AuxInt = 16 v.AuxInt = 16
v.AddArg(ptr) v.AddArg(ptr)
@ -15066,58 +15036,58 @@ func rewriteValueARM64_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] ptr mem)
// cond: SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8 // cond: s%8 != 0 && s > 8
// result: (Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64()] (OffPtr <ptr.Type> ptr [SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8]) (Zero [MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64()] ptr mem)) // result: (Zero [s%8] (OffPtr <ptr.Type> ptr [s-s%8]) (Zero [s-s%8] ptr mem))
for { for {
s := v.AuxInt s := v.AuxInt
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%8 != 0 && SizeAndAlign(s).Size() > 8) { if !(s%8 != 0 && s > 8) {
break break
} }
v.reset(OpZero) v.reset(OpZero)
v.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()%8, 1).Int64() v.AuxInt = s % 8
v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() - SizeAndAlign(s).Size()%8 v0.AuxInt = s - s%8
v0.AddArg(ptr) v0.AddArg(ptr)
v.AddArg(v0) v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpZero, TypeMem) v1 := b.NewValue0(v.Pos, OpZero, TypeMem)
v1.AuxInt = MakeSizeAndAlign(SizeAndAlign(s).Size()-SizeAndAlign(s).Size()%8, 1).Int64() v1.AuxInt = s - s%8
v1.AddArg(ptr) v1.AddArg(ptr)
v1.AddArg(mem) v1.AddArg(mem)
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] ptr mem)
// cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice // cond: s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice
// result: (DUFFZERO [4 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem) // result: (DUFFZERO [4 * (128 - int64(s/8))] ptr mem)
for { for {
s := v.AuxInt s := v.AuxInt
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && !config.noDuffDevice) { if !(s%8 == 0 && s > 24 && s <= 8*128 && !config.noDuffDevice) {
break break
} }
v.reset(OpARM64DUFFZERO) v.reset(OpARM64DUFFZERO)
v.AuxInt = 4 * (128 - int64(SizeAndAlign(s).Size()/8)) v.AuxInt = 4 * (128 - int64(s/8))
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] ptr mem)
// cond: SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) // cond: s%8 == 0 && (s > 8*128 || config.noDuffDevice)
// result: (LoweredZero ptr (ADDconst <ptr.Type> [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)] ptr) mem) // result: (LoweredZero ptr (ADDconst <ptr.Type> [s-8] ptr) mem)
for { for {
s := v.AuxInt s := v.AuxInt
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%8 == 0 && (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice)) { if !(s%8 == 0 && (s > 8*128 || config.noDuffDevice)) {
break break
} }
v.reset(OpARM64LoweredZero) v.reset(OpARM64LoweredZero)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type) v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - 8
v0.AddArg(ptr) v0.AddArg(ptr)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
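
The ARM64 rules peel an unaligned size into an 8-byte-multiple body plus a short tail; the [s%8] and [s-s%8] AuxInts above are exactly this decomposition. A runnable sketch (splitSize is an illustrative name):

package main

import "fmt"

// splitSize returns the body handled by word-wise code and the tail that a
// smaller Zero/Move at offset body finishes off.
func splitSize(s int64) (body, tail int64) {
	if s > 8 && s%8 != 0 {
		return s - s%8, s % 8
	}
	return s, 0
}

func main() {
	fmt.Println(splitSize(27)) // 24 3: Zero [24] ptr, then Zero [3] (OffPtr [24] ptr)
}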


@ -6087,31 +6087,29 @@ func rewriteValueMIPS_OpMod8u(v *Value, config *Config) bool {
func rewriteValueMIPS_OpMove(v *Value, config *Config) bool { func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBUload src mem) mem) // result: (MOVBstore dst (MOVBUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
@ -6121,15 +6119,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore dst (MOVHUload src mem) mem) // result: (MOVHstore dst (MOVHUload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -6141,17 +6142,16 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)) // result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(dst) v.AddArg(dst)
@ -6170,15 +6170,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore dst (MOVWload src mem) mem) // result: (MOVWstore dst (MOVWload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -6190,15 +6193,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) // result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -6219,17 +6225,16 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))) // result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(dst) v.AddArg(dst)
@ -6266,17 +6271,16 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))) // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -6304,15 +6308,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -6333,15 +6340,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -6380,15 +6390,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [6] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -6418,15 +6431,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [12] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -6456,15 +6472,18 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [16] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))) // result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -6503,23 +6522,24 @@ func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] {t} dst src mem)
// cond: (SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) // cond: (s > 16 || t.(Type).Alignment()%4 != 0)
// result: (LoweredMove [SizeAndAlign(s).Align()] dst src (ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredMove [t.(Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) { if !(s > 16 || t.(Type).Alignment()%4 != 0) {
break break
} }
v.reset(OpMIPSLoweredMove) v.reset(OpMIPSLoweredMove)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type) v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(src) v0.AddArg(src)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
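The generic fallback above now reads the alignment from the Aux type instead of unpacking a SizeAndAlign word. The ADDconst operand it builds, s - moveSize(align, config), is the offset of the last element the lowered loop copies. moveSize itself is not part of this diff; the sketch below assumes it simply returns the widest store width the alignment permits on MIPS32 (moveSizeSketch is a hypothetical stand-in name).

```go
package main

import "fmt"

// moveSizeSketch stands in for the compiler's moveSize helper (assumed
// behavior): pick the widest store the alignment allows on a 32-bit target.
func moveSizeSketch(align int64) int64 {
	switch {
	case align%4 == 0:
		return 4 // word stores
	case align%2 == 0:
		return 2 // halfword stores
	}
	return 1 // byte stores
}

func main() {
	s, align := int64(20), int64(4) // a 20-byte move of 4-byte-aligned data
	// The ADDconst marks the address of the last element to copy:
	// the lowered loop walks src .. src+16 in steps of 4.
	fmt.Println(s - moveSizeSketch(align)) // 16
}
```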
@ -8277,49 +8297,49 @@ func rewriteValueMIPS_OpStaticCall(v *Value, config *Config) bool {
func rewriteValueMIPS_OpStore(v *Value, config *Config) bool { func rewriteValueMIPS_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVHstore ptr val mem) // result: (MOVHstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -8328,36 +8348,15 @@ func rewriteValueMIPS_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is64BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem)
for {
if v.AuxInt != 8 {
break
}
ptr := v.Args[0]
val := v.Args[1]
mem := v.Args[2]
if !(!is64BitFloat(val.Type)) {
break
}
v.reset(OpMIPSMOVWstore)
v.AddArg(ptr)
v.AddArg(val)
v.AddArg(mem)
return true
}
// match: (Store [4] ptr val mem)
// cond: is32BitFloat(val.Type)
// result: (MOVFstore ptr val mem) // result: (MOVFstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPSMOVFstore) v.reset(OpMIPSMOVFstore)
@ -8366,17 +8365,15 @@ func rewriteValueMIPS_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (MOVDstore ptr val mem) // result: (MOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPSMOVDstore) v.reset(OpMIPSMOVDstore)
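All of the Store matchers above share one convention: the value's type now travels in v.Aux, which is an interface{}, so size and alignment are recovered through a type assertion rather than read off AuxInt. A minimal self-contained sketch of that convention, with fakeType standing in for a real compiler type:

```go
package main

import "fmt"

// Type mirrors the two methods the matchers above call on the Aux value.
type Type interface {
	Size() int64
	Alignment() int64
}

type fakeType struct{ size, align int64 } // hypothetical test type

func (t fakeType) Size() int64      { return t.size }
func (t fakeType) Alignment() int64 { return t.align }

func main() {
	var aux interface{} = fakeType{size: 2, align: 2} // what v.Aux carries
	t := aux.(Type)                                   // as in the matchers
	fmt.Println(t.Size() == 2, t.Alignment()%2 == 0)  // true true
}
```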
@ -8587,30 +8584,28 @@ func rewriteValueMIPS_OpXor8(v *Value, config *Config) bool {
func rewriteValueMIPS_OpZero(v *Value, config *Config) bool { func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [1] ptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore ptr (MOVWconst [0]) mem) // result: (MOVBstore ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
@ -8619,14 +8614,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore ptr (MOVWconst [0]) mem) // result: (MOVHstore ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -8637,16 +8635,15 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] ptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)) // result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(ptr) v.AddArg(ptr)
@ -8663,14 +8660,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore ptr (MOVWconst [0]) mem) // result: (MOVWstore ptr (MOVWconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -8681,14 +8681,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)) // result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -8707,16 +8710,15 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] ptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))) // result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(ptr) v.AddArg(ptr)
@ -8747,16 +8749,15 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [3] ptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))) // result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPSMOVBstore) v.reset(OpMIPSMOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(ptr) v.AddArg(ptr)
@ -8780,14 +8781,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [6] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))) // result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPSMOVHstore) v.reset(OpMIPSMOVHstore)
@ -8813,14 +8817,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [8] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)) // result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -8839,14 +8846,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [12] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))) // result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -8872,14 +8882,17 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [16] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))) // result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPSMOVWstore) v.reset(OpMIPSMOVWstore)
@ -8912,21 +8925,22 @@ func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] {t} ptr mem)
// cond: (SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) // cond: (s > 16 || s%4 != 0)
// result: (LoweredZero [SizeAndAlign(s).Align()] ptr (ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredZero [t.(Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) { if !(s > 16 || s%4 != 0) {
break break
} }
v.reset(OpMIPSLoweredZero) v.reset(OpMIPSLoweredZero)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type) v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(ptr) v0.AddArg(ptr)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
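For a fixed size, the Zero rules above are tried top-down, most-aligned first, so the first alignment condition that holds picks the widest store. A sketch of that selection order for a 4-byte zero (the function and its return strings are illustrative, not compiler code):

```go
package main

import "fmt"

// chooseZeroWidth mirrors the order of the Zero [4] rules above: the
// matchers run in sequence, so the widest store whose alignment
// condition holds wins.
func chooseZeroWidth(align int64) string {
	switch {
	case align%4 == 0:
		return "MOVWstore"    // one word store
	case align%2 == 0:
		return "MOVHstore x2" // two halfword stores
	}
	return "MOVBstore x4" // four byte stores
}

func main() {
	fmt.Println(chooseZeroWidth(4), chooseZeroWidth(2), chooseZeroWidth(1))
}
```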


@ -6293,31 +6293,29 @@ func rewriteValueMIPS64_OpMod8u(v *Value, config *Config) bool {
func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool { func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBload src mem) mem) // result: (MOVBstore dst (MOVBload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, config.fe.TypeInt8()) v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, config.fe.TypeInt8())
@ -6327,15 +6325,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore dst (MOVHload src mem) mem) // result: (MOVHstore dst (MOVHload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -6347,17 +6348,16 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)) // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(dst) v.AddArg(dst)
@ -6376,15 +6376,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore dst (MOVWload src mem) mem) // result: (MOVWstore dst (MOVWload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -6396,15 +6399,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)) // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -6425,17 +6431,16 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))) // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(dst) v.AddArg(dst)
@ -6472,15 +6477,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVVstore dst (MOVVload src mem) mem) // result: (MOVVstore dst (MOVVload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -6492,15 +6500,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -6521,15 +6532,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -6568,17 +6582,16 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))) // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -6606,15 +6619,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [6] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -6644,15 +6660,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [12] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -6682,15 +6701,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [16] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)) // result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -6711,15 +6733,18 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [24] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))) // result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -6749,23 +6774,24 @@ func rewriteValueMIPS64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] {t} dst src mem)
// cond: SizeAndAlign(s).Size() > 24 || SizeAndAlign(s).Align()%8 != 0 // cond: s > 24 || t.(Type).Alignment()%8 != 0
// result: (LoweredMove [SizeAndAlign(s).Align()] dst src (ADDVconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredMove [t.(Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 24 || SizeAndAlign(s).Align()%8 != 0) { if !(s > 24 || t.(Type).Alignment()%8 != 0) {
break break
} }
v.reset(OpMIPS64LoweredMove) v.reset(OpMIPS64LoweredMove)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type) v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(src) v0.AddArg(src)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
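The fixed-size Move rules emit their stores highest offset first, each nested inside the next; for the 16-byte, 8-byte-aligned case above that is two doubleword copies at offsets 8 and 0. A small sketch that prints the offset pattern (output text only suggests the shape of the emitted ops):

```go
package main

import "fmt"

func main() {
	const size, width = 16, 8 // Move [16] with 8-byte alignment
	// Offsets descend, matching the nesting of the generated stores.
	for off := int64(size - width); off >= 0; off -= width {
		fmt.Printf("MOVVstore [%d] dst (MOVVload [%d] src mem) ...\n", off, off)
	}
}
```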
@ -8730,49 +8756,49 @@ func rewriteValueMIPS64_OpStaticCall(v *Value, config *Config) bool {
func rewriteValueMIPS64_OpStore(v *Value, config *Config) bool { func rewriteValueMIPS64_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVHstore ptr val mem) // result: (MOVHstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -8781,17 +8807,15 @@ func rewriteValueMIPS64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: !is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && !is64BitFloat(val.Type)
// result: (MOVVstore ptr val mem) // result: (MOVVstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && !is64BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -8800,17 +8824,15 @@ func rewriteValueMIPS64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (MOVFstore ptr val mem) // result: (MOVFstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPS64MOVFstore) v.reset(OpMIPS64MOVFstore)
@ -8819,17 +8841,15 @@ func rewriteValueMIPS64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (MOVDstore ptr val mem) // result: (MOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpMIPS64MOVDstore) v.reset(OpMIPS64MOVDstore)
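Taken together, the MIPS64 Store rules above select the store width from t.(Type).Size() and divert to the FPU stores when the value is a float of matching width. A stand-in summary of that decision table (a sketch over plain inputs, not the compiler's matcher):

```go
package main

import "fmt"

// storeOp mirrors the MIPS64 Store rules above.
func storeOp(size int64, isFloat bool) string {
	switch {
	case size == 1:
		return "MOVBstore"
	case size == 2:
		return "MOVHstore"
	case size == 4 && !isFloat:
		return "MOVWstore"
	case size == 8 && !isFloat:
		return "MOVVstore"
	case size == 4:
		return "MOVFstore" // 32-bit float
	case size == 8:
		return "MOVDstore" // 64-bit float
	}
	return "unhandled"
}

func main() {
	fmt.Println(storeOp(8, false), storeOp(8, true), storeOp(4, true))
}
```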
@ -9092,30 +9112,28 @@ func rewriteValueMIPS64_OpXor8(v *Value, config *Config) bool {
func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool { func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [1] ptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore ptr (MOVVconst [0]) mem) // result: (MOVBstore ptr (MOVVconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, config.fe.TypeUInt64())
@ -9124,14 +9142,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore ptr (MOVVconst [0]) mem) // result: (MOVHstore ptr (MOVVconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -9142,16 +9163,15 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [2] ptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)) // result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(ptr) v.AddArg(ptr)
@ -9168,14 +9188,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore ptr (MOVVconst [0]) mem) // result: (MOVWstore ptr (MOVVconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -9186,14 +9209,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)) // result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -9212,16 +9238,15 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [4] ptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))) // result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(ptr) v.AddArg(ptr)
@ -9252,14 +9277,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [8] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVVstore ptr (MOVVconst [0]) mem) // result: (MOVVstore ptr (MOVVconst [0]) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -9270,14 +9298,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [8] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)) // result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -9296,14 +9327,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [8] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))) // result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -9336,16 +9370,15 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [3] ptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))) // result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
ptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
ptr := v.Args[0]
mem := v.Args[1]
v.reset(OpMIPS64MOVBstore) v.reset(OpMIPS64MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(ptr) v.AddArg(ptr)
@ -9369,14 +9402,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [6] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))) // result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpMIPS64MOVHstore) v.reset(OpMIPS64MOVHstore)
@ -9402,14 +9438,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [12] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))) // result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 12 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpMIPS64MOVWstore) v.reset(OpMIPS64MOVWstore)
@ -9435,14 +9474,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [16] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)) // result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -9461,14 +9503,17 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [24] {t} ptr mem)
// cond: SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))) // result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
break
}
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpMIPS64MOVVstore) v.reset(OpMIPS64MOVVstore)
@ -9494,37 +9539,39 @@ func rewriteValueMIPS64_OpZero(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] {t} ptr mem)
// cond: SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && SizeAndAlign(s).Align()%8 == 0 && !config.noDuffDevice // cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(Type).Alignment()%8 == 0 && !config.noDuffDevice
// result: (DUFFZERO [8 * (128 - int64(SizeAndAlign(s).Size()/8))] ptr mem) // result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size()%8 == 0 && SizeAndAlign(s).Size() > 24 && SizeAndAlign(s).Size() <= 8*128 && SizeAndAlign(s).Align()%8 == 0 && !config.noDuffDevice) { if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(Type).Alignment()%8 == 0 && !config.noDuffDevice) {
break break
} }
v.reset(OpMIPS64DUFFZERO) v.reset(OpMIPS64DUFFZERO)
v.AuxInt = 8 * (128 - int64(SizeAndAlign(s).Size()/8)) v.AuxInt = 8 * (128 - int64(s/8))
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
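The DUFFZERO AuxInt above, 8 * (128 - s/8), is a jump offset into the duffzero routine; the arithmetic assumes (as this diff suggests but does not show) a chain of 128 doubleword-clear steps occupying 8 code bytes each, so jumping further in executes fewer stores:

```go
package main

import "fmt"

func main() {
	for _, s := range []int64{32, 8 * 128} {
		fmt.Printf("s=%d -> code offset %d\n", s, 8*(128-s/8))
	}
	// s=32   -> offset 992: skip 124 of the 128 steps, run the last 4
	// s=1024 -> offset 0:   run the whole chain
}
```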
// match: (Zero [s] ptr mem) // match: (Zero [s] {t} ptr mem)
// cond: (SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 // cond: (s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0
// result: (LoweredZero [SizeAndAlign(s).Align()] ptr (ADDVconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredZero [t.(Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !((SizeAndAlign(s).Size() > 8*128 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0) { if !((s > 8*128 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0) {
break break
} }
v.reset(OpMIPS64LoweredZero) v.reset(OpMIPS64LoweredZero)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type) v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(ptr) v0.AddArg(ptr)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)


@ -3672,31 +3672,29 @@ func rewriteValuePPC64_OpMod8u(v *Value, config *Config) bool {
func rewriteValuePPC64_OpMove(v *Value, config *Config) bool { func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBZload src mem) mem) // result: (MOVBstore dst (MOVBZload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpPPC64MOVBstore) v.reset(OpPPC64MOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBZload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, OpPPC64MOVBZload, config.fe.TypeUInt8())
@ -3706,15 +3704,18 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore dst (MOVHZload src mem) mem) // result: (MOVHstore dst (MOVHZload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpPPC64MOVHstore) v.reset(OpPPC64MOVHstore)
@ -3726,17 +3727,16 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore dst (MOVBZload src mem) mem)) // result: (MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore dst (MOVBZload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpPPC64MOVBstore) v.reset(OpPPC64MOVBstore)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(dst) v.AddArg(dst)
@ -3755,15 +3755,18 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore dst (MOVWload src mem) mem) // result: (MOVWstore dst (MOVWload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpPPC64MOVWstore) v.reset(OpPPC64MOVWstore)
@ -3775,15 +3778,18 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHZload [2] src mem) (MOVHstore dst (MOVHZload src mem) mem)) // result: (MOVHstore [2] dst (MOVHZload [2] src mem) (MOVHstore dst (MOVHZload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpPPC64MOVHstore) v.reset(OpPPC64MOVHstore)
@ -3804,17 +3810,16 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstore [3] dst (MOVBZload [3] src mem) (MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore dst (MOVBZload src mem) mem)))) // result: (MOVBstore [3] dst (MOVBZload [3] src mem) (MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore dst (MOVBZload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpPPC64MOVBstore) v.reset(OpPPC64MOVBstore)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(dst) v.AddArg(dst)
@ -3851,15 +3856,18 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVDstore dst (MOVDload src mem) mem) // result: (MOVDstore dst (MOVDload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpPPC64MOVDstore) v.reset(OpPPC64MOVDstore)
@ -3871,15 +3879,18 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstore [4] dst (MOVWZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem)) // result: (MOVWstore [4] dst (MOVWZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpPPC64MOVWstore) v.reset(OpPPC64MOVWstore)
@ -3900,15 +3911,18 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] {t} dst src mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstore [6] dst (MOVHZload [6] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVHstore [2] dst (MOVHZload [2] src mem) (MOVHstore dst (MOVHZload src mem) mem)))) // result: (MOVHstore [6] dst (MOVHZload [6] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVHstore [2] dst (MOVHZload [2] src mem) (MOVHstore dst (MOVHZload src mem) mem))))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpPPC64MOVHstore) v.reset(OpPPC64MOVHstore)
@ -3947,17 +3961,16 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore dst (MOVBZload src mem) mem))) // result: (MOVBstore [2] dst (MOVBZload [2] src mem) (MOVBstore [1] dst (MOVBZload [1] src mem) (MOVBstore dst (MOVBZload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpPPC64MOVBstore) v.reset(OpPPC64MOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -3985,23 +3998,24 @@ func rewriteValuePPC64_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] {t} dst src mem)
// cond: (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 // cond: (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0
// result: (LoweredMove [SizeAndAlign(s).Align()] dst src (ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredMove [t.(Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !((SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0) { if !((s > 512 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0) {
break break
} }
v.reset(OpPPC64LoweredMove) v.reset(OpPPC64LoweredMove)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpPPC64ADDconst, src.Type) v0 := b.NewValue0(v.Pos, OpPPC64ADDconst, src.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(src) v0.AddArg(src)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
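
In the catch-all rule above, the byte count s is now the raw AuxInt and the alignment comes from the type stored in Aux, so the old SizeAndAlign unpacking disappears. A minimal runnable sketch of the ADDconst arithmetic, with moveSize reimplemented here purely for illustration (the compiler's real helper may also weigh the target word size):

    package main

    import "fmt"

    // moveSize models the per-round copy width chosen from the alignment.
    // Illustrative only; not the compiler's exact definition.
    func moveSize(align int64) int64 {
        switch {
        case align%8 == 0:
            return 8
        case align%4 == 0:
            return 4
        case align%2 == 0:
            return 2
        }
        return 1
    }

    func main() {
        s, align := int64(1024), int64(8) // assumed example values
        // The ADDconst operand above: the address one full chunk before
        // the end, where the LoweredMove loop stops.
        fmt.Println(s - moveSize(align)) // 1016
    }

For 8-aligned data the loop therefore stops one doubleword short of the end, matching the generated v0.AuxInt expression.
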
@ -9263,17 +9277,15 @@ func rewriteValuePPC64_OpStaticCall(v *Value, config *Config) bool {
func rewriteValuePPC64_OpStore(v *Value, config *Config) bool { func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (FMOVDstore ptr val mem) // result: (FMOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpPPC64FMOVDstore) v.reset(OpPPC64FMOVDstore)
@ -9282,17 +9294,15 @@ func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is32BitFloat(val.Type)
// result: (FMOVDstore ptr val mem) // result: (FMOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpPPC64FMOVDstore) v.reset(OpPPC64FMOVDstore)
@ -9301,17 +9311,15 @@ func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (FMOVSstore ptr val mem) // result: (FMOVSstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpPPC64FMOVSstore) v.reset(OpPPC64FMOVSstore)
@ -9320,17 +9328,15 @@ func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: (is64BitInt(val.Type) || isPtr(val.Type)) // cond: t.(Type).Size() == 8 && (is64BitInt(val.Type) || isPtr(val.Type))
// result: (MOVDstore ptr val mem) // result: (MOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitInt(val.Type) || isPtr(val.Type)) { if !(t.(Type).Size() == 8 && (is64BitInt(val.Type) || isPtr(val.Type))) {
break break
} }
v.reset(OpPPC64MOVDstore) v.reset(OpPPC64MOVDstore)
@ -9339,17 +9345,15 @@ func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitInt(val.Type) // cond: t.(Type).Size() == 4 && is32BitInt(val.Type)
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitInt(val.Type)) { if !(t.(Type).Size() == 4 && is32BitInt(val.Type)) {
break break
} }
v.reset(OpPPC64MOVWstore) v.reset(OpPPC64MOVWstore)
@ -9358,32 +9362,34 @@ func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVHstore ptr val mem) // result: (MOVHstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpPPC64MOVHstore) v.reset(OpPPC64MOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpPPC64MOVBstore) v.reset(OpPPC64MOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
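
With the size gone from AuxInt, every PPC64 Store rule keys off t.(Type).Size() together with the value's own type, and the rule order still lets floating-point stores win before same-sized integer stores. A toy runnable model of that dispatch (names are mine; the generated code expresses it as fall-through rules, including a quirk where a 32-bit float in an 8-byte slot still takes FMOVDstore, omitted here):

    package main

    import "fmt"

    // pickStoreOp mimics the rule order above: float cases first, then
    // integer/pointer cases by size. Purely a model of the matcher.
    func pickStoreOp(size int64, isFloat bool) string {
        switch {
        case size == 8 && isFloat:
            return "FMOVDstore"
        case size == 4 && isFloat:
            return "FMOVSstore"
        case size == 8:
            return "MOVDstore"
        case size == 4:
            return "MOVWstore"
        case size == 2:
            return "MOVHstore"
        case size == 1:
            return "MOVBstore"
        }
        return "no rule"
    }

    func main() {
        fmt.Println(pickStoreOp(8, true))  // FMOVDstore
        fmt.Println(pickStoreOp(2, false)) // MOVHstore
    }
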
@ -9638,43 +9644,44 @@ func rewriteValuePPC64_OpXor8(v *Value, config *Config) bool {
func rewriteValuePPC64_OpZero(v *Value, config *Config) bool { func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [1] destptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstorezero destptr mem) // result: (MOVBstorezero destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpPPC64MOVBstorezero) v.reset(OpPPC64MOVBstorezero)
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [2] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstorezero destptr mem) // result: (MOVHstorezero destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpPPC64MOVHstorezero) v.reset(OpPPC64MOVHstorezero)
@ -9682,16 +9689,15 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [2] destptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVBstorezero [1] destptr (MOVBstorezero [0] destptr mem)) // result: (MOVBstorezero [1] destptr (MOVBstorezero [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 2 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpPPC64MOVBstorezero) v.reset(OpPPC64MOVBstorezero)
v.AuxInt = 1 v.AuxInt = 1
v.AddArg(destptr) v.AddArg(destptr)
@ -9702,14 +9708,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [4] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstorezero destptr mem) // result: (MOVWstorezero destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpPPC64MOVWstorezero) v.reset(OpPPC64MOVWstorezero)
@ -9717,14 +9726,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [4] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstorezero [2] destptr (MOVHstorezero [0] destptr mem)) // result: (MOVHstorezero [2] destptr (MOVHstorezero [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpPPC64MOVHstorezero) v.reset(OpPPC64MOVHstorezero)
@ -9737,16 +9749,15 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [4] destptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVBstorezero [3] destptr (MOVBstorezero [2] destptr (MOVBstorezero [1] destptr (MOVBstorezero [0] destptr mem)))) // result: (MOVBstorezero [3] destptr (MOVBstorezero [2] destptr (MOVBstorezero [1] destptr (MOVBstorezero [0] destptr mem))))
for { for {
s := v.AuxInt if v.AuxInt != 4 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpPPC64MOVBstorezero) v.reset(OpPPC64MOVBstorezero)
v.AuxInt = 3 v.AuxInt = 3
v.AddArg(destptr) v.AddArg(destptr)
@ -9765,14 +9776,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [8] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVDstorezero [0] destptr mem) // result: (MOVDstorezero [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpPPC64MOVDstorezero) v.reset(OpPPC64MOVDstorezero)
@ -9781,14 +9795,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [8] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0 // cond: t.(Type).Alignment()%4 == 0
// result: (MOVWstorezero [4] destptr (MOVWstorezero [0] destptr mem)) // result: (MOVWstorezero [4] destptr (MOVWstorezero [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) { if !(t.(Type).Alignment()%4 == 0) {
break break
} }
v.reset(OpPPC64MOVWstorezero) v.reset(OpPPC64MOVWstorezero)
@ -9801,14 +9818,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [8] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0 // cond: t.(Type).Alignment()%2 == 0
// result: (MOVHstorezero [6] destptr (MOVHstorezero [4] destptr (MOVHstorezero [2] destptr (MOVHstorezero [0] destptr mem)))) // result: (MOVHstorezero [6] destptr (MOVHstorezero [4] destptr (MOVHstorezero [2] destptr (MOVHstorezero [0] destptr mem))))
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) { if !(t.(Type).Alignment()%2 == 0) {
break break
} }
v.reset(OpPPC64MOVHstorezero) v.reset(OpPPC64MOVHstorezero)
@ -9829,16 +9849,15 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [3] destptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstorezero [2] destptr (MOVBstorezero [1] destptr (MOVBstorezero [0] destptr mem))) // result: (MOVBstorezero [2] destptr (MOVBstorezero [1] destptr (MOVBstorezero [0] destptr mem)))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpPPC64MOVBstorezero) v.reset(OpPPC64MOVBstorezero)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(destptr) v.AddArg(destptr)
@ -9853,14 +9872,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [16] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVDstorezero [8] destptr (MOVDstorezero [0] destptr mem)) // result: (MOVDstorezero [8] destptr (MOVDstorezero [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpPPC64MOVDstorezero) v.reset(OpPPC64MOVDstorezero)
@ -9873,14 +9895,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [24] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVDstorezero [16] destptr (MOVDstorezero [8] destptr (MOVDstorezero [0] destptr mem))) // result: (MOVDstorezero [16] destptr (MOVDstorezero [8] destptr (MOVDstorezero [0] destptr mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpPPC64MOVDstorezero) v.reset(OpPPC64MOVDstorezero)
@ -9897,14 +9922,17 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [32] {t} destptr mem)
// cond: SizeAndAlign(s).Size() == 32 && SizeAndAlign(s).Align()%8 == 0 // cond: t.(Type).Alignment()%8 == 0
// result: (MOVDstorezero [24] destptr (MOVDstorezero [16] destptr (MOVDstorezero [8] destptr (MOVDstorezero [0] destptr mem)))) // result: (MOVDstorezero [24] destptr (MOVDstorezero [16] destptr (MOVDstorezero [8] destptr (MOVDstorezero [0] destptr mem))))
for { for {
s := v.AuxInt if v.AuxInt != 32 {
break
}
t := v.Aux
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 32 && SizeAndAlign(s).Align()%8 == 0) { if !(t.(Type).Alignment()%8 == 0) {
break break
} }
v.reset(OpPPC64MOVDstorezero) v.reset(OpPPC64MOVDstorezero)
@ -9925,21 +9953,22 @@ func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] ptr mem) // match: (Zero [s] {t} ptr mem)
// cond: (SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0 // cond: (s > 512 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0
// result: (LoweredZero [SizeAndAlign(s).Align()] ptr (ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem) // result: (LoweredZero [t.(Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
t := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !((SizeAndAlign(s).Size() > 512 || config.noDuffDevice) || SizeAndAlign(s).Align()%8 != 0) { if !((s > 512 || config.noDuffDevice) || t.(Type).Alignment()%8 != 0) {
break break
} }
v.reset(OpPPC64LoweredZero) v.reset(OpPPC64LoweredZero)
v.AuxInt = SizeAndAlign(s).Align() v.AuxInt = t.(Type).Alignment()
v.AddArg(ptr) v.AddArg(ptr)
v0 := b.NewValue0(v.Pos, OpPPC64ADDconst, ptr.Type) v0 := b.NewValue0(v.Pos, OpPPC64ADDconst, ptr.Type)
v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config) v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
v0.AddArg(ptr) v0.AddArg(ptr)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
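
The fixed-size Zero rules above now match the byte count literally in AuxInt and consult Aux only for alignment. A small model of how an aligned Zero decomposes into storezero ops, assuming the widest legal store per the rules (function and shape are mine):

    package main

    import "fmt"

    // zeroChunks models how the aligned Zero rules above decompose n bytes
    // into fixed-width storezero ops, highest offset first. Model only.
    func zeroChunks(n, align int64) []int64 {
        w := align
        if w > 8 {
            w = 8
        }
        offs := []int64{}
        for off := n - w; off >= 0; off -= w {
            offs = append(offs, off)
        }
        return offs
    }

    func main() {
        fmt.Println(zeroChunks(16, 8)) // [8 0]     -> MOVDstorezero [8] then [0]
        fmt.Println(zeroChunks(8, 2))  // [6 4 2 0] -> four MOVHstorezero ops
    }
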

@ -3777,31 +3777,29 @@ func rewriteValueS390X_OpMod8u(v *Value, config *Config) bool {
func rewriteValueS390X_OpMove(v *Value, config *Config) bool { func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Move [s] _ _ mem) // match: (Move [0] _ _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[2]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [1] dst src mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstore dst (MOVBZload src mem) mem) // result: (MOVBstore dst (MOVBZload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 1) {
break
}
v.reset(OpS390XMOVBstore) v.reset(OpS390XMOVBstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpS390XMOVBZload, config.fe.TypeUInt8()) v0 := b.NewValue0(v.Pos, OpS390XMOVBZload, config.fe.TypeUInt8())
@ -3811,17 +3809,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [2] dst src mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVHstore dst (MOVHZload src mem) mem) // result: (MOVHstore dst (MOVHZload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 2) {
break
}
v.reset(OpS390XMOVHstore) v.reset(OpS390XMOVHstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpS390XMOVHZload, config.fe.TypeUInt16()) v0 := b.NewValue0(v.Pos, OpS390XMOVHZload, config.fe.TypeUInt16())
@ -3831,17 +3828,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [4] dst src mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVWstore dst (MOVWZload src mem) mem) // result: (MOVWstore dst (MOVWZload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 4) {
break
}
v.reset(OpS390XMOVWstore) v.reset(OpS390XMOVWstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpS390XMOVWZload, config.fe.TypeUInt32()) v0 := b.NewValue0(v.Pos, OpS390XMOVWZload, config.fe.TypeUInt32())
@ -3851,17 +3847,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [8] dst src mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVDstore dst (MOVDload src mem) mem) // result: (MOVDstore dst (MOVDload src mem) mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 8) {
break
}
v.reset(OpS390XMOVDstore) v.reset(OpS390XMOVDstore)
v.AddArg(dst) v.AddArg(dst)
v0 := b.NewValue0(v.Pos, OpS390XMOVDload, config.fe.TypeUInt64()) v0 := b.NewValue0(v.Pos, OpS390XMOVDload, config.fe.TypeUInt64())
@ -3871,17 +3866,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [16] dst src mem)
// cond: SizeAndAlign(s).Size() == 16 // cond:
// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 16 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 16) {
break
}
v.reset(OpS390XMOVDstore) v.reset(OpS390XMOVDstore)
v.AuxInt = 8 v.AuxInt = 8
v.AddArg(dst) v.AddArg(dst)
@ -3900,17 +3894,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [24] dst src mem)
// cond: SizeAndAlign(s).Size() == 24 // cond:
// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 24 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 24) {
break
}
v.reset(OpS390XMOVDstore) v.reset(OpS390XMOVDstore)
v.AuxInt = 16 v.AuxInt = 16
v.AddArg(dst) v.AddArg(dst)
@ -3938,17 +3931,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [3] dst src mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstore [2] dst (MOVBZload [2] src mem) (MOVHstore dst (MOVHZload src mem) mem)) // result: (MOVBstore [2] dst (MOVBZload [2] src mem) (MOVHstore dst (MOVHZload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 3) {
break
}
v.reset(OpS390XMOVBstore) v.reset(OpS390XMOVBstore)
v.AuxInt = 2 v.AuxInt = 2
v.AddArg(dst) v.AddArg(dst)
@ -3967,17 +3959,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [5] dst src mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstore [4] dst (MOVBZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem)) // result: (MOVBstore [4] dst (MOVBZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 5) {
break
}
v.reset(OpS390XMOVBstore) v.reset(OpS390XMOVBstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -3996,17 +3987,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [6] dst src mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem)) // result: (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 6) {
break
}
v.reset(OpS390XMOVHstore) v.reset(OpS390XMOVHstore)
v.AuxInt = 4 v.AuxInt = 4
v.AddArg(dst) v.AddArg(dst)
@ -4025,17 +4015,16 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [7] dst src mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVBstore [6] dst (MOVBZload [6] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem))) // result: (MOVBstore [6] dst (MOVBZload [6] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem)))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
break
}
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() == 7) {
break
}
v.reset(OpS390XMOVBstore) v.reset(OpS390XMOVBstore)
v.AuxInt = 6 v.AuxInt = 6
v.AddArg(dst) v.AddArg(dst)
@ -4064,36 +4053,36 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 0 && SizeAndAlign(s).Size() <= 256 // cond: s > 0 && s <= 256
// result: (MVC [makeValAndOff(SizeAndAlign(s).Size(), 0)] dst src mem) // result: (MVC [makeValAndOff(s, 0)] dst src mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 0 && SizeAndAlign(s).Size() <= 256) { if !(s > 0 && s <= 256) {
break break
} }
v.reset(OpS390XMVC) v.reset(OpS390XMVC)
v.AuxInt = makeValAndOff(SizeAndAlign(s).Size(), 0) v.AuxInt = makeValAndOff(s, 0)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 256 && SizeAndAlign(s).Size() <= 512 // cond: s > 256 && s <= 512
// result: (MVC [makeValAndOff(SizeAndAlign(s).Size()-256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)) // result: (MVC [makeValAndOff(s-256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 256 && SizeAndAlign(s).Size() <= 512) { if !(s > 256 && s <= 512) {
break break
} }
v.reset(OpS390XMVC) v.reset(OpS390XMVC)
v.AuxInt = makeValAndOff(SizeAndAlign(s).Size()-256, 256) v.AuxInt = makeValAndOff(s-256, 256)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpS390XMVC, TypeMem) v0 := b.NewValue0(v.Pos, OpS390XMVC, TypeMem)
@ -4105,18 +4094,18 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 512 && SizeAndAlign(s).Size() <= 768 // cond: s > 512 && s <= 768
// result: (MVC [makeValAndOff(SizeAndAlign(s).Size()-512, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))) // result: (MVC [makeValAndOff(s-512, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 512 && SizeAndAlign(s).Size() <= 768) { if !(s > 512 && s <= 768) {
break break
} }
v.reset(OpS390XMVC) v.reset(OpS390XMVC)
v.AuxInt = makeValAndOff(SizeAndAlign(s).Size()-512, 512) v.AuxInt = makeValAndOff(s-512, 512)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpS390XMVC, TypeMem) v0 := b.NewValue0(v.Pos, OpS390XMVC, TypeMem)
@ -4133,18 +4122,18 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 768 && SizeAndAlign(s).Size() <= 1024 // cond: s > 768 && s <= 1024
// result: (MVC [makeValAndOff(SizeAndAlign(s).Size()-768, 768)] dst src (MVC [makeValAndOff(256, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)))) // result: (MVC [makeValAndOff(s-768, 768)] dst src (MVC [makeValAndOff(256, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))))
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 768 && SizeAndAlign(s).Size() <= 1024) { if !(s > 768 && s <= 1024) {
break break
} }
v.reset(OpS390XMVC) v.reset(OpS390XMVC)
v.AuxInt = makeValAndOff(SizeAndAlign(s).Size()-768, 768) v.AuxInt = makeValAndOff(s-768, 768)
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpS390XMVC, TypeMem) v0 := b.NewValue0(v.Pos, OpS390XMVC, TypeMem)
@ -4166,22 +4155,22 @@ func rewriteValueS390X_OpMove(v *Value, config *Config) bool {
return true return true
} }
// match: (Move [s] dst src mem) // match: (Move [s] dst src mem)
// cond: SizeAndAlign(s).Size() > 1024 // cond: s > 1024
// result: (LoweredMove [SizeAndAlign(s).Size()%256] dst src (ADDconst <src.Type> src [(SizeAndAlign(s).Size()/256)*256]) mem) // result: (LoweredMove [s%256] dst src (ADDconst <src.Type> src [(s/256)*256]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
dst := v.Args[0] dst := v.Args[0]
src := v.Args[1] src := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(SizeAndAlign(s).Size() > 1024) { if !(s > 1024) {
break break
} }
v.reset(OpS390XLoweredMove) v.reset(OpS390XLoweredMove)
v.AuxInt = SizeAndAlign(s).Size() % 256 v.AuxInt = s % 256
v.AddArg(dst) v.AddArg(dst)
v.AddArg(src) v.AddArg(src)
v0 := b.NewValue0(v.Pos, OpS390XADDconst, src.Type) v0 := b.NewValue0(v.Pos, OpS390XADDconst, src.Type)
v0.AuxInt = (SizeAndAlign(s).Size() / 256) * 256 v0.AuxInt = (s / 256) * 256
v0.AddArg(src) v0.AddArg(src)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
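
The S390X rules never used the alignment half, so they simply read the raw AuxInt where SizeAndAlign(s).Size() used to be unpacked. For 256 < s <= 1024 the move becomes a chain of MVC ops whose ValAndOff AuxInts step through the buffer; a runnable model of the chain for an assumed s of 600:

    package main

    import "fmt"

    // mvcChain models the chained-MVC lowering above for 0 < s <= 1024:
    // 256-byte copies from offset 0 upward, then the remainder. The real
    // rules build this as nested SSA values, innermost first.
    func mvcChain(s int64) {
        off := int64(0)
        for s > 256 {
            fmt.Printf("MVC [val=256 off=%d]\n", off)
            s -= 256
            off += 256
        }
        fmt.Printf("MVC [val=%d off=%d]\n", s, off)
    }

    func main() {
        mvcChain(600)
        // Output:
        // MVC [val=256 off=0]
        // MVC [val=256 off=256]
        // MVC [val=88 off=512]
    }
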
@ -18095,17 +18084,15 @@ func rewriteValueS390X_OpStaticCall(v *Value, config *Config) bool {
func rewriteValueS390X_OpStore(v *Value, config *Config) bool { func rewriteValueS390X_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is64BitFloat(val.Type) // cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
// result: (FMOVDstore ptr val mem) // result: (FMOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is64BitFloat(val.Type)) { if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
break break
} }
v.reset(OpS390XFMOVDstore) v.reset(OpS390XFMOVDstore)
@ -18114,17 +18101,15 @@ func rewriteValueS390X_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: is32BitFloat(val.Type) // cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
// result: (FMOVSstore ptr val mem) // result: (FMOVSstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(is32BitFloat(val.Type)) { if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
break break
} }
v.reset(OpS390XFMOVSstore) v.reset(OpS390XFMOVSstore)
@ -18133,64 +18118,68 @@ func rewriteValueS390X_OpStore(v *Value, config *Config) bool {
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [8] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 8
// result: (MOVDstore ptr val mem) // result: (MOVDstore ptr val mem)
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 8) {
break
}
v.reset(OpS390XMOVDstore) v.reset(OpS390XMOVDstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [4] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 4
// result: (MOVWstore ptr val mem) // result: (MOVWstore ptr val mem)
for { for {
if v.AuxInt != 4 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 4) {
break
}
v.reset(OpS390XMOVWstore) v.reset(OpS390XMOVWstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [2] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 2
// result: (MOVHstore ptr val mem) // result: (MOVHstore ptr val mem)
for { for {
if v.AuxInt != 2 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 2) {
break
}
v.reset(OpS390XMOVHstore) v.reset(OpS390XMOVHstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Store [1] ptr val mem) // match: (Store {t} ptr val mem)
// cond: // cond: t.(Type).Size() == 1
// result: (MOVBstore ptr val mem) // result: (MOVBstore ptr val mem)
for { for {
if v.AuxInt != 1 { t := v.Aux
break
}
ptr := v.Args[0] ptr := v.Args[0]
val := v.Args[1] val := v.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 1) {
break
}
v.reset(OpS390XMOVBstore) v.reset(OpS390XMOVBstore)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(val) v.AddArg(val)
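
As in the other backends, each Store rule now loads t from Aux up front and folds the size test into the rule condition, so the matcher no longer rejects on AuxInt before binding its arguments. A self-contained model of the Aux type-assertion pattern (Type here is a stand-in interface; the comma-ok form is only defensive for the sketch, while the generated code asserts t.(Type) directly):

    package main

    import "fmt"

    // Type stands in for the interface consulted via Aux; only Size is
    // assumed to matter for this sketch.
    type Type interface{ Size() int64 }

    type fixedType int64

    func (t fixedType) Size() int64 { return int64(t) }

    // matchStore8 mirrors the new matcher shape: Aux holds a Type whose
    // Size joins the rule's other conditions.
    func matchStore8(aux interface{}) bool {
        t, ok := aux.(Type)
        return ok && t.Size() == 8
    }

    func main() {
        fmt.Println(matchStore8(fixedType(8))) // true
        fmt.Println(matchStore8(fixedType(4))) // false
    }
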
@ -18451,94 +18440,88 @@ func rewriteValueS390X_OpXor8(v *Value, config *Config) bool {
func rewriteValueS390X_OpZero(v *Value, config *Config) bool { func rewriteValueS390X_OpZero(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Zero [s] _ mem) // match: (Zero [0] _ mem)
// cond: SizeAndAlign(s).Size() == 0 // cond:
// result: mem // result: mem
for { for {
s := v.AuxInt if v.AuxInt != 0 {
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 0) {
break break
} }
mem := v.Args[1]
v.reset(OpCopy) v.reset(OpCopy)
v.Type = mem.Type v.Type = mem.Type
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [1] destptr mem)
// cond: SizeAndAlign(s).Size() == 1 // cond:
// result: (MOVBstoreconst [0] destptr mem) // result: (MOVBstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 1 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 1) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVBstoreconst) v.reset(OpS390XMOVBstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [2] destptr mem)
// cond: SizeAndAlign(s).Size() == 2 // cond:
// result: (MOVHstoreconst [0] destptr mem) // result: (MOVHstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 2 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 2) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVHstoreconst) v.reset(OpS390XMOVHstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [4] destptr mem)
// cond: SizeAndAlign(s).Size() == 4 // cond:
// result: (MOVWstoreconst [0] destptr mem) // result: (MOVWstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 4 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 4) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVWstoreconst) v.reset(OpS390XMOVWstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [8] destptr mem)
// cond: SizeAndAlign(s).Size() == 8 // cond:
// result: (MOVDstoreconst [0] destptr mem) // result: (MOVDstoreconst [0] destptr mem)
for { for {
s := v.AuxInt if v.AuxInt != 8 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 8) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVDstoreconst) v.reset(OpS390XMOVDstoreconst)
v.AuxInt = 0 v.AuxInt = 0
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [3] destptr mem)
// cond: SizeAndAlign(s).Size() == 3 // cond:
// result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVHstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVHstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 3 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 3) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVBstoreconst) v.reset(OpS390XMOVBstoreconst)
v.AuxInt = makeValAndOff(0, 2) v.AuxInt = makeValAndOff(0, 2)
v.AddArg(destptr) v.AddArg(destptr)
@ -18549,16 +18532,15 @@ func rewriteValueS390X_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [5] destptr mem)
// cond: SizeAndAlign(s).Size() == 5 // cond:
// result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 5 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 5) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVBstoreconst) v.reset(OpS390XMOVBstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -18569,16 +18551,15 @@ func rewriteValueS390X_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [6] destptr mem)
// cond: SizeAndAlign(s).Size() == 6 // cond:
// result: (MOVHstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem)) // result: (MOVHstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 6 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 6) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVHstoreconst) v.reset(OpS390XMOVHstoreconst)
v.AuxInt = makeValAndOff(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
@ -18589,16 +18570,15 @@ func rewriteValueS390X_OpZero(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [7] destptr mem)
// cond: SizeAndAlign(s).Size() == 7 // cond:
// result: (MOVWstoreconst [makeValAndOff(0,3)] destptr (MOVWstoreconst [0] destptr mem)) // result: (MOVWstoreconst [makeValAndOff(0,3)] destptr (MOVWstoreconst [0] destptr mem))
for { for {
s := v.AuxInt if v.AuxInt != 7 {
destptr := v.Args[0]
mem := v.Args[1]
if !(SizeAndAlign(s).Size() == 7) {
break break
} }
destptr := v.Args[0]
mem := v.Args[1]
v.reset(OpS390XMOVWstoreconst) v.reset(OpS390XMOVWstoreconst)
v.AuxInt = makeValAndOff(0, 3) v.AuxInt = makeValAndOff(0, 3)
v.AddArg(destptr) v.AddArg(destptr)
@ -18610,36 +18590,36 @@ func rewriteValueS390X_OpZero(v *Value, config *Config) bool {
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size() > 0 && SizeAndAlign(s).Size() <= 1024 // cond: s > 0 && s <= 1024
// result: (CLEAR [makeValAndOff(SizeAndAlign(s).Size(), 0)] destptr mem) // result: (CLEAR [makeValAndOff(s, 0)] destptr mem)
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() > 0 && SizeAndAlign(s).Size() <= 1024) { if !(s > 0 && s <= 1024) {
break break
} }
v.reset(OpS390XCLEAR) v.reset(OpS390XCLEAR)
v.AuxInt = makeValAndOff(SizeAndAlign(s).Size(), 0) v.AuxInt = makeValAndOff(s, 0)
v.AddArg(destptr) v.AddArg(destptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
// match: (Zero [s] destptr mem) // match: (Zero [s] destptr mem)
// cond: SizeAndAlign(s).Size() > 1024 // cond: s > 1024
// result: (LoweredZero [SizeAndAlign(s).Size()%256] destptr (ADDconst <destptr.Type> destptr [(SizeAndAlign(s).Size()/256)*256]) mem) // result: (LoweredZero [s%256] destptr (ADDconst <destptr.Type> destptr [(s/256)*256]) mem)
for { for {
s := v.AuxInt s := v.AuxInt
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(SizeAndAlign(s).Size() > 1024) { if !(s > 1024) {
break break
} }
v.reset(OpS390XLoweredZero) v.reset(OpS390XLoweredZero)
v.AuxInt = SizeAndAlign(s).Size() % 256 v.AuxInt = s % 256
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Pos, OpS390XADDconst, destptr.Type) v0 := b.NewValue0(v.Pos, OpS390XADDconst, destptr.Type)
v0.AuxInt = (SizeAndAlign(s).Size() / 256) * 256 v0.AuxInt = (s / 256) * 256
v0.AddArg(destptr) v0.AddArg(destptr)
v.AddArg(v0) v.AddArg(v0)
v.AddArg(mem) v.AddArg(mem)
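
Above 1024 bytes the CLEAR rules give way to LoweredZero, which splits s into 256-byte blocks for the loop plus a tail carried in AuxInt; the same split appears in LoweredMove earlier in this file. The arithmetic for an assumed 1300-byte zero:

    package main

    import "fmt"

    func main() {
        s := int64(1300)             // assumed example size
        fmt.Println(s % 256)         // 20   -> AuxInt: tail after the loop
        fmt.Println((s / 256) * 256) // 1280 -> ADDconst: where the loop stops
    }
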

@ -301,13 +301,11 @@ func rewriteValuedec_OpSlicePtr(v *Value, config *Config) bool {
func rewriteValuedec_OpStore(v *Value, config *Config) bool { func rewriteValuedec_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [8] dst (ComplexMake real imag) mem) // match: (Store {t} dst (ComplexMake real imag) mem)
// cond: // cond: t.(Type).Size() == 8
// result: (Store [4] {config.fe.TypeFloat32()} (OffPtr <config.fe.TypeFloat32().PtrTo()> [4] dst) imag (Store [4] {config.fe.TypeFloat32()} dst real mem)) // result: (Store {config.fe.TypeFloat32()} (OffPtr <config.fe.TypeFloat32().PtrTo()> [4] dst) imag (Store {config.fe.TypeFloat32()} dst real mem))
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpComplexMake { if v_1.Op != OpComplexMake {
@ -316,8 +314,10 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
real := v_1.Args[0] real := v_1.Args[0]
imag := v_1.Args[1] imag := v_1.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 8) {
break
}
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = 4
v.Aux = config.fe.TypeFloat32() v.Aux = config.fe.TypeFloat32()
v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeFloat32().PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeFloat32().PtrTo())
v0.AuxInt = 4 v0.AuxInt = 4
@ -325,7 +325,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(imag) v.AddArg(imag)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = 4
v1.Aux = config.fe.TypeFloat32() v1.Aux = config.fe.TypeFloat32()
v1.AddArg(dst) v1.AddArg(dst)
v1.AddArg(real) v1.AddArg(real)
@ -333,13 +332,11 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Store [16] dst (ComplexMake real imag) mem) // match: (Store {t} dst (ComplexMake real imag) mem)
// cond: // cond: t.(Type).Size() == 16
// result: (Store [8] {config.fe.TypeFloat64()} (OffPtr <config.fe.TypeFloat64().PtrTo()> [8] dst) imag (Store [8] {config.fe.TypeFloat64()} dst real mem)) // result: (Store {config.fe.TypeFloat64()} (OffPtr <config.fe.TypeFloat64().PtrTo()> [8] dst) imag (Store {config.fe.TypeFloat64()} dst real mem))
for { for {
if v.AuxInt != 16 { t := v.Aux
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpComplexMake { if v_1.Op != OpComplexMake {
@ -348,8 +345,10 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
real := v_1.Args[0] real := v_1.Args[0]
imag := v_1.Args[1] imag := v_1.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(t.(Type).Size() == 16) {
break
}
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = 8
v.Aux = config.fe.TypeFloat64() v.Aux = config.fe.TypeFloat64()
v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeFloat64().PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeFloat64().PtrTo())
v0.AuxInt = 8 v0.AuxInt = 8
@ -357,7 +356,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(imag) v.AddArg(imag)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = 8
v1.Aux = config.fe.TypeFloat64() v1.Aux = config.fe.TypeFloat64()
v1.AddArg(dst) v1.AddArg(dst)
v1.AddArg(real) v1.AddArg(real)
@ -365,13 +363,10 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Store [2*config.PtrSize] dst (StringMake ptr len) mem) // match: (Store dst (StringMake ptr len) mem)
// cond: // cond:
// result: (Store [config.PtrSize] {config.fe.TypeInt()} (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] {config.fe.TypeBytePtr()} dst ptr mem)) // result: (Store {config.fe.TypeInt()} (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store {config.fe.TypeBytePtr()} dst ptr mem))
for { for {
if v.AuxInt != 2*config.PtrSize {
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpStringMake { if v_1.Op != OpStringMake {
@ -381,7 +376,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
len := v_1.Args[1] len := v_1.Args[1]
mem := v.Args[2] mem := v.Args[2]
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = config.PtrSize
v.Aux = config.fe.TypeInt() v.Aux = config.fe.TypeInt()
v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeInt().PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeInt().PtrTo())
v0.AuxInt = config.PtrSize v0.AuxInt = config.PtrSize
@ -389,7 +383,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(len) v.AddArg(len)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = config.PtrSize
v1.Aux = config.fe.TypeBytePtr() v1.Aux = config.fe.TypeBytePtr()
v1.AddArg(dst) v1.AddArg(dst)
v1.AddArg(ptr) v1.AddArg(ptr)
@ -397,13 +390,10 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Store [3*config.PtrSize] dst (SliceMake ptr len cap) mem) // match: (Store dst (SliceMake ptr len cap) mem)
// cond: // cond:
// result: (Store [config.PtrSize] {config.fe.TypeInt()} (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst) cap (Store [config.PtrSize] {config.fe.TypeInt()} (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] {config.fe.TypeBytePtr()} dst ptr mem))) // result: (Store {config.fe.TypeInt()} (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst) cap (Store {config.fe.TypeInt()} (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store {config.fe.TypeBytePtr()} dst ptr mem)))
for { for {
if v.AuxInt != 3*config.PtrSize {
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpSliceMake { if v_1.Op != OpSliceMake {
@ -414,7 +404,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
cap := v_1.Args[2] cap := v_1.Args[2]
mem := v.Args[2] mem := v.Args[2]
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = config.PtrSize
v.Aux = config.fe.TypeInt() v.Aux = config.fe.TypeInt()
v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeInt().PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeInt().PtrTo())
v0.AuxInt = 2 * config.PtrSize v0.AuxInt = 2 * config.PtrSize
@ -422,7 +411,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(cap) v.AddArg(cap)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = config.PtrSize
v1.Aux = config.fe.TypeInt() v1.Aux = config.fe.TypeInt()
v2 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeInt().PtrTo()) v2 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeInt().PtrTo())
v2.AuxInt = config.PtrSize v2.AuxInt = config.PtrSize
@ -430,7 +418,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v1.AddArg(v2) v1.AddArg(v2)
v1.AddArg(len) v1.AddArg(len)
v3 := b.NewValue0(v.Pos, OpStore, TypeMem) v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
v3.AuxInt = config.PtrSize
v3.Aux = config.fe.TypeBytePtr() v3.Aux = config.fe.TypeBytePtr()
v3.AddArg(dst) v3.AddArg(dst)
v3.AddArg(ptr) v3.AddArg(ptr)
@ -439,13 +426,10 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Store [2*config.PtrSize] dst (IMake itab data) mem) // match: (Store dst (IMake itab data) mem)
// cond: // cond:
// result: (Store [config.PtrSize] {config.fe.TypeBytePtr()} (OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] dst) data (Store [config.PtrSize] {config.fe.TypeUintptr()} dst itab mem)) // result: (Store {config.fe.TypeBytePtr()} (OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] dst) data (Store {config.fe.TypeUintptr()} dst itab mem))
for { for {
if v.AuxInt != 2*config.PtrSize {
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpIMake { if v_1.Op != OpIMake {
@ -455,7 +439,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
data := v_1.Args[1] data := v_1.Args[1]
mem := v.Args[2] mem := v.Args[2]
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = config.PtrSize
v.Aux = config.fe.TypeBytePtr() v.Aux = config.fe.TypeBytePtr()
v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeBytePtr().PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, config.fe.TypeBytePtr().PtrTo())
v0.AuxInt = config.PtrSize v0.AuxInt = config.PtrSize
@ -463,7 +446,6 @@ func rewriteValuedec_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(data) v.AddArg(data)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = config.PtrSize
v1.Aux = config.fe.TypeUintptr() v1.Aux = config.fe.TypeUintptr()
v1.AddArg(dst) v1.AddArg(dst)
v1.AddArg(itab) v1.AddArg(itab)
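
In the decomposition rules above, each component store now identifies its width solely through the type in Aux, and the compound matches (StringMake, SliceMake, IMake) drop their [2*config.PtrSize]-style AuxInt guards, since the payload op already fixes the layout. A sketch of the chain a string store expands into, assuming a 64-bit target (all names mine):

    package main

    import "fmt"

    // Model of the StringMake decomposition above: the compound store
    // becomes a len store at offset ptrSize chained after a ptr store
    // at offset 0 (the inner Store executes first).
    const ptrSize = 8

    func decomposeStringStore() []string {
        return []string{
            fmt.Sprintf("Store {BytePtr} dst+%d ptr", 0),       // inner, runs first
            fmt.Sprintf("Store {Int}     dst+%d len", ptrSize), // outer
        }
    }

    func main() {
        for _, s := range decomposeStringStore() {
            fmt.Println(s)
        }
    }
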

@ -2396,13 +2396,11 @@ func rewriteValuedec64_OpSignExt8to64(v *Value, config *Config) bool {
func rewriteValuedec64_OpStore(v *Value, config *Config) bool { func rewriteValuedec64_OpStore(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Store [8] dst (Int64Make hi lo) mem) // match: (Store {t} dst (Int64Make hi lo) mem)
// cond: !config.BigEndian // cond: t.(Type).Size() == 8 && !config.BigEndian
// result: (Store [4] {hi.Type} (OffPtr <hi.Type.PtrTo()> [4] dst) hi (Store [4] {lo.Type} dst lo mem)) // result: (Store {hi.Type} (OffPtr <hi.Type.PtrTo()> [4] dst) hi (Store {lo.Type} dst lo mem))
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpInt64Make { if v_1.Op != OpInt64Make {
@ -2411,11 +2409,10 @@ func rewriteValuedec64_OpStore(v *Value, config *Config) bool {
hi := v_1.Args[0] hi := v_1.Args[0]
lo := v_1.Args[1] lo := v_1.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(!config.BigEndian) { if !(t.(Type).Size() == 8 && !config.BigEndian) {
break break
} }
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = 4
v.Aux = hi.Type v.Aux = hi.Type
v0 := b.NewValue0(v.Pos, OpOffPtr, hi.Type.PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, hi.Type.PtrTo())
v0.AuxInt = 4 v0.AuxInt = 4
@ -2423,7 +2420,6 @@ func rewriteValuedec64_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(hi) v.AddArg(hi)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = 4
v1.Aux = lo.Type v1.Aux = lo.Type
v1.AddArg(dst) v1.AddArg(dst)
v1.AddArg(lo) v1.AddArg(lo)
@ -2431,13 +2427,11 @@ func rewriteValuedec64_OpStore(v *Value, config *Config) bool {
v.AddArg(v1) v.AddArg(v1)
return true return true
} }
// match: (Store [8] dst (Int64Make hi lo) mem) // match: (Store {t} dst (Int64Make hi lo) mem)
// cond: config.BigEndian // cond: t.(Type).Size() == 8 && config.BigEndian
// result: (Store [4] {lo.Type} (OffPtr <lo.Type.PtrTo()> [4] dst) lo (Store [4] {hi.Type} dst hi mem)) // result: (Store {lo.Type} (OffPtr <lo.Type.PtrTo()> [4] dst) lo (Store {hi.Type} dst hi mem))
for { for {
if v.AuxInt != 8 { t := v.Aux
break
}
dst := v.Args[0] dst := v.Args[0]
v_1 := v.Args[1] v_1 := v.Args[1]
if v_1.Op != OpInt64Make { if v_1.Op != OpInt64Make {
@ -2446,11 +2440,10 @@ func rewriteValuedec64_OpStore(v *Value, config *Config) bool {
hi := v_1.Args[0] hi := v_1.Args[0]
lo := v_1.Args[1] lo := v_1.Args[1]
mem := v.Args[2] mem := v.Args[2]
if !(config.BigEndian) { if !(t.(Type).Size() == 8 && config.BigEndian) {
break break
} }
v.reset(OpStore) v.reset(OpStore)
v.AuxInt = 4
v.Aux = lo.Type v.Aux = lo.Type
v0 := b.NewValue0(v.Pos, OpOffPtr, lo.Type.PtrTo()) v0 := b.NewValue0(v.Pos, OpOffPtr, lo.Type.PtrTo())
v0.AuxInt = 4 v0.AuxInt = 4
@ -2458,7 +2451,6 @@ func rewriteValuedec64_OpStore(v *Value, config *Config) bool {
v.AddArg(v0) v.AddArg(v0)
v.AddArg(lo) v.AddArg(lo)
v1 := b.NewValue0(v.Pos, OpStore, TypeMem) v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
v1.AuxInt = 4
v1.Aux = hi.Type v1.Aux = hi.Type
v1.AddArg(dst) v1.AddArg(dst)
v1.AddArg(hi) v1.AddArg(hi)
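
The 64-bit decomposition keeps its two endianness variants; only the size check moves from AuxInt into the rule condition. Which half lands at byte offset 4 follows config.BigEndian, as this small model shows (names are illustrative):

    package main

    import "fmt"

    // Model of the Int64Make split above: on little-endian targets the
    // low word sits at offset 0 and the high word at offset 4; big-endian
    // swaps them.
    func int64Halves(bigEndian bool) (atZero, atFour string) {
        if bigEndian {
            return "hi", "lo"
        }
        return "lo", "hi"
    }

    func main() {
        fmt.Println(int64Halves(false)) // lo hi : little-endian, hi word at +4
        fmt.Println(int64Halves(true))  // hi lo : big-endian, lo word at +4
    }
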

@ -6764,8 +6764,8 @@ func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool { func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (Load <t1> p1 (Store [w] p2 x _)) // match: (Load <t1> p1 (Store {t2} p2 x _))
// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() // cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && t1.Size() == t2.(Type).Size()
// result: x // result: x
for { for {
t1 := v.Type t1 := v.Type
@ -6774,10 +6774,10 @@ func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
if v_1.Op != OpStore { if v_1.Op != OpStore {
break break
} }
w := v_1.AuxInt t2 := v_1.Aux
p2 := v_1.Args[0] p2 := v_1.Args[0]
x := v_1.Args[1] x := v_1.Args[1]
if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) { if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && t1.Size() == t2.(Type).Size()) {
break break
} }
v.reset(OpCopy) v.reset(OpCopy)
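
The store-to-load forwarding rule above keeps its same-pointer and equal-type checks but now compares the load width against the size of the stored type taken from Aux (t2), instead of the old AuxInt width w. A toy model of the condition (typ and every name here are stand-ins, not compiler API):

    package main

    import "fmt"

    type typ struct {
        name string
        size int64
    }

    // canForward models the Load-over-Store condition: same address, the
    // load type matches the stored value's type, and the load width
    // matches the store's Aux type width.
    func canForward(samePtr bool, loadT, storedValT, storeAuxT typ) bool {
        return samePtr && loadT.name == storedValT.name && loadT.size == storeAuxT.size
    }

    func main() {
        i64 := typ{"int64", 8}
        f64 := typ{"float64", 8}
        fmt.Println(canForward(true, i64, i64, i64)) // true: forward stored value
        fmt.Println(canForward(true, i64, f64, f64)) // false: types differ, reload
    }
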
@@ -14748,7 +14748,7 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 	}
 	// match: (Store dst (StructMake1 <t> f0) mem)
 	// cond:
-	// result: (Store [t.FieldType(0).Size()] {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)
+	// result: (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)
 	for {
 		dst := v.Args[0]
 		v_1 := v.Args[1]
@@ -14759,7 +14759,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		f0 := v_1.Args[0]
 		mem := v.Args[2]
 		v.reset(OpStore)
-		v.AuxInt = t.FieldType(0).Size()
 		v.Aux = t.FieldType(0)
 		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 		v0.AuxInt = 0
@@ -14771,7 +14770,7 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 	}
 	// match: (Store dst (StructMake2 <t> f0 f1) mem)
 	// cond:
-	// result: (Store [t.FieldType(1).Size()] {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
+	// result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
 	for {
 		dst := v.Args[0]
 		v_1 := v.Args[1]
@@ -14783,7 +14782,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		f1 := v_1.Args[1]
 		mem := v.Args[2]
 		v.reset(OpStore)
-		v.AuxInt = t.FieldType(1).Size()
 		v.Aux = t.FieldType(1)
 		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 		v0.AuxInt = t.FieldOff(1)
@@ -14791,7 +14789,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v.AddArg(v0)
 		v.AddArg(f1)
 		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
-		v1.AuxInt = t.FieldType(0).Size()
 		v1.Aux = t.FieldType(0)
 		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 		v2.AuxInt = 0
@@ -14804,7 +14801,7 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 	}
 	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
 	// cond:
-	// result: (Store [t.FieldType(2).Size()] {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)))
+	// result: (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)))
 	for {
 		dst := v.Args[0]
 		v_1 := v.Args[1]
@@ -14817,7 +14814,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		f2 := v_1.Args[2]
 		mem := v.Args[2]
 		v.reset(OpStore)
-		v.AuxInt = t.FieldType(2).Size()
 		v.Aux = t.FieldType(2)
 		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 		v0.AuxInt = t.FieldOff(2)
@@ -14825,7 +14821,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v.AddArg(v0)
 		v.AddArg(f2)
 		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
-		v1.AuxInt = t.FieldType(1).Size()
 		v1.Aux = t.FieldType(1)
 		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 		v2.AuxInt = t.FieldOff(1)
@@ -14833,7 +14828,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v1.AddArg(v2)
 		v1.AddArg(f1)
 		v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
-		v3.AuxInt = t.FieldType(0).Size()
 		v3.Aux = t.FieldType(0)
 		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 		v4.AuxInt = 0
@@ -14847,7 +14841,7 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 	}
 	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
 	// cond:
-	// result: (Store [t.FieldType(3).Size()] {t.FieldType(3)} (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))))
+	// result: (Store {t.FieldType(3)} (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))))
 	for {
 		dst := v.Args[0]
 		v_1 := v.Args[1]
@@ -14861,7 +14855,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		f3 := v_1.Args[3]
 		mem := v.Args[2]
 		v.reset(OpStore)
-		v.AuxInt = t.FieldType(3).Size()
 		v.Aux = t.FieldType(3)
 		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
 		v0.AuxInt = t.FieldOff(3)
@@ -14869,7 +14862,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v.AddArg(v0)
 		v.AddArg(f3)
 		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
-		v1.AuxInt = t.FieldType(2).Size()
 		v1.Aux = t.FieldType(2)
 		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 		v2.AuxInt = t.FieldOff(2)
@@ -14877,7 +14869,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v1.AddArg(v2)
 		v1.AddArg(f2)
 		v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
-		v3.AuxInt = t.FieldType(1).Size()
 		v3.Aux = t.FieldType(1)
 		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 		v4.AuxInt = t.FieldOff(1)
@@ -14885,7 +14876,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v3.AddArg(v4)
 		v3.AddArg(f1)
 		v5 := b.NewValue0(v.Pos, OpStore, TypeMem)
-		v5.AuxInt = t.FieldType(0).Size()
 		v5.Aux = t.FieldType(0)
 		v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 		v6.AuxInt = 0
@@ -14898,44 +14888,42 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v.AddArg(v1)
 		return true
 	}
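The four StructMakeN rules all follow one pattern: a store of a whole struct becomes one store per field, each at its FieldOff offset and tagged with its own field type in Aux. A rough standalone analogue in plain Go (modern Go's unsafe.Add mirrors the OffPtr arithmetic; the types here are illustrative, not compiler code):

package main

import (
	"fmt"
	"unsafe"
)

type pair struct {
	a int32
	b int64
}

func main() {
	src := pair{a: 7, b: 9}
	var dst pair

	// (Store {t.FieldType(0)} (OffPtr <...> [0] dst) f0 mem)
	*(*int32)(unsafe.Pointer(&dst)) = src.a
	// (Store {t.FieldType(1)} (OffPtr <...> [t.FieldOff(1)] dst) f1 ...)
	*(*int64)(unsafe.Add(unsafe.Pointer(&dst), unsafe.Offsetof(dst.b))) = src.b

	fmt.Println(dst == src) // true
}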
-	// match: (Store [size] dst (Load <t> src mem) mem)
-	// cond: !config.fe.CanSSA(t)
-	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] {t} dst src mem)
+	// match: (Store {t} dst (Load src mem) mem)
+	// cond: !config.fe.CanSSA(t.(Type))
+	// result: (Move {t} [t.(Type).Size()] dst src mem)
 	for {
-		size := v.AuxInt
+		t := v.Aux
 		dst := v.Args[0]
 		v_1 := v.Args[1]
 		if v_1.Op != OpLoad {
 			break
 		}
-		t := v_1.Type
 		src := v_1.Args[0]
 		mem := v_1.Args[1]
 		if mem != v.Args[2] {
 			break
 		}
-		if !(!config.fe.CanSSA(t)) {
+		if !(!config.fe.CanSSA(t.(Type))) {
 			break
 		}
 		v.reset(OpMove)
-		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
+		v.AuxInt = t.(Type).Size()
 		v.Aux = t
 		v.AddArg(dst)
 		v.AddArg(src)
 		v.AddArg(mem)
 		return true
 	}
-	// match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem))
-	// cond: !config.fe.CanSSA(t)
-	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] {t} dst src (VarDef {x} mem))
+	// match: (Store {t} dst (Load src mem) (VarDef {x} mem))
+	// cond: !config.fe.CanSSA(t.(Type))
+	// result: (Move {t} [t.(Type).Size()] dst src (VarDef {x} mem))
 	for {
-		size := v.AuxInt
+		t := v.Aux
 		dst := v.Args[0]
 		v_1 := v.Args[1]
 		if v_1.Op != OpLoad {
 			break
 		}
-		t := v_1.Type
 		src := v_1.Args[0]
 		mem := v_1.Args[1]
 		v_2 := v.Args[2]
@@ -14946,11 +14934,11 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		if mem != v_2.Args[0] {
 			break
 		}
-		if !(!config.fe.CanSSA(t)) {
+		if !(!config.fe.CanSSA(t.(Type))) {
 			break
 		}
 		v.reset(OpMove)
-		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
+		v.AuxInt = t.(Type).Size()
 		v.Aux = t
 		v.AddArg(dst)
 		v.AddArg(src)
@@ -14974,11 +14962,10 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		v.AddArg(mem)
 		return true
 	}
-	// match: (Store [size] dst (ArrayMake1 e) mem)
+	// match: (Store dst (ArrayMake1 e) mem)
 	// cond:
-	// result: (Store [size] {e.Type} dst e mem)
+	// result: (Store {e.Type} dst e mem)
 	for {
-		size := v.AuxInt
 		dst := v.Args[0]
 		v_1 := v.Args[1]
 		if v_1.Op != OpArrayMake1 {
@@ -14987,7 +14974,6 @@ func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 		e := v_1.Args[0]
 		mem := v.Args[2]
 		v.reset(OpStore)
-		v.AuxInt = size
 		v.Aux = e.Type
 		v.AddArg(dst)
 		v.AddArg(e)
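The two Store-of-Load rules capture that, for a type the backend cannot SSA, "load then store" is really a memory-to-memory copy, so the pair collapses into a single Move whose byte count now comes from the type rather than from a SizeAndAlign AuxInt. Roughly, in ordinary Go terms (an illustrative sketch, not compiler code):

package main

import "fmt"

type big [64]byte // stand-in for a type the backend would not SSA

func main() {
	var src big
	for i := range src {
		src[i] = byte(i)
	}

	// Unoptimized shape: materialize the load, then store the value.
	tmp := src  // (Load src mem)
	dst1 := tmp // (Store {big} dst tmp mem)

	// Rewritten shape: one 64-byte move, no intermediate value.
	var dst2 big
	copy(dst2[:], src[:]) // (Move {big} [64] dst src mem)

	fmt.Println(dst1 == dst2) // true
}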

@@ -14,9 +14,9 @@ func TestSchedule(t *testing.T) {
 			Valu("mem0", OpInitMem, TypeMem, 0, nil),
 			Valu("ptr", OpConst64, TypeInt64, 0xABCD, nil),
 			Valu("v", OpConst64, TypeInt64, 12, nil),
-			Valu("mem1", OpStore, TypeMem, 8, nil, "ptr", "v", "mem0"),
-			Valu("mem2", OpStore, TypeMem, 8, nil, "ptr", "v", "mem1"),
-			Valu("mem3", OpStore, TypeInt64, 8, nil, "ptr", "sum", "mem2"),
+			Valu("mem1", OpStore, TypeMem, 0, TypeInt64, "ptr", "v", "mem0"),
+			Valu("mem2", OpStore, TypeMem, 0, TypeInt64, "ptr", "v", "mem1"),
+			Valu("mem3", OpStore, TypeMem, 0, TypeInt64, "ptr", "sum", "mem2"),
 			Valu("l1", OpLoad, TypeInt64, 0, nil, "ptr", "mem1"),
 			Valu("l2", OpLoad, TypeInt64, 0, nil, "ptr", "mem2"),
 			Valu("sum", OpAdd64, TypeInt64, 0, nil, "l1", "l2"),
@@ -66,8 +66,8 @@ func TestStoreOrder(t *testing.T) {
 			Valu("a", OpAdd64, TypeInt64, 0, nil, "b", "c"),     // v2
 			Valu("b", OpLoad, TypeInt64, 0, nil, "ptr", "mem1"), // v3
 			Valu("c", OpNeg64, TypeInt64, 0, nil, "b"),          // v4
-			Valu("mem1", OpStore, TypeMem, 8, nil, "ptr", "v", "mem0"), // v5
-			Valu("mem2", OpStore, TypeMem, 0, nil, "ptr", "a", "mem1"),
+			Valu("mem1", OpStore, TypeMem, 0, TypeInt64, "ptr", "v", "mem0"), // v5
+			Valu("mem2", OpStore, TypeMem, 0, TypeInt64, "ptr", "a", "mem1"),
 			Valu("ptr", OpConst64, TypeInt64, 0xABCD, nil),
 			Valu("v", OpConst64, TypeInt64, 12, nil),
 			Goto("exit")),

@@ -41,7 +41,7 @@ func makeConstShiftFunc(c *Config, amount int64, op Op, typ Type) fun {
 			Valu("load", OpLoad, typ, 0, nil, "argptr", "mem"),
 			Valu("c", OpConst64, TypeUInt64, amount, nil),
 			Valu("shift", op, typ, 0, nil, "load", "c"),
-			Valu("store", OpStore, TypeMem, 8, TypeUInt64, "resptr", "shift", "mem"),
+			Valu("store", OpStore, TypeMem, 0, TypeUInt64, "resptr", "shift", "mem"),
 			Exit("store")))
 	Compile(fun.f)
 	return fun
@@ -101,7 +101,7 @@ func makeShiftExtensionFunc(c *Config, amount int64, lshift, rshift Op, typ Type
 			Valu("c", OpConst64, TypeUInt64, amount, nil),
 			Valu("lshift", lshift, typ, 0, nil, "load", "c"),
 			Valu("rshift", rshift, typ, 0, nil, "lshift", "c"),
-			Valu("store", OpStore, TypeMem, 8, TypeUInt64, "resptr", "rshift", "mem"),
+			Valu("store", OpStore, TypeMem, 0, TypeUInt64, "resptr", "rshift", "mem"),
 			Exit("store")))
 	Compile(fun.f)
 	return fun

@@ -128,17 +128,15 @@ func (v *Value) auxString() string {
 		return fmt.Sprintf(" [%d]", v.AuxInt32())
 	case auxInt64, auxInt128:
 		return fmt.Sprintf(" [%d]", v.AuxInt)
-	case auxSizeAndAlign:
-		return fmt.Sprintf(" [%s]", SizeAndAlign(v.AuxInt))
 	case auxFloat32, auxFloat64:
 		return fmt.Sprintf(" [%g]", v.AuxFloat())
 	case auxString:
 		return fmt.Sprintf(" {%q}", v.Aux)
-	case auxSym:
+	case auxSym, auxTyp:
 		if v.Aux != nil {
 			return fmt.Sprintf(" {%v}", v.Aux)
 		}
-	case auxSymOff, auxSymInt32:
+	case auxSymOff, auxSymInt32, auxTypSize:
 		s := ""
 		if v.Aux != nil {
 			s = fmt.Sprintf(" {%v}", v.Aux)
@@ -153,12 +151,6 @@ func (v *Value) auxString() string {
 			s = fmt.Sprintf(" {%v}", v.Aux)
 		}
 		return s + fmt.Sprintf(" [%s]", v.AuxValAndOff())
-	case auxSymSizeAndAlign:
-		s := ""
-		if v.Aux != nil {
-			s = fmt.Sprintf(" {%v}", v.Aux)
-		}
-		return s + fmt.Sprintf(" [%s]", SizeAndAlign(v.AuxInt))
 	}
 	return ""
 }
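With the size gone from Store's AuxInt, its printed form carries only the {type}; Move and Zero, which keep a size, print both. A self-contained sketch of that formatting split (the aux-kind names are modeled on the real ones, but this code is a stand-in, not the compiler's):

package main

import "fmt"

type auxKind int

const (
	auxTyp     auxKind = iota // aux holds a type (Store)
	auxTypSize                // aux holds a type, AuxInt a size (Move, Zero)
)

func auxString(kind auxKind, auxInt int64, aux interface{}) string {
	switch kind {
	case auxTyp:
		if aux != nil {
			return fmt.Sprintf(" {%v}", aux)
		}
	case auxTypSize:
		s := ""
		if aux != nil {
			s = fmt.Sprintf(" {%v}", aux)
		}
		return s + fmt.Sprintf(" [%d]", auxInt)
	}
	return ""
}

func main() {
	fmt.Println("Store" + auxString(auxTyp, 0, "int64"))    // Store {int64}
	fmt.Println("Move" + auxString(auxTypSize, 8, "int64")) // Move {int64} [8]
}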

@@ -173,26 +173,21 @@ func writebarrier(f *Func) {
 		for _, w := range stores {
 			var val *Value
 			ptr := w.Args[0]
-			siz := w.AuxInt
 			var typ interface{}
 			if w.Op != OpStoreWB {
 				typ = &ExternSymbol{Typ: f.Config.fe.TypeUintptr(), Sym: w.Aux.(Type).Symbol()}
 			}
 			pos = w.Pos
-			var op Op
 			var fn *obj.LSym
 			switch w.Op {
 			case OpStoreWB:
-				op = OpStore
 				fn = writebarrierptr
 				val = w.Args[1]
 			case OpMoveWB:
-				op = OpMove
 				fn = typedmemmove
 				val = w.Args[1]
 			case OpZeroWB:
-				op = OpZero
 				fn = typedmemclr
 			}
@@ -201,10 +196,15 @@ func writebarrier(f *Func) {
 			memThen = wbcall(pos, bThen, fn, typ, ptr, val, memThen, sp, sb, volatile)
 			// else block: normal store
-			if op == OpZero {
-				memElse = bElse.NewValue2I(pos, op, TypeMem, siz, ptr, memElse)
-			} else {
-				memElse = bElse.NewValue3I(pos, op, TypeMem, siz, ptr, val, memElse)
+			switch w.Op {
+			case OpStoreWB:
+				memElse = bElse.NewValue3A(pos, OpStore, TypeMem, w.Aux, ptr, val, memElse)
+			case OpMoveWB:
+				memElse = bElse.NewValue3I(pos, OpMove, TypeMem, w.AuxInt, ptr, val, memElse)
+				memElse.Aux = w.Aux
+			case OpZeroWB:
+				memElse = bElse.NewValue2I(pos, OpZero, TypeMem, w.AuxInt, ptr, memElse)
+				memElse.Aux = w.Aux
 			}
 			if f.NoWB {
@@ -270,8 +270,9 @@ func wbcall(pos src.XPos, b *Block, fn *obj.LSym, typ interface{}, ptr, val, mem
 		aux := &AutoSymbol{Typ: t, Node: tmp}
 		mem = b.NewValue1A(pos, OpVarDef, TypeMem, tmp, mem)
 		tmpaddr := b.NewValue1A(pos, OpAddr, t.PtrTo(), aux, sp)
-		siz := MakeSizeAndAlign(t.Size(), t.Alignment()).Int64()
+		siz := t.Size()
 		mem = b.NewValue3I(pos, OpMove, TypeMem, siz, tmpaddr, val, mem)
+		mem.Aux = t
 		val = tmpaddr
 	}
@@ -282,19 +283,19 @@ func wbcall(pos src.XPos, b *Block, fn *obj.LSym, typ interface{}, ptr, val, mem
 		taddr := b.NewValue1A(pos, OpAddr, config.fe.TypeUintptr(), typ, sb)
 		off = round(off, taddr.Type.Alignment())
 		arg := b.NewValue1I(pos, OpOffPtr, taddr.Type.PtrTo(), off, sp)
-		mem = b.NewValue3I(pos, OpStore, TypeMem, ptr.Type.Size(), arg, taddr, mem)
+		mem = b.NewValue3A(pos, OpStore, TypeMem, ptr.Type, arg, taddr, mem)
 		off += taddr.Type.Size()
 	}
 	off = round(off, ptr.Type.Alignment())
 	arg := b.NewValue1I(pos, OpOffPtr, ptr.Type.PtrTo(), off, sp)
-	mem = b.NewValue3I(pos, OpStore, TypeMem, ptr.Type.Size(), arg, ptr, mem)
+	mem = b.NewValue3A(pos, OpStore, TypeMem, ptr.Type, arg, ptr, mem)
 	off += ptr.Type.Size()
 	if val != nil {
 		off = round(off, val.Type.Alignment())
 		arg = b.NewValue1I(pos, OpOffPtr, val.Type.PtrTo(), off, sp)
-		mem = b.NewValue3I(pos, OpStore, TypeMem, val.Type.Size(), arg, val, mem)
+		mem = b.NewValue3A(pos, OpStore, TypeMem, val.Type, arg, val, mem)
 		off += val.Type.Size()
 	}
 	off = round(off, config.PtrSize)
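Conceptually, the pass guards each write-barrier op with a runtime flag: the then-branch calls into the runtime (writebarrierptr, typedmemmove, typedmemclr) and the else-branch issues the plain Store, Move, or Zero built above. A much-simplified standalone model of that shape (the flag and helper here are stand-ins, not the runtime's):

package main

import "fmt"

var writeBarrierEnabled = false // stand-in for the runtime's flag

// storePointer models the lowering of one OpStoreWB.
func storePointer(slot **int, val *int) {
	if writeBarrierEnabled {
		// then block: wbcall(...) hands slot and val to writebarrierptr,
		// which performs the store and records it for the GC.
		fmt.Println("call writebarrierptr(slot, val)")
		*slot = val // done inside the runtime in the real thing
	} else {
		// else block: the plain (Store {ptrType} slot val mem).
		*slot = val
	}
}

func main() {
	x := 42
	var p *int
	storePointer(&p, &x)
	fmt.Println(*p) // 42
}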

@@ -17,8 +17,8 @@ func TestWriteBarrierStoreOrder(t *testing.T) {
 			Valu("sp", OpSP, TypeInvalid, 0, nil),
 			Valu("v", OpConstNil, ptrType, 0, nil),
 			Valu("addr1", OpAddr, ptrType, 0, nil, "sb"),
-			Valu("wb2", OpStore, TypeMem, 8, ptrType, "addr1", "v", "wb1"),
-			Valu("wb1", OpStore, TypeMem, 8, ptrType, "addr1", "v", "start"), // wb1 and wb2 are out of order
+			Valu("wb2", OpStore, TypeMem, 0, ptrType, "addr1", "v", "wb1"),
+			Valu("wb1", OpStore, TypeMem, 0, ptrType, "addr1", "v", "start"), // wb1 and wb2 are out of order
 			Goto("exit")),
 		Bloc("exit",
 			Exit("wb2")))
@@ -44,7 +44,7 @@ func TestWriteBarrierPhi(t *testing.T) {
 			Valu("phi", OpPhi, TypeMem, 0, nil, "start", "wb"),
 			Valu("v", OpConstNil, ptrType, 0, nil),
 			Valu("addr", OpAddr, ptrType, 0, nil, "sb"),
-			Valu("wb", OpStore, TypeMem, 8, ptrType, "addr", "v", "phi"), // has write barrier
+			Valu("wb", OpStore, TypeMem, 0, ptrType, "addr", "v", "phi"), // has write barrier
 			Goto("loop")))
 	CheckFunc(fun.f)