cmd/compile: extend loong MOV*idx rules to match ADDshiftLLV

Fixes #76085

I focused on restoring the old behavior and fixing the failing
test/codegen/floats.go:index* tests.

It is probable that this same bug hides elsewhere in this file.
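
For illustration only (this sketch is not part of the original change), the kind of Go code that produces the base-plus-scaled-index pattern looks roughly like the following; the actual codegen checks are the index* functions in test/codegen/floats.go:

// Illustrative sketch, not the real codegen test. On loong64 the address of
// x[i] can be formed as base + (i << 3), which the SSA backend may represent
// as ADDshiftLLV; the extended rules let a load/store through that address be
// selected as an indexed MOV*loadidx/MOV*storeidx, with the shift kept as an
// explicit SLLVconst.
package main

import "fmt"

func loadIndexed(x []float64, i int) float64 {
	return x[i] // indexed load: base register + scaled index
}

func storeIndexed(x []float64, i int, v float64) {
	x[i] = v // indexed store: base register + scaled index
}

func main() {
	x := make([]float64, 8)
	storeIndexed(x, 3, 1.5)
	fmt.Println(loadIndexed(x, 3))
}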

Change-Id: Ibb2cb2be5c7bbeb5eafa9705d998a67380f2b04c
Reviewed-on: https://go-review.googlesource.com/c/go/+/715580
Reviewed-by: abner chenc <chenguoqi@loongson.cn>
Reviewed-by: Keith Randall <khr@google.com>
Auto-Submit: Jorropo <jorropo.pgm@gmail.com>
Reviewed-by: Keith Randall <khr@golang.org>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Michael Knyszek <mknyszek@google.com>
Jorropo 2025-10-28 10:48:18 +01:00 committed by Gopher Robot
parent 46e5e2b09a
commit 30c047d0d0
2 changed files with 395 additions and 15 deletions

src/cmd/compile/internal/ssa/_gen/LOONG64.rules

@@ -612,14 +612,23 @@
// register indexed load
(MOVVload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVVloadidx ptr idx mem)
(MOVVload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVVloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVWUload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVWUloadidx ptr idx mem)
(MOVWUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVWUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVWload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVWloadidx ptr idx mem)
(MOVWload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVWloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVHUload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVHUloadidx ptr idx mem)
(MOVHUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVHUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVHload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVHloadidx ptr idx mem)
(MOVHload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVHloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVBUload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVBUloadidx ptr idx mem)
(MOVBUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVBUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVBload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVBloadidx ptr idx mem)
(MOVBload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVBloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVFload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVFloadidx ptr idx mem)
(MOVFload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVFloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVDload [off] {sym} (ADDV ptr idx) mem) && off == 0 && sym == nil => (MOVDloadidx ptr idx mem)
(MOVDload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem) && off == 0 && sym == nil => (MOVDloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
(MOVVloadidx ptr (MOVVconst [c]) mem) && is32Bit(c) => (MOVVload [int32(c)] ptr mem)
(MOVVloadidx (MOVVconst [c]) ptr mem) && is32Bit(c) => (MOVVload [int32(c)] ptr mem)
(MOVWUloadidx ptr (MOVVconst [c]) mem) && is32Bit(c) => (MOVWUload [int32(c)] ptr mem)
@@ -641,11 +650,17 @@
// register indexed store
(MOVVstore [off] {sym} (ADDV ptr idx) val mem) && off == 0 && sym == nil => (MOVVstoreidx ptr idx val mem)
(MOVVstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem) && off == 0 && sym == nil => (MOVVstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
(MOVWstore [off] {sym} (ADDV ptr idx) val mem) && off == 0 && sym == nil => (MOVWstoreidx ptr idx val mem)
(MOVWstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem) && off == 0 && sym == nil => (MOVWstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
(MOVHstore [off] {sym} (ADDV ptr idx) val mem) && off == 0 && sym == nil => (MOVHstoreidx ptr idx val mem)
(MOVHstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem) && off == 0 && sym == nil => (MOVHstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
(MOVBstore [off] {sym} (ADDV ptr idx) val mem) && off == 0 && sym == nil => (MOVBstoreidx ptr idx val mem)
(MOVBstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem) && off == 0 && sym == nil => (MOVBstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
(MOVFstore [off] {sym} (ADDV ptr idx) val mem) && off == 0 && sym == nil => (MOVFstoreidx ptr idx val mem)
(MOVFstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem) && off == 0 && sym == nil => (MOVFstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
(MOVDstore [off] {sym} (ADDV ptr idx) val mem) && off == 0 && sym == nil => (MOVDstoreidx ptr idx val mem)
(MOVDstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem) && off == 0 && sym == nil => (MOVDstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
(MOVVstoreidx ptr (MOVVconst [c]) val mem) && is32Bit(c) => (MOVVstore [int32(c)] ptr val mem)
(MOVVstoreidx (MOVVconst [c]) idx val mem) && is32Bit(c) => (MOVVstore [int32(c)] idx val mem)
(MOVWstoreidx ptr (MOVVconst [c]) val mem) && is32Bit(c) => (MOVWstore [int32(c)] ptr val mem)

src/cmd/compile/internal/ssa/rewriteLOONG64.go

@@ -2368,6 +2368,7 @@ func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVBUload [off] {sym} ptr (MOVBstore [off] {sym} ptr x _))
// result: (MOVBUreg x)
for {
@@ -2447,6 +2448,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVBUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVBUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVBUloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(read8(sym, int64(off)))])
@@ -2675,6 +2699,7 @@ func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVBload [off] {sym} ptr (MOVBstore [off] {sym} ptr x _))
// result: (MOVBreg x)
for {
@@ -2754,6 +2779,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVBload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVBloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVBloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
@@ -2880,6 +2928,7 @@ func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
@@ -3047,6 +3096,30 @@
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVBstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVBstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVBstoreidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg4(ptr, v0, val, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v *Value) bool {
@@ -3099,6 +3172,7 @@ func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
// result: (MOVVgpfp val)
for {
@@ -3178,6 +3252,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVDload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVDloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVDloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVDloadidx(v *Value) bool {
@@ -3228,6 +3325,7 @@ func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
// result: (MOVVstore [off] {sym} ptr val mem)
for {
@@ -3310,6 +3408,30 @@
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVDstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVDstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVDstoreidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg4(ptr, v0, val, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v *Value) bool {
@@ -3362,6 +3484,7 @@ func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
// result: (MOVWgpfp val)
for {
@@ -3441,6 +3564,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVFload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVFloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVFloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVFloadidx(v *Value) bool {
@@ -3491,6 +3637,7 @@ func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
// result: (MOVWstore [off] {sym} ptr val mem)
for {
@@ -3573,6 +3720,30 @@
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVFstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVFstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVFstoreidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg4(ptr, v0, val, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v *Value) bool {
@@ -3625,6 +3796,7 @@ func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVHUload [off] {sym} ptr (MOVHstore [off] {sym} ptr x _))
// result: (MOVHUreg x)
for {
@@ -3704,6 +3876,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVHUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVHUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVHUloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVHUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
@@ -3894,6 +4089,7 @@ func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVHload [off] {sym} ptr (MOVHstore [off] {sym} ptr x _))
// result: (MOVHreg x)
for {
@@ -3973,6 +4169,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVHload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVHloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVHloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVHload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
@@ -4165,6 +4384,7 @@ func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
@@ -4298,6 +4518,30 @@
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVHstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVHstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVHstoreidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg4(ptr, v0, val, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v *Value) bool {
@@ -4350,6 +4594,7 @@ func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
// result: (MOVVfpgp val)
for {
@@ -4446,6 +4691,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVVload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVVloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVVloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVVload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
@@ -4551,6 +4819,7 @@ func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
// result: (MOVDstore [off] {sym} ptr val mem)
for {
@@ -4633,6 +4902,30 @@
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVVstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVVstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVVstoreidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg4(ptr, v0, val, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v *Value) bool {
@@ -4784,6 +5077,29 @@ func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVWUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVWUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVWUloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVWUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
@@ -5007,6 +5323,7 @@ func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVWload [off] {sym} ptr (MOVWstore [off] {sym} ptr x _))
// result: (MOVWreg x)
for {
@@ -5086,6 +5403,29 @@
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVWload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (MOVWloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
mem := v_1
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVWloadidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVWload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
@@ -5333,6 +5673,7 @@ func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
// result: (MOVFstore [off] {sym} ptr val mem)
for {
@@ -5449,6 +5790,30 @@
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVWstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVWstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpLOONG64ADDshiftLLV {
break
}
shift := auxIntToInt64(v_0.AuxInt)
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpLOONG64MOVWstoreidx)
v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
v0.AuxInt = int64ToAuxInt(shift)
v0.AddArg(idx)
v.AddArg4(ptr, v0, val, mem)
return true
}
return false
}
func rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v *Value) bool {