go/src/cmd/internal/ssa/lowerAmd64.go

309 lines
5.3 KiB
Go
Raw Normal View History

// autogenerated from rulegen/lower_amd64.rules: do not edit!
// generated with: go run rulegen/rulegen.go rulegen/lower_amd64.rules lowerAmd64 lowerAmd64.go
package ssa
// lowerAmd64 rewrites a single generic SSA value v into amd64-specific
// machine ops, applying at most one lowering/peephole rule per call.
// It reports whether a rewrite was performed; a false return means no
// rule matched and v was left unchanged.
//
// NOTE(review): per the header, this file is autogenerated from
// rulegen/lower_amd64.rules — fix issues in the .rules file and
// regenerate rather than hand-editing this function.
//
// Every rule body follows the same generated shape:
//  1. destructure v's operands and test the pattern (and any cond),
//  2. on mismatch, goto this rule's endN label to fall through to the
//     next rule for the same opcode,
//  3. on match, rewrite v in place (Op, Aux, Args) and return true.
func lowerAmd64(v *Value) bool {
switch v.Op {
case OpADDQ:
// match: (ADDQ x (Const [c]))
// cond:
// result: (ADDCQ [c] x)
{
x := v.Args[0]
if v.Args[1].Op != OpConst {
goto end0
}
c := v.Args[1].Aux
v.Op = OpADDCQ
// NOTE(review): v.Aux is zeroed and then immediately reassigned to c
// two lines down — a harmless dead store emitted by the generator's
// fixed "reset then fill" sequence. Same pattern recurs in every rule.
v.Aux = nil
v.Args = v.argstorage[:0]
v.Aux = c
v.AddArg(x)
return true
}
end0:
;
// match: (ADDQ (Const [c]) x)
// cond:
// result: (ADDCQ [c] x)
// Commutative mirror of the rule above: constant on the left.
{
if v.Args[0].Op != OpConst {
goto end1
}
c := v.Args[0].Aux
x := v.Args[1]
v.Op = OpADDCQ
v.Aux = nil
v.Args = v.argstorage[:0]
v.Aux = c
v.AddArg(x)
return true
}
end1:
;
case OpAdd:
// match: (Add <t> x y)
// cond: is64BitInt(t)
// result: (ADDQ x y)
{
t := v.Type
x := v.Args[0]
y := v.Args[1]
if !(is64BitInt(t)) {
goto end2
}
v.Op = OpADDQ
v.Aux = nil
v.Args = v.argstorage[:0]
v.AddArg(x)
v.AddArg(y)
return true
}
end2:
;
// match: (Add <t> x y)
// cond: is32BitInt(t)
// result: (ADDL x y)
{
t := v.Type
x := v.Args[0]
y := v.Args[1]
if !(is32BitInt(t)) {
goto end3
}
v.Op = OpADDL
v.Aux = nil
v.Args = v.argstorage[:0]
v.AddArg(x)
v.AddArg(y)
return true
}
end3:
;
case OpCMPQ:
// match: (CMPQ x (Const [c]))
// cond:
// result: (CMPCQ x [c])
{
x := v.Args[0]
if v.Args[1].Op != OpConst {
goto end4
}
c := v.Args[1].Aux
v.Op = OpCMPCQ
v.Aux = nil
v.Args = v.argstorage[:0]
v.AddArg(x)
v.Aux = c
return true
}
end4:
;
// match: (CMPQ (Const [c]) x)
// cond:
// result: (InvertFlags (CMPCQ <TypeFlags> x [c]))
// CMPCQ only takes the constant on the right, so the swapped-operand
// form is wrapped in InvertFlags to let flag consumers compensate.
{
if v.Args[0].Op != OpConst {
goto end5
}
c := v.Args[0].Aux
x := v.Args[1]
v.Op = OpInvertFlags
v.Aux = nil
v.Args = v.argstorage[:0]
// v0 is an auxiliary value created in v's block; the generator passes
// TypeInvalid to NewValue and sets the real type on the next line.
v0 := v.Block.NewValue(OpCMPCQ, TypeInvalid, nil)
v0.Type = TypeFlags
v0.AddArg(x)
v0.Aux = c
v.AddArg(v0)
return true
}
end5:
;
case OpLess:
// match: (Less x y)
// cond: is64BitInt(v.Args[0].Type) && isSigned(v.Args[0].Type)
// result: (SETL (CMPQ <TypeFlags> x y))
{
x := v.Args[0]
y := v.Args[1]
if !(is64BitInt(v.Args[0].Type) && isSigned(v.Args[0].Type)) {
goto end6
}
v.Op = OpSETL
v.Aux = nil
v.Args = v.argstorage[:0]
v0 := v.Block.NewValue(OpCMPQ, TypeInvalid, nil)
v0.Type = TypeFlags
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
return true
}
end6:
;
case OpLoadFP:
// match: (LoadFP <t> [offset] mem)
// cond: typeSize(t) == 8
// result: (LoadFP8 <t> [offset] mem)
{
t := v.Type
offset := v.Aux
mem := v.Args[0]
if !(typeSize(t) == 8) {
goto end7
}
v.Op = OpLoadFP8
v.Aux = nil
v.Args = v.argstorage[:0]
// NOTE(review): t was read from v.Type above, so this restores the
// value it already has — another generator dead store, harmless.
v.Type = t
v.Aux = offset
v.AddArg(mem)
return true
}
end7:
;
case OpLoadSP:
// match: (LoadSP <t> [offset] mem)
// cond: typeSize(t) == 8
// result: (LoadSP8 <t> [offset] mem)
{
t := v.Type
offset := v.Aux
mem := v.Args[0]
if !(typeSize(t) == 8) {
goto end8
}
v.Op = OpLoadSP8
v.Aux = nil
v.Args = v.argstorage[:0]
v.Type = t
v.Aux = offset
v.AddArg(mem)
return true
}
end8:
;
case OpSETL:
// match: (SETL (InvertFlags x))
// cond:
// result: (SETGE x)
// Peephole: "less than" over inverted flags is "greater or equal"
// over the original flags, so the InvertFlags wrapper is elided.
{
if v.Args[0].Op != OpInvertFlags {
goto end9
}
x := v.Args[0].Args[0]
v.Op = OpSETGE
v.Aux = nil
v.Args = v.argstorage[:0]
v.AddArg(x)
return true
}
end9:
;
case OpSUBQ:
// match: (SUBQ x (Const [c]))
// cond:
// result: (SUBCQ x [c])
{
x := v.Args[0]
if v.Args[1].Op != OpConst {
goto end10
}
c := v.Args[1].Aux
v.Op = OpSUBCQ
v.Aux = nil
v.Args = v.argstorage[:0]
v.AddArg(x)
v.Aux = c
return true
}
end10:
;
// match: (SUBQ <t> (Const [c]) x)
// cond:
// result: (NEGQ (SUBCQ <t> x [c]))
// Subtraction is not commutative: c - x is rewritten as -(x - c).
{
t := v.Type
if v.Args[0].Op != OpConst {
goto end11
}
c := v.Args[0].Aux
x := v.Args[1]
v.Op = OpNEGQ
v.Aux = nil
v.Args = v.argstorage[:0]
v0 := v.Block.NewValue(OpSUBCQ, TypeInvalid, nil)
v0.Type = t
v0.AddArg(x)
v0.Aux = c
v.AddArg(v0)
return true
}
end11:
;
case OpStoreFP:
// match: (StoreFP [offset] val mem)
// cond: typeSize(val.Type) == 8
// result: (StoreFP8 [offset] val mem)
{
offset := v.Aux
val := v.Args[0]
mem := v.Args[1]
if !(typeSize(val.Type) == 8) {
goto end12
}
v.Op = OpStoreFP8
v.Aux = nil
v.Args = v.argstorage[:0]
v.Aux = offset
v.AddArg(val)
v.AddArg(mem)
return true
}
end12:
;
case OpStoreSP:
// match: (StoreSP [offset] val mem)
// cond: typeSize(val.Type) == 8
// result: (StoreSP8 [offset] val mem)
{
offset := v.Aux
val := v.Args[0]
mem := v.Args[1]
if !(typeSize(val.Type) == 8) {
goto end13
}
v.Op = OpStoreSP8
v.Aux = nil
v.Args = v.argstorage[:0]
v.Aux = offset
v.AddArg(val)
v.AddArg(mem)
return true
}
end13:
;
case OpSub:
// match: (Sub <t> x y)
// cond: is64BitInt(t)
// result: (SUBQ x y)
{
t := v.Type
x := v.Args[0]
y := v.Args[1]
if !(is64BitInt(t)) {
goto end14
}
v.Op = OpSUBQ
v.Aux = nil
v.Args = v.argstorage[:0]
v.AddArg(x)
v.AddArg(y)
return true
}
end14:
}
// No rule matched for v.Op; v is unchanged.
return false
}