2016-06-24 14:37:17 -05:00
|
|
|
// autogenerated from gen/PPC64.rules: do not edit!
|
|
|
|
|
// generated with: cd gen; go run *.go
|
|
|
|
|
|
|
|
|
|
package ssa
|
|
|
|
|
|
|
|
|
|
import "math"
|
|
|
|
|
|
|
|
|
|
var _ = math.MinInt8 // in case not otherwise used (keeps the math import legal for rule sets that never reference it)
|
|
|
|
|
// rewriteValuePPC64 dispatches v to the rewrite routine generated for its
// opcode. It reports whether any rewrite rule fired (i.e. whether v was
// changed in place). Opcodes with no PPC64 rules fall through and return false.
func rewriteValuePPC64(v *Value, config *Config) bool {
	switch v.Op {
	case OpPPC64ADD:
		return rewriteValuePPC64_OpPPC64ADD(v, config)
	case OpAdd16:
		return rewriteValuePPC64_OpAdd16(v, config)
	case OpAdd32:
		return rewriteValuePPC64_OpAdd32(v, config)
	case OpAdd32F:
		return rewriteValuePPC64_OpAdd32F(v, config)
	case OpAdd64:
		return rewriteValuePPC64_OpAdd64(v, config)
	case OpAdd64F:
		return rewriteValuePPC64_OpAdd64F(v, config)
	case OpAdd8:
		return rewriteValuePPC64_OpAdd8(v, config)
	case OpAddPtr:
		return rewriteValuePPC64_OpAddPtr(v, config)
	case OpAddr:
		return rewriteValuePPC64_OpAddr(v, config)
	case OpAnd16:
		return rewriteValuePPC64_OpAnd16(v, config)
	case OpAnd32:
		return rewriteValuePPC64_OpAnd32(v, config)
	case OpAnd64:
		return rewriteValuePPC64_OpAnd64(v, config)
	case OpAnd8:
		return rewriteValuePPC64_OpAnd8(v, config)
	case OpConst16:
		return rewriteValuePPC64_OpConst16(v, config)
	case OpConst32:
		return rewriteValuePPC64_OpConst32(v, config)
	case OpConst32F:
		return rewriteValuePPC64_OpConst32F(v, config)
	case OpConst64:
		return rewriteValuePPC64_OpConst64(v, config)
	case OpConst64F:
		return rewriteValuePPC64_OpConst64F(v, config)
	case OpConst8:
		return rewriteValuePPC64_OpConst8(v, config)
	case OpConstBool:
		return rewriteValuePPC64_OpConstBool(v, config)
	case OpConstNil:
		return rewriteValuePPC64_OpConstNil(v, config)
	case OpDiv32F:
		return rewriteValuePPC64_OpDiv32F(v, config)
	case OpDiv64F:
		return rewriteValuePPC64_OpDiv64F(v, config)
	case OpEq16:
		return rewriteValuePPC64_OpEq16(v, config)
	case OpEq32:
		return rewriteValuePPC64_OpEq32(v, config)
	case OpEq64:
		return rewriteValuePPC64_OpEq64(v, config)
	case OpEq64F:
		return rewriteValuePPC64_OpEq64F(v, config)
	case OpEq8:
		return rewriteValuePPC64_OpEq8(v, config)
	case OpEqPtr:
		return rewriteValuePPC64_OpEqPtr(v, config)
	case OpGeq16:
		return rewriteValuePPC64_OpGeq16(v, config)
	case OpGeq16U:
		return rewriteValuePPC64_OpGeq16U(v, config)
	case OpGeq32:
		return rewriteValuePPC64_OpGeq32(v, config)
	case OpGeq32U:
		return rewriteValuePPC64_OpGeq32U(v, config)
	case OpGeq64:
		return rewriteValuePPC64_OpGeq64(v, config)
	case OpGeq64U:
		return rewriteValuePPC64_OpGeq64U(v, config)
	case OpGeq8:
		return rewriteValuePPC64_OpGeq8(v, config)
	case OpGeq8U:
		return rewriteValuePPC64_OpGeq8U(v, config)
	case OpGreater16:
		return rewriteValuePPC64_OpGreater16(v, config)
	case OpGreater16U:
		return rewriteValuePPC64_OpGreater16U(v, config)
	case OpGreater32:
		return rewriteValuePPC64_OpGreater32(v, config)
	case OpGreater32U:
		return rewriteValuePPC64_OpGreater32U(v, config)
	case OpGreater64:
		return rewriteValuePPC64_OpGreater64(v, config)
	case OpGreater64U:
		return rewriteValuePPC64_OpGreater64U(v, config)
	case OpGreater8:
		return rewriteValuePPC64_OpGreater8(v, config)
	case OpGreater8U:
		return rewriteValuePPC64_OpGreater8U(v, config)
	case OpLeq16:
		return rewriteValuePPC64_OpLeq16(v, config)
	case OpLeq16U:
		return rewriteValuePPC64_OpLeq16U(v, config)
	case OpLeq32:
		return rewriteValuePPC64_OpLeq32(v, config)
	case OpLeq32U:
		return rewriteValuePPC64_OpLeq32U(v, config)
	case OpLeq64:
		return rewriteValuePPC64_OpLeq64(v, config)
	case OpLeq64F:
		return rewriteValuePPC64_OpLeq64F(v, config)
	case OpLeq64U:
		return rewriteValuePPC64_OpLeq64U(v, config)
	case OpLeq8:
		return rewriteValuePPC64_OpLeq8(v, config)
	case OpLeq8U:
		return rewriteValuePPC64_OpLeq8U(v, config)
	case OpLess16:
		return rewriteValuePPC64_OpLess16(v, config)
	case OpLess16U:
		return rewriteValuePPC64_OpLess16U(v, config)
	case OpLess32:
		return rewriteValuePPC64_OpLess32(v, config)
	case OpLess32U:
		return rewriteValuePPC64_OpLess32U(v, config)
	case OpLess64:
		return rewriteValuePPC64_OpLess64(v, config)
	case OpLess64F:
		return rewriteValuePPC64_OpLess64F(v, config)
	case OpLess64U:
		return rewriteValuePPC64_OpLess64U(v, config)
	case OpLess8:
		return rewriteValuePPC64_OpLess8(v, config)
	case OpLess8U:
		return rewriteValuePPC64_OpLess8U(v, config)
	case OpLoad:
		return rewriteValuePPC64_OpLoad(v, config)
	case OpPPC64MOVBstore:
		return rewriteValuePPC64_OpPPC64MOVBstore(v, config)
	case OpPPC64MOVDstore:
		return rewriteValuePPC64_OpPPC64MOVDstore(v, config)
	case OpPPC64MOVHstore:
		return rewriteValuePPC64_OpPPC64MOVHstore(v, config)
	case OpPPC64MOVWstore:
		return rewriteValuePPC64_OpPPC64MOVWstore(v, config)
	case OpMul16:
		return rewriteValuePPC64_OpMul16(v, config)
	case OpMul32:
		return rewriteValuePPC64_OpMul32(v, config)
	case OpMul32F:
		return rewriteValuePPC64_OpMul32F(v, config)
	case OpMul64:
		return rewriteValuePPC64_OpMul64(v, config)
	case OpMul64F:
		return rewriteValuePPC64_OpMul64F(v, config)
	case OpMul8:
		return rewriteValuePPC64_OpMul8(v, config)
	case OpNeg16:
		return rewriteValuePPC64_OpNeg16(v, config)
	case OpNeg32:
		return rewriteValuePPC64_OpNeg32(v, config)
	case OpNeg64:
		return rewriteValuePPC64_OpNeg64(v, config)
	case OpNeg8:
		return rewriteValuePPC64_OpNeg8(v, config)
	case OpNeq16:
		return rewriteValuePPC64_OpNeq16(v, config)
	case OpNeq32:
		return rewriteValuePPC64_OpNeq32(v, config)
	case OpNeq64:
		return rewriteValuePPC64_OpNeq64(v, config)
	case OpNeq64F:
		return rewriteValuePPC64_OpNeq64F(v, config)
	case OpNeq8:
		return rewriteValuePPC64_OpNeq8(v, config)
	case OpNeqPtr:
		return rewriteValuePPC64_OpNeqPtr(v, config)
	case OpOffPtr:
		return rewriteValuePPC64_OpOffPtr(v, config)
	case OpOr16:
		return rewriteValuePPC64_OpOr16(v, config)
	case OpOr32:
		return rewriteValuePPC64_OpOr32(v, config)
	case OpOr64:
		return rewriteValuePPC64_OpOr64(v, config)
	case OpOr8:
		return rewriteValuePPC64_OpOr8(v, config)
	case OpSignExt16to32:
		return rewriteValuePPC64_OpSignExt16to32(v, config)
	case OpSignExt16to64:
		return rewriteValuePPC64_OpSignExt16to64(v, config)
	case OpSignExt32to64:
		return rewriteValuePPC64_OpSignExt32to64(v, config)
	case OpSignExt8to16:
		return rewriteValuePPC64_OpSignExt8to16(v, config)
	case OpSignExt8to32:
		return rewriteValuePPC64_OpSignExt8to32(v, config)
	case OpSignExt8to64:
		return rewriteValuePPC64_OpSignExt8to64(v, config)
	case OpStaticCall:
		return rewriteValuePPC64_OpStaticCall(v, config)
	case OpStore:
		return rewriteValuePPC64_OpStore(v, config)
	case OpSub16:
		return rewriteValuePPC64_OpSub16(v, config)
	case OpSub32:
		return rewriteValuePPC64_OpSub32(v, config)
	case OpSub32F:
		return rewriteValuePPC64_OpSub32F(v, config)
	case OpSub64:
		return rewriteValuePPC64_OpSub64(v, config)
	case OpSub64F:
		return rewriteValuePPC64_OpSub64F(v, config)
	case OpSub8:
		return rewriteValuePPC64_OpSub8(v, config)
	case OpSubPtr:
		return rewriteValuePPC64_OpSubPtr(v, config)
	case OpTrunc16to8:
		return rewriteValuePPC64_OpTrunc16to8(v, config)
	case OpTrunc32to16:
		return rewriteValuePPC64_OpTrunc32to16(v, config)
	case OpTrunc32to8:
		return rewriteValuePPC64_OpTrunc32to8(v, config)
	case OpTrunc64to16:
		return rewriteValuePPC64_OpTrunc64to16(v, config)
	case OpTrunc64to32:
		return rewriteValuePPC64_OpTrunc64to32(v, config)
	case OpTrunc64to8:
		return rewriteValuePPC64_OpTrunc64to8(v, config)
	case OpXor16:
		return rewriteValuePPC64_OpXor16(v, config)
	case OpXor32:
		return rewriteValuePPC64_OpXor32(v, config)
	case OpXor64:
		return rewriteValuePPC64_OpXor64(v, config)
	case OpXor8:
		return rewriteValuePPC64_OpXor8(v, config)
	case OpZero:
		return rewriteValuePPC64_OpZero(v, config)
	case OpZeroExt16to32:
		return rewriteValuePPC64_OpZeroExt16to32(v, config)
	case OpZeroExt16to64:
		return rewriteValuePPC64_OpZeroExt16to64(v, config)
	case OpZeroExt32to64:
		return rewriteValuePPC64_OpZeroExt32to64(v, config)
	case OpZeroExt8to16:
		return rewriteValuePPC64_OpZeroExt8to16(v, config)
	case OpZeroExt8to32:
		return rewriteValuePPC64_OpZeroExt8to32(v, config)
	case OpZeroExt8to64:
		return rewriteValuePPC64_OpZeroExt8to64(v, config)
	}
	// No rule matched this opcode.
	return false
}
|
|
|
|
|
// rewriteValuePPC64_OpPPC64ADD strength-reduces an ADD with a MOVDconst
// operand (on either side) into ADDconst, folding the constant into AuxInt.
func rewriteValuePPC64_OpPPC64ADD(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator always binds b; blank-assign keeps it legal when unused
	// match: (ADD (MOVDconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpPPC64MOVDconst {
			break // first arg not a constant; try the mirrored rule
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpPPC64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (MOVDconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpPPC64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpPPC64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
|
|
|
|
|
// rewriteValuePPC64_OpAdd16 lowers the generic Add16 to a 64-bit ADD of
// sign-extended operands. The rule is unconditional, so it always fires.
func rewriteValuePPC64_OpAdd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add16 x y)
	// cond:
	// result: (ADD (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64ADD)
		v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAdd32 lowers the generic Add32 to a 64-bit ADD of
// sign-extended operands. The rule is unconditional, so it always fires.
func rewriteValuePPC64_OpAdd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32 x y)
	// cond:
	// result: (ADD (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64ADD)
		v0 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt32to64, config.fe.TypeInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAdd32F lowers Add32F to the single-precision FADDS op.
func rewriteValuePPC64_OpAdd32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32F x y)
	// cond:
	// result: (FADDS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64FADDS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAdd64 lowers Add64 directly to the machine ADD;
// operands are already 64-bit, so no extension is needed.
func rewriteValuePPC64_OpAdd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAdd64F lowers Add64F to the double-precision FADD op.
func rewriteValuePPC64_OpAdd64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64F x y)
	// cond:
	// result: (FADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64FADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAdd8 lowers the generic Add8 to a 64-bit ADD of
// sign-extended operands. The rule is unconditional, so it always fires.
func rewriteValuePPC64_OpAdd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add8 x y)
	// cond:
	// result: (ADD (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64ADD)
		v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAddPtr lowers pointer addition to the machine ADD
// (pointers are 64-bit on ppc64, so no extension is needed).
func rewriteValuePPC64_OpAddPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAddr lowers an Addr (symbol + base) to MOVDaddr,
// carrying the symbol through Aux unchanged.
func rewriteValuePPC64_OpAddr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVDaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpPPC64MOVDaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAnd16 lowers And16 to a 64-bit AND of zero-extended
// operands (bitwise ops are extension-agnostic, so zero-extension is used).
func rewriteValuePPC64_OpAnd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And16 x y)
	// cond:
	// result: (AND (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64AND)
		v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAnd32 lowers And32 to a 64-bit AND of zero-extended
// operands.
func rewriteValuePPC64_OpAnd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And32 x y)
	// cond:
	// result: (AND (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64AND)
		v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAnd64 lowers And64 directly to the machine AND.
func rewriteValuePPC64_OpAnd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpAnd8 lowers And8 to a 64-bit AND of zero-extended
// operands.
func rewriteValuePPC64_OpAnd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And8 x y)
	// cond:
	// result: (AND (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64AND)
		v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConst16 materializes a 16-bit constant as MOVWconst,
// copying the value through AuxInt.
func rewriteValuePPC64_OpConst16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const16 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpPPC64MOVWconst)
		v.AuxInt = val
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConst32 materializes a 32-bit constant as MOVWconst.
func rewriteValuePPC64_OpConst32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpPPC64MOVWconst)
		v.AuxInt = val
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConst32F materializes a float32 constant as FMOVSconst.
func rewriteValuePPC64_OpConst32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32F [val])
	// cond:
	// result: (FMOVSconst [val])
	for {
		val := v.AuxInt
		v.reset(OpPPC64FMOVSconst)
		v.AuxInt = val
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConst64 materializes a 64-bit constant as MOVDconst.
func rewriteValuePPC64_OpConst64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpPPC64MOVDconst)
		v.AuxInt = val
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConst64F materializes a float64 constant as FMOVDconst.
func rewriteValuePPC64_OpConst64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64F [val])
	// cond:
	// result: (FMOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpPPC64FMOVDconst)
		v.AuxInt = val
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConst8 materializes an 8-bit constant as MOVWconst.
func rewriteValuePPC64_OpConst8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const8 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpPPC64MOVWconst)
		v.AuxInt = val
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConstBool materializes a boolean constant as MOVBconst.
// The local b here shadows the usual block variable and holds the AuxInt
// payload (generator naming artifact).
func rewriteValuePPC64_OpConstBool(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstBool [b])
	// cond:
	// result: (MOVBconst [b])
	for {
		b := v.AuxInt
		v.reset(OpPPC64MOVBconst)
		v.AuxInt = b
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpConstNil lowers the nil pointer constant to
// (MOVDconst [0]).
func rewriteValuePPC64_OpConstNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstNil)
	// cond:
	// result: (MOVDconst [0])
	for {
		v.reset(OpPPC64MOVDconst)
		v.AuxInt = 0
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpDiv32F lowers Div32F to the single-precision FDIVS op.
func rewriteValuePPC64_OpDiv32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32F x y)
	// cond:
	// result: (FDIVS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64FDIVS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpDiv64F lowers Div64F to the double-precision FDIV op.
func rewriteValuePPC64_OpDiv64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64F x y)
	// cond:
	// result: (FDIV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64FDIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpEq16 lowers Eq16 to Equal over a 32-bit compare of
// zero-extended operands (equality is unaffected by the choice of extension).
func rewriteValuePPC64_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64Equal)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpEq32 lowers Eq32 to Equal over a 32-bit CMPW.
func rewriteValuePPC64_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64Equal)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpEq64 lowers Eq64 to Equal over a 64-bit CMP.
func rewriteValuePPC64_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64Equal)
		v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpEq64F lowers Eq64F to Equal over the floating-point
// unordered compare FCMPU.
func rewriteValuePPC64_OpEq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64Equal)
		v0 := b.NewValue0(v.Line, OpPPC64FCMPU, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpEq8 lowers Eq8 to Equal over a 32-bit compare of
// zero-extended operands (equality is unaffected by the choice of extension).
func rewriteValuePPC64_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64Equal)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpEqPtr lowers pointer equality to Equal over a 64-bit CMP.
func rewriteValuePPC64_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64Equal)
		v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq16 lowers signed >= on 16-bit values to
// GreaterEqual over a 32-bit signed compare of sign-extended operands.
func rewriteValuePPC64_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq16U lowers unsigned >= on 16-bit values to
// GreaterEqual over an unsigned compare of zero-extended operands.
// NOTE(review): this rule emits the 64-bit CMPU while Greater16U emits the
// 32-bit CMPWU; both are correct on zero-extended 32-bit values, but the
// inconsistency looks like a generator/rules artifact — confirm against
// gen/PPC64.rules.
func rewriteValuePPC64_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqual (CMPU (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq32 lowers signed 32-bit >= to GreaterEqual over CMPW.
func rewriteValuePPC64_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (GreaterEqual (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq32U lowers unsigned 32-bit >= to GreaterEqual over
// the unsigned compare CMPU.
// NOTE(review): Greater32U uses CMPWU here; see the Geq16U note — confirm
// against gen/PPC64.rules.
func rewriteValuePPC64_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (GreaterEqual (CMPU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq64 lowers signed 64-bit >= to GreaterEqual over CMP.
func rewriteValuePPC64_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (GreaterEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq64U lowers unsigned 64-bit >= to GreaterEqual over
// the unsigned compare CMPU.
func rewriteValuePPC64_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (GreaterEqual (CMPU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq8 lowers signed >= on 8-bit values to GreaterEqual
// over a 32-bit signed compare of sign-extended operands.
func rewriteValuePPC64_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGeq8U lowers unsigned >= on 8-bit values to
// GreaterEqual over an unsigned compare of zero-extended operands.
// NOTE(review): uses 64-bit CMPU while Greater8U uses CMPWU; see the Geq16U
// note — confirm against gen/PPC64.rules.
func rewriteValuePPC64_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U x y)
	// cond:
	// result: (GreaterEqual (CMPU (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGreater16 lowers signed > on 16-bit values to
// GreaterThan over a 32-bit signed compare of sign-extended operands.
func rewriteValuePPC64_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterThan)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGreater16U lowers unsigned > on 16-bit values to
// GreaterThan over the 32-bit unsigned compare CMPWU of zero-extended
// operands.
func rewriteValuePPC64_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThan (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterThan)
		v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGreater32 lowers signed 32-bit > to GreaterThan over
// CMPW.
func rewriteValuePPC64_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterThan)
		v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGreater32U lowers unsigned 32-bit > to GreaterThan over
// the 32-bit unsigned compare CMPWU.
func rewriteValuePPC64_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThan (CMPWU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterThan)
		v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
// rewriteValuePPC64_OpGreater64 lowers signed 64-bit > to GreaterThan over
// CMP.
func rewriteValuePPC64_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpPPC64GreaterThan)
		v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
|
|
|
|
|
func rewriteValuePPC64_OpGreater64U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Greater64U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (GreaterThan (CMPU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64GreaterThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpGreater8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Greater8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64GreaterThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpGreater8U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Greater8U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (GreaterThan (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64GreaterThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq16U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq16U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPW x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq32U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq32U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPWU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMP x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq64F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq64F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (FCMPU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64FCMPU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq64U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq64U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLeq8U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Leq8U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessEqual (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess16U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less16U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPW x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess32U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less32U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPWU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMP x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess64F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less64F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (FCMPU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64FCMPU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess64U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less64U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLess8U(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Less8U x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (LessThan (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64LessThan)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPWU, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpLoad(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: (is64BitInt(t) || isPtr(t))
|
|
|
|
|
// result: (MOVDload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is64BitInt(t) || isPtr(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVDload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is32BitInt(t) && isSigned(t)
|
|
|
|
|
// result: (MOVWload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is32BitInt(t) && isSigned(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVWload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is32BitInt(t) && !isSigned(t)
|
|
|
|
|
// result: (MOVWZload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is32BitInt(t) && !isSigned(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVWZload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is16BitInt(t) && isSigned(t)
|
|
|
|
|
// result: (MOVHload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is16BitInt(t) && isSigned(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVHload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is16BitInt(t) && !isSigned(t)
|
|
|
|
|
// result: (MOVHZload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is16BitInt(t) && !isSigned(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVHZload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
2016-07-06 13:32:52 -07:00
|
|
|
// cond: (t.IsBoolean() || (is8BitInt(t) && isSigned(t)))
|
2016-06-24 14:37:17 -05:00
|
|
|
// result: (MOVBload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
2016-07-06 13:32:52 -07:00
|
|
|
if !(t.IsBoolean() || (is8BitInt(t) && isSigned(t))) {
|
2016-06-24 14:37:17 -05:00
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVBload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is8BitInt(t) && !isSigned(t)
|
|
|
|
|
// result: (MOVBZload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is8BitInt(t) && !isSigned(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVBZload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is32BitFloat(t)
|
|
|
|
|
// result: (FMOVSload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is32BitFloat(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64FMOVSload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
// match: (Load <t> ptr mem)
|
|
|
|
|
// cond: is64BitFloat(t)
|
|
|
|
|
// result: (FMOVDload ptr mem)
|
|
|
|
|
for {
|
|
|
|
|
t := v.Type
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
mem := v.Args[1]
|
|
|
|
|
if !(is64BitFloat(t)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64FMOVDload)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
2016-07-06 13:32:52 -07:00
|
|
|
func rewriteValuePPC64_OpPPC64MOVBstore(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (MOVBstore [off1] {sym} (ADDconst [off2] x) val mem)
|
|
|
|
|
// cond: is16Bit(off1+off2)
|
|
|
|
|
// result: (MOVBstore [off1+off2] {sym} x val mem)
|
|
|
|
|
for {
|
|
|
|
|
off1 := v.AuxInt
|
|
|
|
|
sym := v.Aux
|
|
|
|
|
v_0 := v.Args[0]
|
|
|
|
|
if v_0.Op != OpPPC64ADDconst {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
off2 := v_0.AuxInt
|
|
|
|
|
x := v_0.Args[0]
|
|
|
|
|
val := v.Args[1]
|
|
|
|
|
mem := v.Args[2]
|
|
|
|
|
if !(is16Bit(off1 + off2)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVBstore)
|
|
|
|
|
v.AuxInt = off1 + off2
|
|
|
|
|
v.Aux = sym
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(val)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpPPC64MOVDstore(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (MOVDstore [off1] {sym} (ADDconst [off2] x) val mem)
|
|
|
|
|
// cond: is16Bit(off1+off2)
|
|
|
|
|
// result: (MOVDstore [off1+off2] {sym} x val mem)
|
|
|
|
|
for {
|
|
|
|
|
off1 := v.AuxInt
|
|
|
|
|
sym := v.Aux
|
|
|
|
|
v_0 := v.Args[0]
|
|
|
|
|
if v_0.Op != OpPPC64ADDconst {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
off2 := v_0.AuxInt
|
|
|
|
|
x := v_0.Args[0]
|
|
|
|
|
val := v.Args[1]
|
|
|
|
|
mem := v.Args[2]
|
|
|
|
|
if !(is16Bit(off1 + off2)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVDstore)
|
|
|
|
|
v.AuxInt = off1 + off2
|
|
|
|
|
v.Aux = sym
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(val)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpPPC64MOVHstore(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (MOVHstore [off1] {sym} (ADDconst [off2] x) val mem)
|
|
|
|
|
// cond: is16Bit(off1+off2)
|
|
|
|
|
// result: (MOVHstore [off1+off2] {sym} x val mem)
|
|
|
|
|
for {
|
|
|
|
|
off1 := v.AuxInt
|
|
|
|
|
sym := v.Aux
|
|
|
|
|
v_0 := v.Args[0]
|
|
|
|
|
if v_0.Op != OpPPC64ADDconst {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
off2 := v_0.AuxInt
|
|
|
|
|
x := v_0.Args[0]
|
|
|
|
|
val := v.Args[1]
|
|
|
|
|
mem := v.Args[2]
|
|
|
|
|
if !(is16Bit(off1 + off2)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVHstore)
|
|
|
|
|
v.AuxInt = off1 + off2
|
|
|
|
|
v.Aux = sym
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(val)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpPPC64MOVWstore(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (MOVWstore [off1] {sym} (ADDconst [off2] x) val mem)
|
|
|
|
|
// cond: is16Bit(off1+off2)
|
|
|
|
|
// result: (MOVWstore [off1+off2] {sym} x val mem)
|
|
|
|
|
for {
|
|
|
|
|
off1 := v.AuxInt
|
|
|
|
|
sym := v.Aux
|
|
|
|
|
v_0 := v.Args[0]
|
|
|
|
|
if v_0.Op != OpPPC64ADDconst {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
off2 := v_0.AuxInt
|
|
|
|
|
x := v_0.Args[0]
|
|
|
|
|
val := v.Args[1]
|
|
|
|
|
mem := v.Args[2]
|
|
|
|
|
if !(is16Bit(off1 + off2)) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
v.reset(OpPPC64MOVWstore)
|
|
|
|
|
v.AuxInt = off1 + off2
|
|
|
|
|
v.Aux = sym
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(val)
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
2016-06-24 14:37:17 -05:00
|
|
|
func rewriteValuePPC64_OpMul16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Mul16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MULLW (SignExt16to32 x) (SignExt16to32 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64MULLW)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpMul32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Mul32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MULLW x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64MULLW)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpMul32F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Mul32F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (FMULS x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64FMULS)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpMul64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Mul64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MULLD x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64MULLD)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpMul64F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Mul64F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (FMUL x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64FMUL)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpMul8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Mul8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MULLW (SignExt8to32 x) (SignExt8to32 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64MULLW)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeg16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neg16 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NEG (ZeroExt16to64 x))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64NEG)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeg32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neg32 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NEG (ZeroExt32to64 x))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64NEG)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeg64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neg64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NEG x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64NEG)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeg8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neg8 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NEG (ZeroExt8to64 x))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64NEG)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeq16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neq16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64NotEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeq32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neq32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NotEqual (CMPW x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64NotEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeq64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neq64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NotEqual (CMP x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64NotEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeq64F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neq64F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NotEqual (FCMPU x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64NotEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64FCMPU, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeq8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Neq8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64NotEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMPW, TypeFlags)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v1.AddArg(x)
|
|
|
|
|
v0.AddArg(v1)
|
|
|
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
|
|
|
|
v2.AddArg(y)
|
|
|
|
|
v0.AddArg(v2)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpNeqPtr(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (NeqPtr x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (NotEqual (CMP x y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64NotEqual)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64CMP, TypeFlags)
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v0.AddArg(y)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpOffPtr(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (OffPtr [off] ptr)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (ADD (MOVDconst <config.Frontend().TypeInt64()> [off]) ptr)
|
|
|
|
|
for {
|
|
|
|
|
off := v.AuxInt
|
|
|
|
|
ptr := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64ADD)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpPPC64MOVDconst, config.Frontend().TypeInt64())
|
|
|
|
|
v0.AuxInt = off
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v.AddArg(ptr)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpOr16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Or16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (OR (ZeroExt16to64 x) (ZeroExt16to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64OR)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpOr32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Or32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (OR (ZeroExt32to64 x) (ZeroExt32to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64OR)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpOr64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Or64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (OR x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64OR)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpOr8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Or8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (OR (ZeroExt8to64 x) (ZeroExt8to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64OR)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSignExt16to32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SignExt16to32 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVHreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVHreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSignExt16to64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SignExt16to64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVHreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVHreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSignExt32to64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SignExt32to64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVWreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVWreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSignExt8to16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SignExt8to16 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSignExt8to32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SignExt8to32 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSignExt8to64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SignExt8to64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpStaticCall(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (StaticCall [argwid] {target} mem)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (CALLstatic [argwid] {target} mem)
|
|
|
|
|
for {
|
|
|
|
|
argwid := v.AuxInt
|
|
|
|
|
target := v.Aux
|
|
|
|
|
mem := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64CALLstatic)
|
|
|
|
|
v.AuxInt = argwid
|
|
|
|
|
v.Aux = target
|
|
|
|
|
v.AddArg(mem)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// rewriteValuePPC64_OpStore lowers a generic Store to the PPC64 store
// instruction whose width matches v.AuxInt (8, 4, 2, or 1 bytes).
// Rules are tried in source order, one per width; if no width matches,
// the function returns false and the value is left untouched.
func rewriteValuePPC64_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store [8] ptr val mem)
	// cond:
	// result: (MOVDstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpPPC64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond:
	// result: (MOVWstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpPPC64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [2] ptr val mem)
	// cond:
	// result: (MOVHstore ptr val mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpPPC64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [1] ptr val mem)
	// cond:
	// result: (MOVBstore ptr val mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpPPC64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// No rule for this store width (e.g. an unhandled AuxInt).
	return false
}
|
|
|
|
|
func rewriteValuePPC64_OpSub16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Sub16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (SUB (SignExt16to64 x) (SignExt16to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64SUB)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to64, config.fe.TypeInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSub32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Sub32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (SUB x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64SUB)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSub32F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Sub32F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (FSUBS x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64FSUBS)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSub64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Sub64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (SUB x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64SUB)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSub64F(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Sub64F x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (FSUB x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64FSUB)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSub8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Sub8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (SUB (SignExt8to64 x) (SignExt8to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64SUB)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to64, config.fe.TypeInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpSubPtr(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (SubPtr x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (SUB x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64SUB)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpTrunc16to8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Trunc16to8 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpTrunc32to16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Trunc32to16 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVHreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVHreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpTrunc32to8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Trunc32to8 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpTrunc64to16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Trunc64to16 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVHreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVHreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpTrunc64to32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Trunc64to32 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVWreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVWreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpTrunc64to8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Trunc64to8 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpXor16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Xor16 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (XOR (ZeroExt16to64 x) (ZeroExt16to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64XOR)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to64, config.fe.TypeUInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpXor32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Xor32 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (XOR (ZeroExt32to64 x) (ZeroExt32to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64XOR)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt32to64, config.fe.TypeUInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpXor64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Xor64 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (XOR x y)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64XOR)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
v.AddArg(y)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpXor8(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (Xor8 x y)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (XOR (ZeroExt8to64 x) (ZeroExt8to64 y))
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
y := v.Args[1]
|
|
|
|
|
v.reset(OpPPC64XOR)
|
|
|
|
|
v0 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
|
|
|
|
|
v0.AddArg(x)
|
|
|
|
|
v.AddArg(v0)
|
|
|
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to64, config.fe.TypeUInt64())
|
|
|
|
|
v1.AddArg(y)
|
|
|
|
|
v.AddArg(v1)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// rewriteValuePPC64_OpZero lowers a generic Zero op (clear s bytes at
// destptr) into a sequence of PPC64 store-zero-constant instructions.
// The size and alignment are packed into v.AuxInt and decoded with
// SizeAndAlign. Rules are tried in source order, so for each size the
// widest store permitted by the alignment wins; sizes with no matching
// rule here return false.
func rewriteValuePPC64_OpZero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Zero [s] _ mem)
	// cond: SizeAndAlign(s).Size() == 0
	// result: mem
	for {
		s := v.AuxInt
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 0) {
			break
		}
		// Zero-length zeroing is a no-op: forward the memory state.
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 1
	// result: (MOVBstoreconst [0] destptr mem)
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 1) {
			break
		}
		v.reset(OpPPC64MOVBstoreconst)
		v.AuxInt = 0
		v.AddArg(destptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstoreconst [0] destptr mem)
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpPPC64MOVHstoreconst)
		v.AuxInt = 0
		v.AddArg(destptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 2
	// result: (MOVBstoreconst [makeValAndOff(0,1)] destptr (MOVBstoreconst [0] destptr mem))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 2) {
			break
		}
		// Unaligned 2 bytes: two byte stores at offsets 1 and 0.
		v.reset(OpPPC64MOVBstoreconst)
		v.AuxInt = makeValAndOff(0, 1)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVBstoreconst, TypeMem)
		v0.AuxInt = 0
		v0.AddArg(destptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstoreconst [0] destptr mem)
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpPPC64MOVWstoreconst)
		v.AuxInt = 0
		v.AddArg(destptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstoreconst [makeValAndOff(0,2)] destptr (MOVHstoreconst [0] destptr mem))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		// Halfword-aligned 4 bytes: two halfword stores at offsets 2 and 0.
		v.reset(OpPPC64MOVHstoreconst)
		v.AuxInt = makeValAndOff(0, 2)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVHstoreconst, TypeMem)
		v0.AuxInt = 0
		v0.AddArg(destptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 4
	// result: (MOVBstoreconst [makeValAndOff(0,3)] destptr (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVBstoreconst [makeValAndOff(0,1)] destptr (MOVBstoreconst [0] destptr mem))))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4) {
			break
		}
		// Unaligned 4 bytes: four byte stores at offsets 3..0.
		v.reset(OpPPC64MOVBstoreconst)
		v.AuxInt = makeValAndOff(0, 3)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVBstoreconst, TypeMem)
		v0.AuxInt = makeValAndOff(0, 2)
		v0.AddArg(destptr)
		v1 := b.NewValue0(v.Line, OpPPC64MOVBstoreconst, TypeMem)
		v1.AuxInt = makeValAndOff(0, 1)
		v1.AddArg(destptr)
		v2 := b.NewValue0(v.Line, OpPPC64MOVBstoreconst, TypeMem)
		v2.AuxInt = 0
		v2.AddArg(destptr)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVDstoreconst [0] destptr mem)
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpPPC64MOVDstoreconst)
		v.AuxInt = 0
		v.AddArg(destptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		// Word-aligned 8 bytes: two word stores at offsets 4 and 0.
		v.reset(OpPPC64MOVWstoreconst)
		v.AuxInt = makeValAndOff(0, 4)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVWstoreconst, TypeMem)
		v0.AuxInt = 0
		v0.AddArg(destptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstoreconst [makeValAndOff(0,6)] destptr (MOVHstoreconst [makeValAndOff(0,4)] destptr (MOVHstoreconst [makeValAndOff(0,2)] destptr (MOVHstoreconst [0] destptr mem))))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		// Halfword-aligned 8 bytes: four halfword stores at offsets 6..0.
		v.reset(OpPPC64MOVHstoreconst)
		v.AuxInt = makeValAndOff(0, 6)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVHstoreconst, TypeMem)
		v0.AuxInt = makeValAndOff(0, 4)
		v0.AddArg(destptr)
		v1 := b.NewValue0(v.Line, OpPPC64MOVHstoreconst, TypeMem)
		v1.AuxInt = makeValAndOff(0, 2)
		v1.AddArg(destptr)
		v2 := b.NewValue0(v.Line, OpPPC64MOVHstoreconst, TypeMem)
		v2.AuxInt = 0
		v2.AddArg(destptr)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 3
	// result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVBstoreconst [makeValAndOff(0,1)] destptr (MOVBstoreconst [0] destptr mem)))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 3) {
			break
		}
		// 3 bytes: three byte stores at offsets 2..0.
		v.reset(OpPPC64MOVBstoreconst)
		v.AuxInt = makeValAndOff(0, 2)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVBstoreconst, TypeMem)
		v0.AuxInt = makeValAndOff(0, 1)
		v0.AddArg(destptr)
		v1 := b.NewValue0(v.Line, OpPPC64MOVBstoreconst, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(destptr)
		v1.AddArg(mem)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVDstoreconst [makeValAndOff(0,8)] destptr (MOVDstoreconst [0] destptr mem))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpPPC64MOVDstoreconst)
		v.AuxInt = makeValAndOff(0, 8)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVDstoreconst, TypeMem)
		v0.AuxInt = 0
		v0.AddArg(destptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVDstoreconst [makeValAndOff(0,16)] destptr (MOVDstoreconst [makeValAndOff(0,8)] destptr (MOVDstoreconst [0] destptr mem)))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 24 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpPPC64MOVDstoreconst)
		v.AuxInt = makeValAndOff(0, 16)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVDstoreconst, TypeMem)
		v0.AuxInt = makeValAndOff(0, 8)
		v0.AddArg(destptr)
		v1 := b.NewValue0(v.Line, OpPPC64MOVDstoreconst, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(destptr)
		v1.AddArg(mem)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Zero [s] destptr mem)
	// cond: SizeAndAlign(s).Size() == 32 && SizeAndAlign(s).Align()%8 == 0
	// result: (MOVDstoreconst [makeValAndOff(0,24)] destptr (MOVDstoreconst [makeValAndOff(0,16)] destptr (MOVDstoreconst [makeValAndOff(0,8)] destptr (MOVDstoreconst [0] destptr mem))))
	for {
		s := v.AuxInt
		destptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 32 && SizeAndAlign(s).Align()%8 == 0) {
			break
		}
		v.reset(OpPPC64MOVDstoreconst)
		v.AuxInt = makeValAndOff(0, 24)
		v.AddArg(destptr)
		v0 := b.NewValue0(v.Line, OpPPC64MOVDstoreconst, TypeMem)
		v0.AuxInt = makeValAndOff(0, 16)
		v0.AddArg(destptr)
		v1 := b.NewValue0(v.Line, OpPPC64MOVDstoreconst, TypeMem)
		v1.AuxInt = makeValAndOff(0, 8)
		v1.AddArg(destptr)
		v2 := b.NewValue0(v.Line, OpPPC64MOVDstoreconst, TypeMem)
		v2.AuxInt = 0
		v2.AddArg(destptr)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// No rule matched this size/alignment combination.
	return false
}
|
|
|
|
|
func rewriteValuePPC64_OpZeroExt16to32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (ZeroExt16to32 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVHZreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVHZreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpZeroExt16to64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (ZeroExt16to64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVHZreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVHZreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpZeroExt32to64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (ZeroExt32to64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVWZreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVWZreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpZeroExt8to16(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (ZeroExt8to16 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBZreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBZreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpZeroExt8to32(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (ZeroExt8to32 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBZreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBZreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func rewriteValuePPC64_OpZeroExt8to64(v *Value, config *Config) bool {
|
|
|
|
|
b := v.Block
|
|
|
|
|
_ = b
|
|
|
|
|
// match: (ZeroExt8to64 x)
|
|
|
|
|
// cond:
|
|
|
|
|
// result: (MOVBZreg x)
|
|
|
|
|
for {
|
|
|
|
|
x := v.Args[0]
|
|
|
|
|
v.reset(OpPPC64MOVBZreg)
|
|
|
|
|
v.AddArg(x)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// rewriteBlockPPC64 rewrites control-flow blocks for PPC64.
// A generic If block whose control is a PPC64 comparison pseudo-op
// (Equal, NotEqual, LessThan, ...) becomes the corresponding
// conditional block kind (EQ, NE, LT, ...); any other If control is
// materialized as (NE (CMPconst [0] cond)). NE blocks whose control is
// (CMPconst [0] (<cmp> cc)) are collapsed back to the direct
// conditional block form. Rules within each case are tried in order;
// returns true when a rewrite fired.
func rewriteBlockPPC64(b *Block) bool {
	switch b.Kind {
	case BlockIf:
		// match: (If (Equal cc) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64Equal {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64EQ
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (NotEqual cc) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64NotEqual {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64NE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (LessThan cc) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64LessThan {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64LT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (LessEqual cc) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64LessEqual {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64LE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (GreaterThan cc) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64GreaterThan {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64GT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (GreaterEqual cc) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64GreaterEqual {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64GE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// Fallback: any other boolean control is compared against zero.
		// match: (If cond yes no)
		// cond:
		// result: (NE (CMPconst [0] cond) yes no)
		for {
			v := b.Control
			cond := b.Control
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64NE
			v0 := b.NewValue0(v.Line, OpPPC64CMPconst, TypeFlags)
			v0.AuxInt = 0
			v0.AddArg(cond)
			b.SetControl(v0)
			_ = yes
			_ = no
			return true
		}
	case BlockPPC64NE:
		// match: (NE (CMPconst [0] (Equal cc)) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpPPC64Equal {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64EQ
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (NotEqual cc)) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpPPC64NotEqual {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64NE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (LessThan cc)) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpPPC64LessThan {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64LT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (LessEqual cc)) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpPPC64LessEqual {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64LE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (GreaterThan cc)) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpPPC64GreaterThan {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64GT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (GreaterEqual cc)) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpPPC64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpPPC64GreaterEqual {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockPPC64GE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
	}
	// No block rewrite applied.
	return false
}
|