// autogenerated from gen/ARM64.rules: do not edit!
// generated with: cd gen; go run *.go

package ssa

import "math"

var _ = math.MinInt8 // in case not otherwise used
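// rewriteValueARM64 dispatches on v.Op to the per-opcode rewrite function
// below and reports whether a rewrite rule fired; values whose opcode has
// no case here are left unchanged.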
func rewriteValueARM64(v *Value, config *Config) bool {
	switch v.Op {
	case OpARM64ADDconst:
		return rewriteValueARM64_OpARM64ADDconst(v, config)
	case OpAdd16:
		return rewriteValueARM64_OpAdd16(v, config)
	case OpAdd32:
		return rewriteValueARM64_OpAdd32(v, config)
	case OpAdd32F:
		return rewriteValueARM64_OpAdd32F(v, config)
	case OpAdd64:
		return rewriteValueARM64_OpAdd64(v, config)
	case OpAdd64F:
		return rewriteValueARM64_OpAdd64F(v, config)
	case OpAdd8:
		return rewriteValueARM64_OpAdd8(v, config)
	case OpAddPtr:
		return rewriteValueARM64_OpAddPtr(v, config)
	case OpAddr:
		return rewriteValueARM64_OpAddr(v, config)
	case OpAnd16:
		return rewriteValueARM64_OpAnd16(v, config)
	case OpAnd32:
		return rewriteValueARM64_OpAnd32(v, config)
	case OpAnd64:
		return rewriteValueARM64_OpAnd64(v, config)
	case OpAnd8:
		return rewriteValueARM64_OpAnd8(v, config)
	case OpAndB:
		return rewriteValueARM64_OpAndB(v, config)
	case OpClosureCall:
		return rewriteValueARM64_OpClosureCall(v, config)
	case OpCom16:
		return rewriteValueARM64_OpCom16(v, config)
	case OpCom32:
		return rewriteValueARM64_OpCom32(v, config)
	case OpCom64:
		return rewriteValueARM64_OpCom64(v, config)
	case OpCom8:
		return rewriteValueARM64_OpCom8(v, config)
	case OpConst16:
		return rewriteValueARM64_OpConst16(v, config)
	case OpConst32:
		return rewriteValueARM64_OpConst32(v, config)
	case OpConst32F:
		return rewriteValueARM64_OpConst32F(v, config)
	case OpConst64:
		return rewriteValueARM64_OpConst64(v, config)
	case OpConst64F:
		return rewriteValueARM64_OpConst64F(v, config)
	case OpConst8:
		return rewriteValueARM64_OpConst8(v, config)
	case OpConstBool:
		return rewriteValueARM64_OpConstBool(v, config)
	case OpConstNil:
		return rewriteValueARM64_OpConstNil(v, config)
	case OpConvert:
		return rewriteValueARM64_OpConvert(v, config)
	case OpCvt32Fto32:
		return rewriteValueARM64_OpCvt32Fto32(v, config)
	case OpCvt32Fto32U:
		return rewriteValueARM64_OpCvt32Fto32U(v, config)
	case OpCvt32Fto64:
		return rewriteValueARM64_OpCvt32Fto64(v, config)
	case OpCvt32Fto64F:
		return rewriteValueARM64_OpCvt32Fto64F(v, config)
	case OpCvt32Uto32F:
		return rewriteValueARM64_OpCvt32Uto32F(v, config)
	case OpCvt32Uto64F:
		return rewriteValueARM64_OpCvt32Uto64F(v, config)
	case OpCvt32to32F:
		return rewriteValueARM64_OpCvt32to32F(v, config)
	case OpCvt32to64F:
		return rewriteValueARM64_OpCvt32to64F(v, config)
	case OpCvt64Fto32:
		return rewriteValueARM64_OpCvt64Fto32(v, config)
	case OpCvt64Fto32F:
		return rewriteValueARM64_OpCvt64Fto32F(v, config)
	case OpCvt64Fto32U:
		return rewriteValueARM64_OpCvt64Fto32U(v, config)
	case OpCvt64Fto64:
		return rewriteValueARM64_OpCvt64Fto64(v, config)
	case OpCvt64to32F:
		return rewriteValueARM64_OpCvt64to32F(v, config)
	case OpCvt64to64F:
		return rewriteValueARM64_OpCvt64to64F(v, config)
	case OpDeferCall:
		return rewriteValueARM64_OpDeferCall(v, config)
	case OpDiv16:
		return rewriteValueARM64_OpDiv16(v, config)
	case OpDiv16u:
		return rewriteValueARM64_OpDiv16u(v, config)
	case OpDiv32:
		return rewriteValueARM64_OpDiv32(v, config)
	case OpDiv32F:
		return rewriteValueARM64_OpDiv32F(v, config)
	case OpDiv32u:
		return rewriteValueARM64_OpDiv32u(v, config)
	case OpDiv64:
		return rewriteValueARM64_OpDiv64(v, config)
	case OpDiv64F:
		return rewriteValueARM64_OpDiv64F(v, config)
	case OpDiv64u:
		return rewriteValueARM64_OpDiv64u(v, config)
	case OpDiv8:
		return rewriteValueARM64_OpDiv8(v, config)
	case OpDiv8u:
		return rewriteValueARM64_OpDiv8u(v, config)
	case OpEq16:
		return rewriteValueARM64_OpEq16(v, config)
	case OpEq32:
		return rewriteValueARM64_OpEq32(v, config)
	case OpEq32F:
		return rewriteValueARM64_OpEq32F(v, config)
	case OpEq64:
		return rewriteValueARM64_OpEq64(v, config)
	case OpEq64F:
		return rewriteValueARM64_OpEq64F(v, config)
	case OpEq8:
		return rewriteValueARM64_OpEq8(v, config)
	case OpEqB:
		return rewriteValueARM64_OpEqB(v, config)
	case OpEqPtr:
		return rewriteValueARM64_OpEqPtr(v, config)
	case OpARM64FMOVDload:
		return rewriteValueARM64_OpARM64FMOVDload(v, config)
	case OpARM64FMOVDstore:
		return rewriteValueARM64_OpARM64FMOVDstore(v, config)
	case OpARM64FMOVSload:
		return rewriteValueARM64_OpARM64FMOVSload(v, config)
	case OpARM64FMOVSstore:
		return rewriteValueARM64_OpARM64FMOVSstore(v, config)
	case OpGeq16:
		return rewriteValueARM64_OpGeq16(v, config)
	case OpGeq16U:
		return rewriteValueARM64_OpGeq16U(v, config)
	case OpGeq32:
		return rewriteValueARM64_OpGeq32(v, config)
	case OpGeq32F:
		return rewriteValueARM64_OpGeq32F(v, config)
	case OpGeq32U:
		return rewriteValueARM64_OpGeq32U(v, config)
	case OpGeq64:
		return rewriteValueARM64_OpGeq64(v, config)
	case OpGeq64F:
		return rewriteValueARM64_OpGeq64F(v, config)
	case OpGeq64U:
		return rewriteValueARM64_OpGeq64U(v, config)
	case OpGeq8:
		return rewriteValueARM64_OpGeq8(v, config)
	case OpGeq8U:
		return rewriteValueARM64_OpGeq8U(v, config)
	case OpGetClosurePtr:
		return rewriteValueARM64_OpGetClosurePtr(v, config)
	case OpGoCall:
		return rewriteValueARM64_OpGoCall(v, config)
	case OpGreater16:
		return rewriteValueARM64_OpGreater16(v, config)
	case OpGreater16U:
		return rewriteValueARM64_OpGreater16U(v, config)
	case OpGreater32:
		return rewriteValueARM64_OpGreater32(v, config)
	case OpGreater32F:
		return rewriteValueARM64_OpGreater32F(v, config)
	case OpGreater32U:
		return rewriteValueARM64_OpGreater32U(v, config)
	case OpGreater64:
		return rewriteValueARM64_OpGreater64(v, config)
	case OpGreater64F:
		return rewriteValueARM64_OpGreater64F(v, config)
	case OpGreater64U:
		return rewriteValueARM64_OpGreater64U(v, config)
	case OpGreater8:
		return rewriteValueARM64_OpGreater8(v, config)
	case OpGreater8U:
		return rewriteValueARM64_OpGreater8U(v, config)
	case OpInterCall:
		return rewriteValueARM64_OpInterCall(v, config)
	case OpIsInBounds:
		return rewriteValueARM64_OpIsInBounds(v, config)
	case OpIsNonNil:
		return rewriteValueARM64_OpIsNonNil(v, config)
	case OpIsSliceInBounds:
		return rewriteValueARM64_OpIsSliceInBounds(v, config)
	case OpLeq16:
		return rewriteValueARM64_OpLeq16(v, config)
	case OpLeq16U:
		return rewriteValueARM64_OpLeq16U(v, config)
	case OpLeq32:
		return rewriteValueARM64_OpLeq32(v, config)
	case OpLeq32F:
		return rewriteValueARM64_OpLeq32F(v, config)
	case OpLeq32U:
		return rewriteValueARM64_OpLeq32U(v, config)
	case OpLeq64:
		return rewriteValueARM64_OpLeq64(v, config)
	case OpLeq64F:
		return rewriteValueARM64_OpLeq64F(v, config)
	case OpLeq64U:
		return rewriteValueARM64_OpLeq64U(v, config)
	case OpLeq8:
		return rewriteValueARM64_OpLeq8(v, config)
	case OpLeq8U:
		return rewriteValueARM64_OpLeq8U(v, config)
	case OpLess16:
		return rewriteValueARM64_OpLess16(v, config)
	case OpLess16U:
		return rewriteValueARM64_OpLess16U(v, config)
	case OpLess32:
		return rewriteValueARM64_OpLess32(v, config)
	case OpLess32F:
		return rewriteValueARM64_OpLess32F(v, config)
	case OpLess32U:
		return rewriteValueARM64_OpLess32U(v, config)
	case OpLess64:
		return rewriteValueARM64_OpLess64(v, config)
	case OpLess64F:
		return rewriteValueARM64_OpLess64F(v, config)
	case OpLess64U:
		return rewriteValueARM64_OpLess64U(v, config)
	case OpLess8:
		return rewriteValueARM64_OpLess8(v, config)
	case OpLess8U:
		return rewriteValueARM64_OpLess8U(v, config)
	case OpLoad:
		return rewriteValueARM64_OpLoad(v, config)
	case OpARM64MOVBUload:
		return rewriteValueARM64_OpARM64MOVBUload(v, config)
	case OpARM64MOVBload:
		return rewriteValueARM64_OpARM64MOVBload(v, config)
	case OpARM64MOVBstore:
		return rewriteValueARM64_OpARM64MOVBstore(v, config)
	case OpARM64MOVDload:
		return rewriteValueARM64_OpARM64MOVDload(v, config)
	case OpARM64MOVDstore:
		return rewriteValueARM64_OpARM64MOVDstore(v, config)
	case OpARM64MOVHUload:
		return rewriteValueARM64_OpARM64MOVHUload(v, config)
	case OpARM64MOVHload:
		return rewriteValueARM64_OpARM64MOVHload(v, config)
	case OpARM64MOVHstore:
		return rewriteValueARM64_OpARM64MOVHstore(v, config)
	case OpARM64MOVWUload:
		return rewriteValueARM64_OpARM64MOVWUload(v, config)
	case OpARM64MOVWload:
		return rewriteValueARM64_OpARM64MOVWload(v, config)
	case OpARM64MOVWstore:
		return rewriteValueARM64_OpARM64MOVWstore(v, config)
	case OpMod16:
		return rewriteValueARM64_OpMod16(v, config)
	case OpMod16u:
		return rewriteValueARM64_OpMod16u(v, config)
	case OpMod32:
		return rewriteValueARM64_OpMod32(v, config)
	case OpMod32u:
		return rewriteValueARM64_OpMod32u(v, config)
	case OpMod64:
		return rewriteValueARM64_OpMod64(v, config)
	case OpMod64u:
		return rewriteValueARM64_OpMod64u(v, config)
	case OpMod8:
		return rewriteValueARM64_OpMod8(v, config)
	case OpMod8u:
		return rewriteValueARM64_OpMod8u(v, config)
	case OpMul16:
		return rewriteValueARM64_OpMul16(v, config)
	case OpMul32:
		return rewriteValueARM64_OpMul32(v, config)
	case OpMul32F:
		return rewriteValueARM64_OpMul32F(v, config)
	case OpMul64:
		return rewriteValueARM64_OpMul64(v, config)
	case OpMul64F:
		return rewriteValueARM64_OpMul64F(v, config)
	case OpMul8:
		return rewriteValueARM64_OpMul8(v, config)
	case OpNeg16:
		return rewriteValueARM64_OpNeg16(v, config)
	case OpNeg32:
		return rewriteValueARM64_OpNeg32(v, config)
	case OpNeg32F:
		return rewriteValueARM64_OpNeg32F(v, config)
	case OpNeg64:
		return rewriteValueARM64_OpNeg64(v, config)
	case OpNeg64F:
		return rewriteValueARM64_OpNeg64F(v, config)
	case OpNeg8:
		return rewriteValueARM64_OpNeg8(v, config)
	case OpNeq16:
		return rewriteValueARM64_OpNeq16(v, config)
	case OpNeq32:
		return rewriteValueARM64_OpNeq32(v, config)
	case OpNeq32F:
		return rewriteValueARM64_OpNeq32F(v, config)
	case OpNeq64:
		return rewriteValueARM64_OpNeq64(v, config)
	case OpNeq64F:
		return rewriteValueARM64_OpNeq64F(v, config)
	case OpNeq8:
		return rewriteValueARM64_OpNeq8(v, config)
	case OpNeqB:
		return rewriteValueARM64_OpNeqB(v, config)
	case OpNeqPtr:
		return rewriteValueARM64_OpNeqPtr(v, config)
	case OpNilCheck:
		return rewriteValueARM64_OpNilCheck(v, config)
	case OpNot:
		return rewriteValueARM64_OpNot(v, config)
	case OpOffPtr:
		return rewriteValueARM64_OpOffPtr(v, config)
	case OpOr16:
		return rewriteValueARM64_OpOr16(v, config)
	case OpOr32:
		return rewriteValueARM64_OpOr32(v, config)
	case OpOr64:
		return rewriteValueARM64_OpOr64(v, config)
	case OpOr8:
		return rewriteValueARM64_OpOr8(v, config)
	case OpOrB:
		return rewriteValueARM64_OpOrB(v, config)
	case OpSignExt16to32:
		return rewriteValueARM64_OpSignExt16to32(v, config)
	case OpSignExt16to64:
		return rewriteValueARM64_OpSignExt16to64(v, config)
	case OpSignExt32to64:
		return rewriteValueARM64_OpSignExt32to64(v, config)
	case OpSignExt8to16:
		return rewriteValueARM64_OpSignExt8to16(v, config)
	case OpSignExt8to32:
		return rewriteValueARM64_OpSignExt8to32(v, config)
	case OpSignExt8to64:
		return rewriteValueARM64_OpSignExt8to64(v, config)
	case OpStaticCall:
		return rewriteValueARM64_OpStaticCall(v, config)
	case OpStore:
		return rewriteValueARM64_OpStore(v, config)
	case OpSub16:
		return rewriteValueARM64_OpSub16(v, config)
	case OpSub32:
		return rewriteValueARM64_OpSub32(v, config)
	case OpSub32F:
		return rewriteValueARM64_OpSub32F(v, config)
	case OpSub64:
		return rewriteValueARM64_OpSub64(v, config)
	case OpSub64F:
		return rewriteValueARM64_OpSub64F(v, config)
	case OpSub8:
		return rewriteValueARM64_OpSub8(v, config)
	case OpSubPtr:
		return rewriteValueARM64_OpSubPtr(v, config)
	case OpTrunc16to8:
		return rewriteValueARM64_OpTrunc16to8(v, config)
	case OpTrunc32to16:
		return rewriteValueARM64_OpTrunc32to16(v, config)
	case OpTrunc32to8:
		return rewriteValueARM64_OpTrunc32to8(v, config)
	case OpTrunc64to16:
		return rewriteValueARM64_OpTrunc64to16(v, config)
	case OpTrunc64to32:
		return rewriteValueARM64_OpTrunc64to32(v, config)
	case OpTrunc64to8:
		return rewriteValueARM64_OpTrunc64to8(v, config)
	case OpXor16:
		return rewriteValueARM64_OpXor16(v, config)
	case OpXor32:
		return rewriteValueARM64_OpXor32(v, config)
	case OpXor64:
		return rewriteValueARM64_OpXor64(v, config)
	case OpXor8:
		return rewriteValueARM64_OpXor8(v, config)
	case OpZeroExt16to32:
		return rewriteValueARM64_OpZeroExt16to32(v, config)
	case OpZeroExt16to64:
		return rewriteValueARM64_OpZeroExt16to64(v, config)
	case OpZeroExt32to64:
		return rewriteValueARM64_OpZeroExt32to64(v, config)
	case OpZeroExt8to16:
		return rewriteValueARM64_OpZeroExt8to16(v, config)
	case OpZeroExt8to32:
		return rewriteValueARM64_OpZeroExt8to32(v, config)
	case OpZeroExt8to64:
		return rewriteValueARM64_OpZeroExt8to64(v, config)
	}
	return false
}
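// Every rule below is emitted in the same generated shape: the "match",
// "cond", and "result" comments restate the rule from gen/ARM64.rules, and
// the for loop is not a real loop — it runs at most once and exists only so
// that break can abandon a partially matched rule. On success the value is
// rewritten in place via v.reset/v.AddArg and the function returns true.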
func rewriteValueARM64_OpARM64ADDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVDaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	return false
}
func rewriteValueARM64_OpAdd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add16 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAdd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAdd32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32F x y)
	// cond:
	// result: (FADDS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAdd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAdd64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64F x y)
	// cond:
	// result: (FADDD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAdd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add8 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAddPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAddr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVDaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
func rewriteValueARM64_OpAnd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAnd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAnd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAnd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpAndB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpClosureCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpCom16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com16 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCom32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com32 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpConst16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const16 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32F [val])
	// cond:
	// result: (FMOVSconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64F [val])
	// cond:
	// result: (FMOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const8 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConstBool(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstBool [b])
	// cond:
	// result: (MOVDconst [b])
	for {
		b := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = b
		return true
	}
}
func rewriteValueARM64_OpConstNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstNil)
	// cond:
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
}
func rewriteValueARM64_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert x mem)
	// cond:
	// result: (MOVDconvert x mem)
	for {
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVDconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (FCVTZSSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSSW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto32U x)
	// cond:
	// result: (FCVTZUSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUSW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (FCVTZSS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (FCVTSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTSD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Uto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Uto32F x)
	// cond:
	// result: (UCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Uto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Uto64F x)
	// cond:
	// result: (UCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to32F x)
	// cond:
	// result: (SCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to64F x)
	// cond:
	// result: (SCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (FCVTZSDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (FCVTDS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTDS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32U x)
	// cond:
	// result: (FCVTZUDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (FCVTZSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64to32F x)
	// cond:
	// result: (SCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64to64F x)
	// cond:
	// result: (SCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpDeferCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (DeferCall [argwid] mem)
	// cond:
	// result: (CALLdefer [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpARM64CALLdefer)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
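// ARM64 has no 16- or 8-bit divide, so the sub-word division rules below
// widen their operands first: signed variants go through SignExt*to32 and
// unsigned variants through ZeroExt*to32, built as fresh SSA values with
// b.NewValue0 before the 32-bit DIVW/UDIVW is emitted.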
func rewriteValueARM64_OpDiv16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16 x y)
	// cond:
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16u x y)
	// cond:
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32 x y)
	// cond:
	// result: (DIVW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32F x y)
	// cond:
	// result: (FDIVS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32u x y)
	// cond:
	// result: (UDIVW x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64 x y)
	// cond:
	// result: (DIV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64F x y)
	// cond:
	// result: (FDIVD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u x y)
	// cond:
	// result: (UDIV x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8 x y)
	// cond:
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8u x y)
	// cond:
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (Equal (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
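// EqB uses no flags-based comparison: XOR of two booleans is 0 exactly when
// they are equal, and XORconst [1] then inverts the low bit, so the result
// is 1 iff the inputs match.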
func rewriteValueARM64_OpEqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqB x y)
	// cond:
	// result: (XORconst [1] (XOR <config.fe.TypeBool()> x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Line, OpARM64XOR, config.fe.TypeBool())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
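// The machine-level load/store rules below fold addressing arithmetic into
// the instruction's aux offset: a constant add (ADDconst) is absorbed
// directly, and a symbol address (MOVDaddr) is absorbed when canMergeSym
// allows the two symbols to be combined via mergeSym.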
func rewriteValueARM64_OpARM64FMOVDload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond:
	// result: (FMOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64FMOVDstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond:
	// result: (FMOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64FMOVSload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond:
	// result: (FMOVSload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64FMOVSstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond:
	// result: (FMOVSstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueARM64_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (GreaterEqual (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (GreaterEqualU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (GreaterEqual (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (GreaterEqualU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGetClosurePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpARM64LoweredGetClosurePtr)
		return true
	}
}
func rewriteValueARM64_OpGoCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GoCall [argwid] mem)
	// cond:
	// result: (CALLgo [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpARM64CALLgo)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (GreaterThan (FCMPS x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThanU (CMPW x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (GreaterThan (FCMPD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (GreaterThanU (CMP x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpInterCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (LessThanU (CMP idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpIsNonNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = 0
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (LessEqualU (CMP idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16 x y)
	// cond:
	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq16U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq16U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqualU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq32 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqual (CMPW x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq32F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq32F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (GreaterEqual (FCMPS y x))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64GreaterEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq32U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq32U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqualU (CMPW x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqualU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq64 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqual (CMP x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq64F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq64F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (GreaterEqual (FCMPD y x))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64GreaterEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq64U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq64U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqualU (CMP x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqualU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq8 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLeq8U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Leq8U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessEqualU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less16 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThan)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess16U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less16U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThanU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less32 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThan (CMPW x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThan)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess32F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less32F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (GreaterThan (FCMPS y x))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64GreaterThan)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess32U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less32U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThanU (CMPW x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThanU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less64 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThan (CMP x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThan)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess64F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less64F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (GreaterThan (FCMPD y x))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64GreaterThan)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess64U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less64U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThanU (CMP x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThanU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less8 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThan)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLess8U(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Less8U x y)
|
||
|
|
// cond:
|
||
|
|
// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64LessThanU)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpLoad(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: t.IsBoolean()
|
||
|
|
// result: (MOVBUload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(t.IsBoolean()) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVBUload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is8BitInt(t) && isSigned(t))
|
||
|
|
// result: (MOVBload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is8BitInt(t) && isSigned(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVBload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is8BitInt(t) && !isSigned(t))
|
||
|
|
// result: (MOVBUload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is8BitInt(t) && !isSigned(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVBUload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is16BitInt(t) && isSigned(t))
|
||
|
|
// result: (MOVHload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is16BitInt(t) && isSigned(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVHload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is16BitInt(t) && !isSigned(t))
|
||
|
|
// result: (MOVHUload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is16BitInt(t) && !isSigned(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVHUload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is32BitInt(t) && isSigned(t))
|
||
|
|
// result: (MOVWload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is32BitInt(t) && isSigned(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVWload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is32BitInt(t) && !isSigned(t))
|
||
|
|
// result: (MOVWUload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is32BitInt(t) && !isSigned(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVWUload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: (is64BitInt(t) || isPtr(t))
|
||
|
|
// result: (MOVDload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is64BitInt(t) || isPtr(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVDload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: is32BitFloat(t)
|
||
|
|
// result: (FMOVSload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is32BitFloat(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64FMOVSload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Load <t> ptr mem)
|
||
|
|
// cond: is64BitFloat(t)
|
||
|
|
// result: (FMOVDload ptr mem)
|
||
|
|
for {
|
||
|
|
t := v.Type
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(is64BitFloat(t)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64FMOVDload)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVBUload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBUload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVBUload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVBUload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVBload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVBload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVBload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVBstore(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBstore [off1+off2] {sym} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
v.reset(OpARM64MOVBstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVBstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVDload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVDload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVDload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVDload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVDstore(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVDstore [off1+off2] {sym} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
v.reset(OpARM64MOVDstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVDstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVHUload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVHUload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVHUload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVHUload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVHload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVHload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVHload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVHload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVHstore(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVHstore [off1+off2] {sym} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
v.reset(OpARM64MOVHstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVHstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVWUload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVWUload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVWUload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVWUload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVWload(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVWload [off1+off2] {sym} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64MOVWload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVWload)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpARM64MOVWstore(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVWstore [off1+off2] {sym} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64ADDconst {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
v.reset(OpARM64MOVWstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = sym
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
|
||
|
|
// cond: canMergeSym(sym1,sym2)
|
||
|
|
// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
|
||
|
|
for {
|
||
|
|
off1 := v.AuxInt
|
||
|
|
sym1 := v.Aux
|
||
|
|
v_0 := v.Args[0]
|
||
|
|
if v_0.Op != OpARM64MOVDaddr {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
off2 := v_0.AuxInt
|
||
|
|
sym2 := v_0.Aux
|
||
|
|
ptr := v_0.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(canMergeSym(sym1, sym2)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVWstore)
|
||
|
|
v.AuxInt = off1 + off2
|
||
|
|
v.Aux = mergeSym(sym1, sym2)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod16 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MODW)
|
||
|
|
v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
|
||
|
|
v1.AddArg(y)
|
||
|
|
v.AddArg(v1)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod16u(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod16u x y)
|
||
|
|
// cond:
|
||
|
|
// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64UMODW)
|
||
|
|
v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(y)
|
||
|
|
v.AddArg(v1)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod32 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MODW x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MODW)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod32u(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod32u x y)
|
||
|
|
// cond:
|
||
|
|
// result: (UMODW x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64UMODW)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod64 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MOD x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MOD)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod64u(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod64u x y)
|
||
|
|
// cond:
|
||
|
|
// result: (UMOD x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64UMOD)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod8 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MODW)
|
||
|
|
v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
|
||
|
|
v1.AddArg(y)
|
||
|
|
v.AddArg(v1)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMod8u(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mod8u x y)
|
||
|
|
// cond:
|
||
|
|
// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64UMODW)
|
||
|
|
v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v0.AddArg(x)
|
||
|
|
v.AddArg(v0)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(y)
|
||
|
|
v.AddArg(v1)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMul16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mul16 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MUL x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MUL)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMul32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mul32 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MUL x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MUL)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMul32F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mul32F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (FMULS x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64FMULS)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMul64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mul64 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MUL x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MUL)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMul64F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mul64F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (FMULD x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64FMULD)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpMul8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Mul8 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (MUL x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64MUL)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeg16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neg16 x)
|
||
|
|
// cond:
|
||
|
|
// result: (NEG x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64NEG)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeg32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neg32 x)
|
||
|
|
// cond:
|
||
|
|
// result: (NEG x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64NEG)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeg32F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neg32F x)
|
||
|
|
// cond:
|
||
|
|
// result: (FNEGS x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64FNEGS)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeg64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neg64 x)
|
||
|
|
// cond:
|
||
|
|
// result: (NEG x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64NEG)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeg64F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neg64F x)
|
||
|
|
// cond:
|
||
|
|
// result: (FNEGD x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64FNEGD)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeg8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neg8 x)
|
||
|
|
// cond:
|
||
|
|
// result: (NEG x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64NEG)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeq16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neq16 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeq32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neq32 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (CMPW x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeq32F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neq32F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (FCMPS x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64FCMPS, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeq64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neq64 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (CMP x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeq64F(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neq64F x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (FCMPD x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64FCMPD, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeq8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Neq8 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMPW, TypeFlags)
|
||
|
|
v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v1.AddArg(x)
|
||
|
|
v0.AddArg(v1)
|
||
|
|
v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
|
||
|
|
v2.AddArg(y)
|
||
|
|
v0.AddArg(v2)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeqB(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (NeqB x y)
|
||
|
|
// cond:
|
||
|
|
// result: (XOR x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64XOR)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNeqPtr(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (NeqPtr x y)
|
||
|
|
// cond:
|
||
|
|
// result: (NotEqual (CMP x y))
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64NotEqual)
|
||
|
|
v0 := b.NewValue0(v.Line, OpARM64CMP, TypeFlags)
|
||
|
|
v0.AddArg(x)
|
||
|
|
v0.AddArg(y)
|
||
|
|
v.AddArg(v0)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNilCheck(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (NilCheck ptr mem)
|
||
|
|
// cond:
|
||
|
|
// result: (LoweredNilCheck ptr mem)
|
||
|
|
for {
|
||
|
|
ptr := v.Args[0]
|
||
|
|
mem := v.Args[1]
|
||
|
|
v.reset(OpARM64LoweredNilCheck)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpNot(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Not x)
|
||
|
|
// cond:
|
||
|
|
// result: (XORconst [1] x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64XORconst)
|
||
|
|
v.AuxInt = 1
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpOffPtr(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (OffPtr [off] ptr:(SP))
|
||
|
|
// cond:
|
||
|
|
// result: (MOVDaddr [off] ptr)
|
||
|
|
for {
|
||
|
|
off := v.AuxInt
|
||
|
|
ptr := v.Args[0]
|
||
|
|
if ptr.Op != OpSP {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVDaddr)
|
||
|
|
v.AuxInt = off
|
||
|
|
v.AddArg(ptr)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (OffPtr [off] ptr)
|
||
|
|
// cond:
|
||
|
|
// result: (ADDconst [off] ptr)
|
||
|
|
for {
|
||
|
|
off := v.AuxInt
|
||
|
|
ptr := v.Args[0]
|
||
|
|
v.reset(OpARM64ADDconst)
|
||
|
|
v.AuxInt = off
|
||
|
|
v.AddArg(ptr)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpOr16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Or16 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (OR x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64OR)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpOr32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Or32 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (OR x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64OR)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpOr64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Or64 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (OR x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64OR)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpOr8(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Or8 x y)
|
||
|
|
// cond:
|
||
|
|
// result: (OR x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64OR)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpOrB(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (OrB x y)
|
||
|
|
// cond:
|
||
|
|
// result: (OR x y)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
y := v.Args[1]
|
||
|
|
v.reset(OpARM64OR)
|
||
|
|
v.AddArg(x)
|
||
|
|
v.AddArg(y)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpSignExt16to32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (SignExt16to32 x)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVHreg x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64MOVHreg)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpSignExt16to64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (SignExt16to64 x)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVHreg x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64MOVHreg)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpSignExt32to64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (SignExt32to64 x)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVWreg x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64MOVWreg)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpSignExt8to16(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (SignExt8to16 x)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBreg x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64MOVBreg)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpSignExt8to32(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (SignExt8to32 x)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBreg x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64MOVBreg)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpSignExt8to64(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (SignExt8to64 x)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBreg x)
|
||
|
|
for {
|
||
|
|
x := v.Args[0]
|
||
|
|
v.reset(OpARM64MOVBreg)
|
||
|
|
v.AddArg(x)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpStaticCall(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (StaticCall [argwid] {target} mem)
|
||
|
|
// cond:
|
||
|
|
// result: (CALLstatic [argwid] {target} mem)
|
||
|
|
for {
|
||
|
|
argwid := v.AuxInt
|
||
|
|
target := v.Aux
|
||
|
|
mem := v.Args[0]
|
||
|
|
v.reset(OpARM64CALLstatic)
|
||
|
|
v.AuxInt = argwid
|
||
|
|
v.Aux = target
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
}
|
||
|
|
func rewriteValueARM64_OpStore(v *Value, config *Config) bool {
|
||
|
|
b := v.Block
|
||
|
|
_ = b
|
||
|
|
// match: (Store [1] ptr val mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVBstore ptr val mem)
|
||
|
|
for {
|
||
|
|
if v.AuxInt != 1 {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
ptr := v.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
v.reset(OpARM64MOVBstore)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Store [2] ptr val mem)
|
||
|
|
// cond:
|
||
|
|
// result: (MOVHstore ptr val mem)
|
||
|
|
for {
|
||
|
|
if v.AuxInt != 2 {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
ptr := v.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
v.reset(OpARM64MOVHstore)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Store [4] ptr val mem)
|
||
|
|
// cond: !is32BitFloat(val.Type)
|
||
|
|
// result: (MOVWstore ptr val mem)
|
||
|
|
for {
|
||
|
|
if v.AuxInt != 4 {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
ptr := v.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(!is32BitFloat(val.Type)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVWstore)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Store [8] ptr val mem)
|
||
|
|
// cond: !is64BitFloat(val.Type)
|
||
|
|
// result: (MOVDstore ptr val mem)
|
||
|
|
for {
|
||
|
|
if v.AuxInt != 8 {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
ptr := v.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(!is64BitFloat(val.Type)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64MOVDstore)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Store [4] ptr val mem)
|
||
|
|
// cond: is32BitFloat(val.Type)
|
||
|
|
// result: (FMOVSstore ptr val mem)
|
||
|
|
for {
|
||
|
|
if v.AuxInt != 4 {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
ptr := v.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(is32BitFloat(val.Type)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64FMOVSstore)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
// match: (Store [8] ptr val mem)
|
||
|
|
// cond: is64BitFloat(val.Type)
|
||
|
|
// result: (FMOVDstore ptr val mem)
|
||
|
|
for {
|
||
|
|
if v.AuxInt != 8 {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
ptr := v.Args[0]
|
||
|
|
val := v.Args[1]
|
||
|
|
mem := v.Args[2]
|
||
|
|
if !(is64BitFloat(val.Type)) {
|
||
|
|
break
|
||
|
|
}
|
||
|
|
v.reset(OpARM64FMOVDstore)
|
||
|
|
v.AddArg(ptr)
|
||
|
|
v.AddArg(val)
|
||
|
|
v.AddArg(mem)
|
||
|
|
return true
|
||
|
|
}
|
||
|
|
return false
|
||
|
|
}
func rewriteValueARM64_OpSub16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F x y)
	// cond:
	// result: (FSUBS x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64F x y)
	// cond:
	// result: (FSUBD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSub8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpSubPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
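
// Sub16, Sub32, Sub64, Sub8, and SubPtr all lower to the same 64-bit SUB:
// subtraction needs only the low-order bits of its inputs to produce correct
// low-order bits of its result, and narrow values keep those bits in place
// in the full-width register, so one instruction covers every integer width.
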
func rewriteValueARM64_OpTrunc16to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc32to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc64to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc64to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpTrunc64to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
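
// Every truncation is a no-op (OpCopy): a narrower integer already sits in
// the low bits of a 64-bit register, so discarding high bits costs nothing.
// A consumer that needs a canonical full-width value reintroduces an
// explicit extension, as in the ZeroExt rules further below.
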
func rewriteValueARM64_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
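
// As with the Sub rules, all four Xor widths share the single 64-bit XOR:
// a bitwise operation is computed bit-by-bit, so the low bits of the result
// can never depend on the high bits of the inputs.
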
func rewriteValueARM64_OpZeroExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt32to64 x)
	// cond:
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpZeroExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to64 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}
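
// Zero extension chooses the unsigned move by source width alone: MOVBUreg,
// MOVHUreg, and MOVWUreg each clear every bit above their operand width in
// the full 64-bit register, which is why ZeroExt8to16, ZeroExt8to32, and
// ZeroExt8to64 can all share the one MOVBUreg lowering.
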
func rewriteBlockARM64(b *Block) bool {
	switch b.Kind {
	case BlockIf:
		// match: (If (Equal cc) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64Equal {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64EQ
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (NotEqual cc) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64NotEqual {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64NE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (LessThan cc) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThan {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64LT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (LessThanU cc) yes no)
		// cond:
		// result: (ULT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessThanU {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64ULT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (LessEqual cc) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqual {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64LE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (LessEqualU cc) yes no)
		// cond:
		// result: (ULE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64LessEqualU {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64ULE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (GreaterThan cc) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThan {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64GT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (GreaterThanU cc) yes no)
		// cond:
		// result: (UGT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterThanU {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64UGT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (GreaterEqual cc) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqual {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64GE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If (GreaterEqualU cc) yes no)
		// cond:
		// result: (UGE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64GreaterEqualU {
				break
			}
			cc := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64UGE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (If cond yes no)
		// cond:
		// result: (NE (CMPconst [0] cond) yes no)
		for {
			v := b.Control
			cond := b.Control
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64NE
			v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
			v0.AuxInt = 0
			v0.AddArg(cond)
			b.SetControl(v0)
			_ = yes
			_ = no
			return true
		}
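		// A control value with no recognized comparison shape still needs
		// condition flags to branch on, so the fallback rule above compares
		// the boolean against zero and branches on NE. When that boolean was
		// itself one of the pseudo-ops Equal, LessThan, and so on, the
		// BlockARM64NE rules below peel the CMPconst [0] wrapper back off
		// and branch directly on the original flags.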
	case BlockARM64NE:
		// match: (NE (CMPconst [0] (Equal cc)) yes no)
		// cond:
		// result: (EQ cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64Equal {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64EQ
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (NotEqual cc)) yes no)
		// cond:
		// result: (NE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64NotEqual {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64NE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (LessThan cc)) yes no)
		// cond:
		// result: (LT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64LessThan {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64LT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (LessThanU cc)) yes no)
		// cond:
		// result: (ULT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64LessThanU {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64ULT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (LessEqual cc)) yes no)
		// cond:
		// result: (LE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64LessEqual {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64LE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (LessEqualU cc)) yes no)
		// cond:
		// result: (ULE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64LessEqualU {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64ULE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (GreaterThan cc)) yes no)
		// cond:
		// result: (GT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64GreaterThan {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64GT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (GreaterThanU cc)) yes no)
		// cond:
		// result: (UGT cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64GreaterThanU {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64UGT
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (GreaterEqual cc)) yes no)
		// cond:
		// result: (GE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64GreaterEqual {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64GE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
		// match: (NE (CMPconst [0] (GreaterEqualU cc)) yes no)
		// cond:
		// result: (UGE cc yes no)
		for {
			v := b.Control
			if v.Op != OpARM64CMPconst {
				break
			}
			if v.AuxInt != 0 {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpARM64GreaterEqualU {
				break
			}
			cc := v_0.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockARM64UGE
			b.SetControl(cc)
			_ = yes
			_ = no
			return true
		}
	}
	return false
}
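
// Like the value-rewrite functions above, rewriteBlockARM64 reports whether
// it changed anything; the SSA rewrite pass is expected to keep applying
// these functions until a full sweep produces no change.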