mirror of
https://github.com/golang/go.git
synced 2025-12-08 06:10:04 +00:00
This CL named VPALIGNR ConcatShiftBytes[Grouped]. Change-Id: I46c6703085efb0613deefa512de9911b4fdf6bc4 Reviewed-on: https://go-review.googlesource.com/c/go/+/714440 Reviewed-by: David Chase <drchase@google.com> LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
2899 lines
255 KiB
Text
2899 lines
255 KiB
Text
// Code generated by x/arch/internal/simdgen using 'go run . -xedPath $XED_PATH -o godefs -goroot $GOROOT go.yaml types.yaml categories.yaml'; DO NOT EDIT.
|
|
|
|
(AESDecryptLastRoundUint8x16 ...) => (VAESDECLAST128 ...)
|
|
(AESDecryptLastRoundUint8x32 ...) => (VAESDECLAST256 ...)
|
|
(AESDecryptRoundUint8x16 ...) => (VAESDEC128 ...)
|
|
(AESDecryptRoundUint8x32 ...) => (VAESDEC256 ...)
|
|
(AESEncryptLastRoundUint8x16 ...) => (VAESENCLAST128 ...)
|
|
(AESEncryptLastRoundUint8x32 ...) => (VAESENCLAST256 ...)
|
|
(AESEncryptRoundUint8x16 ...) => (VAESENC128 ...)
|
|
(AESEncryptRoundUint8x32 ...) => (VAESENC256 ...)
|
|
(AESInvMixColumnsUint32x4 ...) => (VAESIMC128 ...)
|
|
(AESRoundKeyGenAssistUint32x4 ...) => (VAESKEYGENASSIST128 ...)
|
|
(AbsInt8x16 ...) => (VPABSB128 ...)
|
|
(AbsInt8x32 ...) => (VPABSB256 ...)
|
|
(AbsInt8x64 ...) => (VPABSB512 ...)
|
|
(AbsInt16x8 ...) => (VPABSW128 ...)
|
|
(AbsInt16x16 ...) => (VPABSW256 ...)
|
|
(AbsInt16x32 ...) => (VPABSW512 ...)
|
|
(AbsInt32x4 ...) => (VPABSD128 ...)
|
|
(AbsInt32x8 ...) => (VPABSD256 ...)
|
|
(AbsInt32x16 ...) => (VPABSD512 ...)
|
|
(AbsInt64x2 ...) => (VPABSQ128 ...)
|
|
(AbsInt64x4 ...) => (VPABSQ256 ...)
|
|
(AbsInt64x8 ...) => (VPABSQ512 ...)
|
|
(AddFloat32x4 ...) => (VADDPS128 ...)
|
|
(AddFloat32x8 ...) => (VADDPS256 ...)
|
|
(AddFloat32x16 ...) => (VADDPS512 ...)
|
|
(AddFloat64x2 ...) => (VADDPD128 ...)
|
|
(AddFloat64x4 ...) => (VADDPD256 ...)
|
|
(AddFloat64x8 ...) => (VADDPD512 ...)
|
|
(AddInt8x16 ...) => (VPADDB128 ...)
|
|
(AddInt8x32 ...) => (VPADDB256 ...)
|
|
(AddInt8x64 ...) => (VPADDB512 ...)
|
|
(AddInt16x8 ...) => (VPADDW128 ...)
|
|
(AddInt16x16 ...) => (VPADDW256 ...)
|
|
(AddInt16x32 ...) => (VPADDW512 ...)
|
|
(AddInt32x4 ...) => (VPADDD128 ...)
|
|
(AddInt32x8 ...) => (VPADDD256 ...)
|
|
(AddInt32x16 ...) => (VPADDD512 ...)
|
|
(AddInt64x2 ...) => (VPADDQ128 ...)
|
|
(AddInt64x4 ...) => (VPADDQ256 ...)
|
|
(AddInt64x8 ...) => (VPADDQ512 ...)
|
|
(AddUint8x16 ...) => (VPADDB128 ...)
|
|
(AddUint8x32 ...) => (VPADDB256 ...)
|
|
(AddUint8x64 ...) => (VPADDB512 ...)
|
|
(AddUint16x8 ...) => (VPADDW128 ...)
|
|
(AddUint16x16 ...) => (VPADDW256 ...)
|
|
(AddUint16x32 ...) => (VPADDW512 ...)
|
|
(AddUint32x4 ...) => (VPADDD128 ...)
|
|
(AddUint32x8 ...) => (VPADDD256 ...)
|
|
(AddUint32x16 ...) => (VPADDD512 ...)
|
|
(AddUint64x2 ...) => (VPADDQ128 ...)
|
|
(AddUint64x4 ...) => (VPADDQ256 ...)
|
|
(AddUint64x8 ...) => (VPADDQ512 ...)
|
|
(AddDotProductQuadrupleInt32x4 ...) => (VPDPBUSD128 ...)
|
|
(AddDotProductQuadrupleInt32x8 ...) => (VPDPBUSD256 ...)
|
|
(AddDotProductQuadrupleInt32x16 ...) => (VPDPBUSD512 ...)
|
|
(AddDotProductQuadrupleSaturatedInt32x4 ...) => (VPDPBUSDS128 ...)
|
|
(AddDotProductQuadrupleSaturatedInt32x8 ...) => (VPDPBUSDS256 ...)
|
|
(AddDotProductQuadrupleSaturatedInt32x16 ...) => (VPDPBUSDS512 ...)
|
|
(AddPairsFloat32x4 ...) => (VHADDPS128 ...)
|
|
(AddPairsFloat32x8 ...) => (VHADDPS256 ...)
|
|
(AddPairsFloat64x2 ...) => (VHADDPD128 ...)
|
|
(AddPairsFloat64x4 ...) => (VHADDPD256 ...)
|
|
(AddPairsInt16x8 ...) => (VPHADDW128 ...)
|
|
(AddPairsInt16x16 ...) => (VPHADDW256 ...)
|
|
(AddPairsInt32x4 ...) => (VPHADDD128 ...)
|
|
(AddPairsInt32x8 ...) => (VPHADDD256 ...)
|
|
(AddPairsUint16x8 ...) => (VPHADDW128 ...)
|
|
(AddPairsUint16x16 ...) => (VPHADDW256 ...)
|
|
(AddPairsUint32x4 ...) => (VPHADDD128 ...)
|
|
(AddPairsUint32x8 ...) => (VPHADDD256 ...)
|
|
(AddPairsSaturatedInt16x8 ...) => (VPHADDSW128 ...)
|
|
(AddPairsSaturatedInt16x16 ...) => (VPHADDSW256 ...)
|
|
(AddSaturatedInt8x16 ...) => (VPADDSB128 ...)
|
|
(AddSaturatedInt8x32 ...) => (VPADDSB256 ...)
|
|
(AddSaturatedInt8x64 ...) => (VPADDSB512 ...)
|
|
(AddSaturatedInt16x8 ...) => (VPADDSW128 ...)
|
|
(AddSaturatedInt16x16 ...) => (VPADDSW256 ...)
|
|
(AddSaturatedInt16x32 ...) => (VPADDSW512 ...)
|
|
(AddSaturatedUint8x16 ...) => (VPADDUSB128 ...)
|
|
(AddSaturatedUint8x32 ...) => (VPADDUSB256 ...)
|
|
(AddSaturatedUint8x64 ...) => (VPADDUSB512 ...)
|
|
(AddSaturatedUint16x8 ...) => (VPADDUSW128 ...)
|
|
(AddSaturatedUint16x16 ...) => (VPADDUSW256 ...)
|
|
(AddSaturatedUint16x32 ...) => (VPADDUSW512 ...)
|
|
(AddSubFloat32x4 ...) => (VADDSUBPS128 ...)
|
|
(AddSubFloat32x8 ...) => (VADDSUBPS256 ...)
|
|
(AddSubFloat64x2 ...) => (VADDSUBPD128 ...)
|
|
(AddSubFloat64x4 ...) => (VADDSUBPD256 ...)
|
|
(AndInt8x16 ...) => (VPAND128 ...)
|
|
(AndInt8x32 ...) => (VPAND256 ...)
|
|
(AndInt8x64 ...) => (VPANDD512 ...)
|
|
(AndInt16x8 ...) => (VPAND128 ...)
|
|
(AndInt16x16 ...) => (VPAND256 ...)
|
|
(AndInt16x32 ...) => (VPANDD512 ...)
|
|
(AndInt32x4 ...) => (VPAND128 ...)
|
|
(AndInt32x8 ...) => (VPAND256 ...)
|
|
(AndInt32x16 ...) => (VPANDD512 ...)
|
|
(AndInt64x2 ...) => (VPAND128 ...)
|
|
(AndInt64x4 ...) => (VPAND256 ...)
|
|
(AndInt64x8 ...) => (VPANDQ512 ...)
|
|
(AndUint8x16 ...) => (VPAND128 ...)
|
|
(AndUint8x32 ...) => (VPAND256 ...)
|
|
(AndUint8x64 ...) => (VPANDD512 ...)
|
|
(AndUint16x8 ...) => (VPAND128 ...)
|
|
(AndUint16x16 ...) => (VPAND256 ...)
|
|
(AndUint16x32 ...) => (VPANDD512 ...)
|
|
(AndUint32x4 ...) => (VPAND128 ...)
|
|
(AndUint32x8 ...) => (VPAND256 ...)
|
|
(AndUint32x16 ...) => (VPANDD512 ...)
|
|
(AndUint64x2 ...) => (VPAND128 ...)
|
|
(AndUint64x4 ...) => (VPAND256 ...)
|
|
(AndUint64x8 ...) => (VPANDQ512 ...)
|
|
(AndNotInt8x16 ...) => (VPANDN128 ...)
|
|
(AndNotInt8x32 ...) => (VPANDN256 ...)
|
|
(AndNotInt8x64 ...) => (VPANDND512 ...)
|
|
(AndNotInt16x8 ...) => (VPANDN128 ...)
|
|
(AndNotInt16x16 ...) => (VPANDN256 ...)
|
|
(AndNotInt16x32 ...) => (VPANDND512 ...)
|
|
(AndNotInt32x4 ...) => (VPANDN128 ...)
|
|
(AndNotInt32x8 ...) => (VPANDN256 ...)
|
|
(AndNotInt32x16 ...) => (VPANDND512 ...)
|
|
(AndNotInt64x2 ...) => (VPANDN128 ...)
|
|
(AndNotInt64x4 ...) => (VPANDN256 ...)
|
|
(AndNotInt64x8 ...) => (VPANDNQ512 ...)
|
|
(AndNotUint8x16 ...) => (VPANDN128 ...)
|
|
(AndNotUint8x32 ...) => (VPANDN256 ...)
|
|
(AndNotUint8x64 ...) => (VPANDND512 ...)
|
|
(AndNotUint16x8 ...) => (VPANDN128 ...)
|
|
(AndNotUint16x16 ...) => (VPANDN256 ...)
|
|
(AndNotUint16x32 ...) => (VPANDND512 ...)
|
|
(AndNotUint32x4 ...) => (VPANDN128 ...)
|
|
(AndNotUint32x8 ...) => (VPANDN256 ...)
|
|
(AndNotUint32x16 ...) => (VPANDND512 ...)
|
|
(AndNotUint64x2 ...) => (VPANDN128 ...)
|
|
(AndNotUint64x4 ...) => (VPANDN256 ...)
|
|
(AndNotUint64x8 ...) => (VPANDNQ512 ...)
|
|
(AverageUint8x16 ...) => (VPAVGB128 ...)
|
|
(AverageUint8x32 ...) => (VPAVGB256 ...)
|
|
(AverageUint8x64 ...) => (VPAVGB512 ...)
|
|
(AverageUint16x8 ...) => (VPAVGW128 ...)
|
|
(AverageUint16x16 ...) => (VPAVGW256 ...)
|
|
(AverageUint16x32 ...) => (VPAVGW512 ...)
|
|
(Broadcast128Float32x4 ...) => (VBROADCASTSS128 ...)
|
|
(Broadcast128Float64x2 ...) => (VPBROADCASTQ128 ...)
|
|
(Broadcast128Int8x16 ...) => (VPBROADCASTB128 ...)
|
|
(Broadcast128Int16x8 ...) => (VPBROADCASTW128 ...)
|
|
(Broadcast128Int32x4 ...) => (VPBROADCASTD128 ...)
|
|
(Broadcast128Int64x2 ...) => (VPBROADCASTQ128 ...)
|
|
(Broadcast128Uint8x16 ...) => (VPBROADCASTB128 ...)
|
|
(Broadcast128Uint16x8 ...) => (VPBROADCASTW128 ...)
|
|
(Broadcast128Uint32x4 ...) => (VPBROADCASTD128 ...)
|
|
(Broadcast128Uint64x2 ...) => (VPBROADCASTQ128 ...)
|
|
(Broadcast256Float32x4 ...) => (VBROADCASTSS256 ...)
|
|
(Broadcast256Float64x2 ...) => (VBROADCASTSD256 ...)
|
|
(Broadcast256Int8x16 ...) => (VPBROADCASTB256 ...)
|
|
(Broadcast256Int16x8 ...) => (VPBROADCASTW256 ...)
|
|
(Broadcast256Int32x4 ...) => (VPBROADCASTD256 ...)
|
|
(Broadcast256Int64x2 ...) => (VPBROADCASTQ256 ...)
|
|
(Broadcast256Uint8x16 ...) => (VPBROADCASTB256 ...)
|
|
(Broadcast256Uint16x8 ...) => (VPBROADCASTW256 ...)
|
|
(Broadcast256Uint32x4 ...) => (VPBROADCASTD256 ...)
|
|
(Broadcast256Uint64x2 ...) => (VPBROADCASTQ256 ...)
|
|
(Broadcast512Float32x4 ...) => (VBROADCASTSS512 ...)
|
|
(Broadcast512Float64x2 ...) => (VBROADCASTSD512 ...)
|
|
(Broadcast512Int8x16 ...) => (VPBROADCASTB512 ...)
|
|
(Broadcast512Int16x8 ...) => (VPBROADCASTW512 ...)
|
|
(Broadcast512Int32x4 ...) => (VPBROADCASTD512 ...)
|
|
(Broadcast512Int64x2 ...) => (VPBROADCASTQ512 ...)
|
|
(Broadcast512Uint8x16 ...) => (VPBROADCASTB512 ...)
|
|
(Broadcast512Uint16x8 ...) => (VPBROADCASTW512 ...)
|
|
(Broadcast512Uint32x4 ...) => (VPBROADCASTD512 ...)
|
|
(Broadcast512Uint64x2 ...) => (VPBROADCASTQ512 ...)
|
|
(CeilFloat32x4 x) => (VROUNDPS128 [2] x)
|
|
(CeilFloat32x8 x) => (VROUNDPS256 [2] x)
|
|
(CeilFloat64x2 x) => (VROUNDPD128 [2] x)
|
|
(CeilFloat64x4 x) => (VROUNDPD256 [2] x)
|
|
(CeilScaledFloat32x4 [a] x) => (VRNDSCALEPS128 [a+2] x)
|
|
(CeilScaledFloat32x8 [a] x) => (VRNDSCALEPS256 [a+2] x)
|
|
(CeilScaledFloat32x16 [a] x) => (VRNDSCALEPS512 [a+2] x)
|
|
(CeilScaledFloat64x2 [a] x) => (VRNDSCALEPD128 [a+2] x)
|
|
(CeilScaledFloat64x4 [a] x) => (VRNDSCALEPD256 [a+2] x)
|
|
(CeilScaledFloat64x8 [a] x) => (VRNDSCALEPD512 [a+2] x)
|
|
(CeilScaledResidueFloat32x4 [a] x) => (VREDUCEPS128 [a+2] x)
|
|
(CeilScaledResidueFloat32x8 [a] x) => (VREDUCEPS256 [a+2] x)
|
|
(CeilScaledResidueFloat32x16 [a] x) => (VREDUCEPS512 [a+2] x)
|
|
(CeilScaledResidueFloat64x2 [a] x) => (VREDUCEPD128 [a+2] x)
|
|
(CeilScaledResidueFloat64x4 [a] x) => (VREDUCEPD256 [a+2] x)
|
|
(CeilScaledResidueFloat64x8 [a] x) => (VREDUCEPD512 [a+2] x)
|
|
(CompressFloat32x4 x mask) => (VCOMPRESSPSMasked128 x (VPMOVVec32x4ToM <types.TypeMask> mask))
|
|
(CompressFloat32x8 x mask) => (VCOMPRESSPSMasked256 x (VPMOVVec32x8ToM <types.TypeMask> mask))
|
|
(CompressFloat32x16 x mask) => (VCOMPRESSPSMasked512 x (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(CompressFloat64x2 x mask) => (VCOMPRESSPDMasked128 x (VPMOVVec64x2ToM <types.TypeMask> mask))
|
|
(CompressFloat64x4 x mask) => (VCOMPRESSPDMasked256 x (VPMOVVec64x4ToM <types.TypeMask> mask))
|
|
(CompressFloat64x8 x mask) => (VCOMPRESSPDMasked512 x (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(CompressInt8x16 x mask) => (VPCOMPRESSBMasked128 x (VPMOVVec8x16ToM <types.TypeMask> mask))
|
|
(CompressInt8x32 x mask) => (VPCOMPRESSBMasked256 x (VPMOVVec8x32ToM <types.TypeMask> mask))
|
|
(CompressInt8x64 x mask) => (VPCOMPRESSBMasked512 x (VPMOVVec8x64ToM <types.TypeMask> mask))
|
|
(CompressInt16x8 x mask) => (VPCOMPRESSWMasked128 x (VPMOVVec16x8ToM <types.TypeMask> mask))
|
|
(CompressInt16x16 x mask) => (VPCOMPRESSWMasked256 x (VPMOVVec16x16ToM <types.TypeMask> mask))
|
|
(CompressInt16x32 x mask) => (VPCOMPRESSWMasked512 x (VPMOVVec16x32ToM <types.TypeMask> mask))
|
|
(CompressInt32x4 x mask) => (VPCOMPRESSDMasked128 x (VPMOVVec32x4ToM <types.TypeMask> mask))
|
|
(CompressInt32x8 x mask) => (VPCOMPRESSDMasked256 x (VPMOVVec32x8ToM <types.TypeMask> mask))
|
|
(CompressInt32x16 x mask) => (VPCOMPRESSDMasked512 x (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(CompressInt64x2 x mask) => (VPCOMPRESSQMasked128 x (VPMOVVec64x2ToM <types.TypeMask> mask))
|
|
(CompressInt64x4 x mask) => (VPCOMPRESSQMasked256 x (VPMOVVec64x4ToM <types.TypeMask> mask))
|
|
(CompressInt64x8 x mask) => (VPCOMPRESSQMasked512 x (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(CompressUint8x16 x mask) => (VPCOMPRESSBMasked128 x (VPMOVVec8x16ToM <types.TypeMask> mask))
|
|
(CompressUint8x32 x mask) => (VPCOMPRESSBMasked256 x (VPMOVVec8x32ToM <types.TypeMask> mask))
|
|
(CompressUint8x64 x mask) => (VPCOMPRESSBMasked512 x (VPMOVVec8x64ToM <types.TypeMask> mask))
|
|
(CompressUint16x8 x mask) => (VPCOMPRESSWMasked128 x (VPMOVVec16x8ToM <types.TypeMask> mask))
|
|
(CompressUint16x16 x mask) => (VPCOMPRESSWMasked256 x (VPMOVVec16x16ToM <types.TypeMask> mask))
|
|
(CompressUint16x32 x mask) => (VPCOMPRESSWMasked512 x (VPMOVVec16x32ToM <types.TypeMask> mask))
|
|
(CompressUint32x4 x mask) => (VPCOMPRESSDMasked128 x (VPMOVVec32x4ToM <types.TypeMask> mask))
|
|
(CompressUint32x8 x mask) => (VPCOMPRESSDMasked256 x (VPMOVVec32x8ToM <types.TypeMask> mask))
|
|
(CompressUint32x16 x mask) => (VPCOMPRESSDMasked512 x (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(CompressUint64x2 x mask) => (VPCOMPRESSQMasked128 x (VPMOVVec64x2ToM <types.TypeMask> mask))
|
|
(CompressUint64x4 x mask) => (VPCOMPRESSQMasked256 x (VPMOVVec64x4ToM <types.TypeMask> mask))
|
|
(CompressUint64x8 x mask) => (VPCOMPRESSQMasked512 x (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(ConcatShiftBytesRightUint8x16 ...) => (VPALIGNR128 ...)
|
|
(ConcatShiftBytesRightGroupedUint8x32 ...) => (VPALIGNR256 ...)
|
|
(ConcatShiftBytesRightGroupedUint8x64 ...) => (VPALIGNR512 ...)
|
|
(ConvertToInt8Int16x8 ...) => (VPMOVWB128_128 ...)
|
|
(ConvertToInt8Int16x16 ...) => (VPMOVWB128_256 ...)
|
|
(ConvertToInt8Int16x32 ...) => (VPMOVWB256 ...)
|
|
(ConvertToInt8Int32x4 ...) => (VPMOVDB128_128 ...)
|
|
(ConvertToInt8Int32x8 ...) => (VPMOVDB128_256 ...)
|
|
(ConvertToInt8Int32x16 ...) => (VPMOVDB128_512 ...)
|
|
(ConvertToInt8Int64x2 ...) => (VPMOVQB128_128 ...)
|
|
(ConvertToInt8Int64x4 ...) => (VPMOVQB128_256 ...)
|
|
(ConvertToInt8Int64x8 ...) => (VPMOVQB128_512 ...)
|
|
(ConvertToInt8SaturatedInt16x8 ...) => (VPMOVSWB128_128 ...)
|
|
(ConvertToInt8SaturatedInt16x16 ...) => (VPMOVSWB128_256 ...)
|
|
(ConvertToInt8SaturatedInt16x32 ...) => (VPMOVSWB256 ...)
|
|
(ConvertToInt8SaturatedInt32x4 ...) => (VPMOVSDB128_128 ...)
|
|
(ConvertToInt8SaturatedInt32x8 ...) => (VPMOVSDB128_256 ...)
|
|
(ConvertToInt8SaturatedInt32x16 ...) => (VPMOVSDB128_512 ...)
|
|
(ConvertToInt8SaturatedInt64x2 ...) => (VPMOVSQB128_128 ...)
|
|
(ConvertToInt8SaturatedInt64x4 ...) => (VPMOVSQB128_256 ...)
|
|
(ConvertToInt8SaturatedInt64x8 ...) => (VPMOVSQB128_512 ...)
|
|
(ConvertToInt16Int8x16 ...) => (VPMOVSXBW256 ...)
|
|
(ConvertToInt16Int8x32 ...) => (VPMOVSXBW512 ...)
|
|
(ConvertToInt16Int32x4 ...) => (VPMOVDW128_128 ...)
|
|
(ConvertToInt16Int32x8 ...) => (VPMOVDW128_256 ...)
|
|
(ConvertToInt16Int32x16 ...) => (VPMOVDW256 ...)
|
|
(ConvertToInt16Int64x2 ...) => (VPMOVQW128_128 ...)
|
|
(ConvertToInt16Int64x4 ...) => (VPMOVQW128_256 ...)
|
|
(ConvertToInt16Int64x8 ...) => (VPMOVQW128_512 ...)
|
|
(ConvertToInt16SaturatedInt32x4 ...) => (VPMOVSDW128_128 ...)
|
|
(ConvertToInt16SaturatedInt32x8 ...) => (VPMOVSDW128_256 ...)
|
|
(ConvertToInt16SaturatedInt32x16 ...) => (VPMOVSDW256 ...)
|
|
(ConvertToInt16SaturatedInt64x2 ...) => (VPMOVSQW128_128 ...)
|
|
(ConvertToInt16SaturatedInt64x4 ...) => (VPMOVSQW128_256 ...)
|
|
(ConvertToInt16SaturatedInt64x8 ...) => (VPMOVSQW128_512 ...)
|
|
(ConvertToInt16SaturatedPackedInt32x4 ...) => (VPACKSSDW128 ...)
|
|
(ConvertToInt16SaturatedPackedInt32x8 ...) => (VPACKSSDW256 ...)
|
|
(ConvertToInt16SaturatedPackedInt32x16 ...) => (VPACKSSDW512 ...)
|
|
(ConvertToInt16x8Int8x16 ...) => (VPMOVSXBW128 ...)
|
|
(ConvertToInt32Float32x4 ...) => (VCVTTPS2DQ128 ...)
|
|
(ConvertToInt32Float32x8 ...) => (VCVTTPS2DQ256 ...)
|
|
(ConvertToInt32Float32x16 ...) => (VCVTTPS2DQ512 ...)
|
|
(ConvertToInt32Int8x16 ...) => (VPMOVSXBD512 ...)
|
|
(ConvertToInt32Int16x8 ...) => (VPMOVSXWD256 ...)
|
|
(ConvertToInt32Int16x16 ...) => (VPMOVSXWD512 ...)
|
|
(ConvertToInt32Int64x2 ...) => (VPMOVQD128_128 ...)
|
|
(ConvertToInt32Int64x4 ...) => (VPMOVQD128_256 ...)
|
|
(ConvertToInt32Int64x8 ...) => (VPMOVQD256 ...)
|
|
(ConvertToInt32SaturatedInt64x2 ...) => (VPMOVSQD128_128 ...)
|
|
(ConvertToInt32SaturatedInt64x4 ...) => (VPMOVSQD128_256 ...)
|
|
(ConvertToInt32SaturatedInt64x8 ...) => (VPMOVSQD256 ...)
|
|
(ConvertToInt32x4Int8x16 ...) => (VPMOVSXBD128 ...)
|
|
(ConvertToInt32x4Int16x8 ...) => (VPMOVSXWD128 ...)
|
|
(ConvertToInt32x8Int8x16 ...) => (VPMOVSXBD256 ...)
|
|
(ConvertToInt64Int16x8 ...) => (VPMOVSXWQ512 ...)
|
|
(ConvertToInt64Int32x4 ...) => (VPMOVSXDQ256 ...)
|
|
(ConvertToInt64Int32x8 ...) => (VPMOVSXDQ512 ...)
|
|
(ConvertToInt64x2Int8x16 ...) => (VPMOVSXBQ128 ...)
|
|
(ConvertToInt64x2Int16x8 ...) => (VPMOVSXWQ128 ...)
|
|
(ConvertToInt64x2Int32x4 ...) => (VPMOVSXDQ128 ...)
|
|
(ConvertToInt64x4Int8x16 ...) => (VPMOVSXBQ256 ...)
|
|
(ConvertToInt64x8Int8x16 ...) => (VPMOVSXBQ512 ...)
|
|
(ConvertToUint8Uint16x8 ...) => (VPMOVWB128_128 ...)
|
|
(ConvertToUint8Uint16x16 ...) => (VPMOVWB128_256 ...)
|
|
(ConvertToUint8Uint16x32 ...) => (VPMOVWB256 ...)
|
|
(ConvertToUint8Uint32x4 ...) => (VPMOVDB128_128 ...)
|
|
(ConvertToUint8Uint32x8 ...) => (VPMOVDB128_256 ...)
|
|
(ConvertToUint8Uint32x16 ...) => (VPMOVDB128_512 ...)
|
|
(ConvertToUint8Uint64x2 ...) => (VPMOVQB128_128 ...)
|
|
(ConvertToUint8Uint64x4 ...) => (VPMOVQB128_256 ...)
|
|
(ConvertToUint8Uint64x8 ...) => (VPMOVQB128_512 ...)
|
|
(ConvertToUint8SaturatedUint16x8 ...) => (VPMOVUSWB128_128 ...)
|
|
(ConvertToUint8SaturatedUint16x16 ...) => (VPMOVUSWB128_256 ...)
|
|
(ConvertToUint8SaturatedUint16x32 ...) => (VPMOVUSWB256 ...)
|
|
(ConvertToUint8SaturatedUint32x4 ...) => (VPMOVUSDB128_128 ...)
|
|
(ConvertToUint8SaturatedUint32x8 ...) => (VPMOVUSDB128_256 ...)
|
|
(ConvertToUint8SaturatedUint32x16 ...) => (VPMOVUSDB128_512 ...)
|
|
(ConvertToUint8SaturatedUint64x2 ...) => (VPMOVUSQB128_128 ...)
|
|
(ConvertToUint8SaturatedUint64x4 ...) => (VPMOVUSQB128_256 ...)
|
|
(ConvertToUint8SaturatedUint64x8 ...) => (VPMOVUSQB128_512 ...)
|
|
(ConvertToUint16Uint8x16 ...) => (VPMOVZXBW256 ...)
|
|
(ConvertToUint16Uint8x32 ...) => (VPMOVZXBW512 ...)
|
|
(ConvertToUint16Uint32x4 ...) => (VPMOVDW128_128 ...)
|
|
(ConvertToUint16Uint32x8 ...) => (VPMOVDW128_256 ...)
|
|
(ConvertToUint16Uint32x16 ...) => (VPMOVDW256 ...)
|
|
(ConvertToUint16Uint64x2 ...) => (VPMOVQW128_128 ...)
|
|
(ConvertToUint16Uint64x4 ...) => (VPMOVQW128_256 ...)
|
|
(ConvertToUint16Uint64x8 ...) => (VPMOVQW128_512 ...)
|
|
(ConvertToUint16SaturatedUint32x4 ...) => (VPMOVUSDW128_128 ...)
|
|
(ConvertToUint16SaturatedUint32x8 ...) => (VPMOVUSDW128_256 ...)
|
|
(ConvertToUint16SaturatedUint32x16 ...) => (VPMOVUSDW256 ...)
|
|
(ConvertToUint16SaturatedUint64x2 ...) => (VPMOVUSQW128_128 ...)
|
|
(ConvertToUint16SaturatedUint64x4 ...) => (VPMOVUSQW128_256 ...)
|
|
(ConvertToUint16SaturatedUint64x8 ...) => (VPMOVUSQW128_512 ...)
|
|
(ConvertToUint16SaturatedPackedUint32x4 ...) => (VPACKUSDW128 ...)
|
|
(ConvertToUint16SaturatedPackedUint32x8 ...) => (VPACKUSDW256 ...)
|
|
(ConvertToUint16SaturatedPackedUint32x16 ...) => (VPACKUSDW512 ...)
|
|
(ConvertToUint16x8Uint8x16 ...) => (VPMOVZXBW128 ...)
|
|
(ConvertToUint32Float32x4 ...) => (VCVTPS2UDQ128 ...)
|
|
(ConvertToUint32Float32x8 ...) => (VCVTPS2UDQ256 ...)
|
|
(ConvertToUint32Float32x16 ...) => (VCVTPS2UDQ512 ...)
|
|
(ConvertToUint32Uint8x16 ...) => (VPMOVZXBD512 ...)
|
|
(ConvertToUint32Uint16x8 ...) => (VPMOVZXWD256 ...)
|
|
(ConvertToUint32Uint16x16 ...) => (VPMOVZXWD512 ...)
|
|
(ConvertToUint32Uint64x2 ...) => (VPMOVQD128_128 ...)
|
|
(ConvertToUint32Uint64x4 ...) => (VPMOVQD128_256 ...)
|
|
(ConvertToUint32Uint64x8 ...) => (VPMOVQD256 ...)
|
|
(ConvertToUint32SaturatedUint64x2 ...) => (VPMOVUSQD128_128 ...)
|
|
(ConvertToUint32SaturatedUint64x4 ...) => (VPMOVUSQD128_256 ...)
|
|
(ConvertToUint32SaturatedUint64x8 ...) => (VPMOVUSQD256 ...)
|
|
(ConvertToUint32x4Uint8x16 ...) => (VPMOVZXBD128 ...)
|
|
(ConvertToUint32x4Uint16x8 ...) => (VPMOVZXWD128 ...)
|
|
(ConvertToUint32x8Uint8x16 ...) => (VPMOVZXBD256 ...)
|
|
(ConvertToUint64Uint16x8 ...) => (VPMOVZXWQ512 ...)
|
|
(ConvertToUint64Uint32x4 ...) => (VPMOVZXDQ256 ...)
|
|
(ConvertToUint64Uint32x8 ...) => (VPMOVZXDQ512 ...)
|
|
(ConvertToUint64x2Uint8x16 ...) => (VPMOVZXBQ128 ...)
|
|
(ConvertToUint64x2Uint16x8 ...) => (VPMOVZXWQ128 ...)
|
|
(ConvertToUint64x2Uint32x4 ...) => (VPMOVZXDQ128 ...)
|
|
(ConvertToUint64x4Int16x8 ...) => (VPMOVSXWQ256 ...)
|
|
(ConvertToUint64x4Uint8x16 ...) => (VPMOVZXBQ256 ...)
|
|
(ConvertToUint64x4Uint16x8 ...) => (VPMOVZXWQ256 ...)
|
|
(ConvertToUint64x8Uint8x16 ...) => (VPMOVZXBQ512 ...)
|
|
(CopySignInt8x16 ...) => (VPSIGNB128 ...)
|
|
(CopySignInt8x32 ...) => (VPSIGNB256 ...)
|
|
(CopySignInt16x8 ...) => (VPSIGNW128 ...)
|
|
(CopySignInt16x16 ...) => (VPSIGNW256 ...)
|
|
(CopySignInt32x4 ...) => (VPSIGND128 ...)
|
|
(CopySignInt32x8 ...) => (VPSIGND256 ...)
|
|
(DivFloat32x4 ...) => (VDIVPS128 ...)
|
|
(DivFloat32x8 ...) => (VDIVPS256 ...)
|
|
(DivFloat32x16 ...) => (VDIVPS512 ...)
|
|
(DivFloat64x2 ...) => (VDIVPD128 ...)
|
|
(DivFloat64x4 ...) => (VDIVPD256 ...)
|
|
(DivFloat64x8 ...) => (VDIVPD512 ...)
|
|
(DotProductPairsInt16x8 ...) => (VPMADDWD128 ...)
|
|
(DotProductPairsInt16x16 ...) => (VPMADDWD256 ...)
|
|
(DotProductPairsInt16x32 ...) => (VPMADDWD512 ...)
|
|
(DotProductPairsSaturatedUint8x16 ...) => (VPMADDUBSW128 ...)
|
|
(DotProductPairsSaturatedUint8x32 ...) => (VPMADDUBSW256 ...)
|
|
(DotProductPairsSaturatedUint8x64 ...) => (VPMADDUBSW512 ...)
|
|
(EqualFloat32x4 x y) => (VCMPPS128 [0] x y)
|
|
(EqualFloat32x8 x y) => (VCMPPS256 [0] x y)
|
|
(EqualFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [0] x y))
|
|
(EqualFloat64x2 x y) => (VCMPPD128 [0] x y)
|
|
(EqualFloat64x4 x y) => (VCMPPD256 [0] x y)
|
|
(EqualFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [0] x y))
|
|
(EqualInt8x16 ...) => (VPCMPEQB128 ...)
|
|
(EqualInt8x32 ...) => (VPCMPEQB256 ...)
|
|
(EqualInt8x64 x y) => (VPMOVMToVec8x64 (VPCMPEQB512 x y))
|
|
(EqualInt16x8 ...) => (VPCMPEQW128 ...)
|
|
(EqualInt16x16 ...) => (VPCMPEQW256 ...)
|
|
(EqualInt16x32 x y) => (VPMOVMToVec16x32 (VPCMPEQW512 x y))
|
|
(EqualInt32x4 ...) => (VPCMPEQD128 ...)
|
|
(EqualInt32x8 ...) => (VPCMPEQD256 ...)
|
|
(EqualInt32x16 x y) => (VPMOVMToVec32x16 (VPCMPEQD512 x y))
|
|
(EqualInt64x2 ...) => (VPCMPEQQ128 ...)
|
|
(EqualInt64x4 ...) => (VPCMPEQQ256 ...)
|
|
(EqualInt64x8 x y) => (VPMOVMToVec64x8 (VPCMPEQQ512 x y))
|
|
(EqualUint8x16 ...) => (VPCMPEQB128 ...)
|
|
(EqualUint8x32 ...) => (VPCMPEQB256 ...)
|
|
(EqualUint8x64 x y) => (VPMOVMToVec8x64 (VPCMPEQB512 x y))
|
|
(EqualUint16x8 ...) => (VPCMPEQW128 ...)
|
|
(EqualUint16x16 ...) => (VPCMPEQW256 ...)
|
|
(EqualUint16x32 x y) => (VPMOVMToVec16x32 (VPCMPEQW512 x y))
|
|
(EqualUint32x4 ...) => (VPCMPEQD128 ...)
|
|
(EqualUint32x8 ...) => (VPCMPEQD256 ...)
|
|
(EqualUint32x16 x y) => (VPMOVMToVec32x16 (VPCMPEQD512 x y))
|
|
(EqualUint64x2 ...) => (VPCMPEQQ128 ...)
|
|
(EqualUint64x4 ...) => (VPCMPEQQ256 ...)
|
|
(EqualUint64x8 x y) => (VPMOVMToVec64x8 (VPCMPEQQ512 x y))
|
|
(ExpandFloat32x4 x mask) => (VEXPANDPSMasked128 x (VPMOVVec32x4ToM <types.TypeMask> mask))
|
|
(ExpandFloat32x8 x mask) => (VEXPANDPSMasked256 x (VPMOVVec32x8ToM <types.TypeMask> mask))
|
|
(ExpandFloat32x16 x mask) => (VEXPANDPSMasked512 x (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(ExpandFloat64x2 x mask) => (VEXPANDPDMasked128 x (VPMOVVec64x2ToM <types.TypeMask> mask))
|
|
(ExpandFloat64x4 x mask) => (VEXPANDPDMasked256 x (VPMOVVec64x4ToM <types.TypeMask> mask))
|
|
(ExpandFloat64x8 x mask) => (VEXPANDPDMasked512 x (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(ExpandInt8x16 x mask) => (VPEXPANDBMasked128 x (VPMOVVec8x16ToM <types.TypeMask> mask))
|
|
(ExpandInt8x32 x mask) => (VPEXPANDBMasked256 x (VPMOVVec8x32ToM <types.TypeMask> mask))
|
|
(ExpandInt8x64 x mask) => (VPEXPANDBMasked512 x (VPMOVVec8x64ToM <types.TypeMask> mask))
|
|
(ExpandInt16x8 x mask) => (VPEXPANDWMasked128 x (VPMOVVec16x8ToM <types.TypeMask> mask))
|
|
(ExpandInt16x16 x mask) => (VPEXPANDWMasked256 x (VPMOVVec16x16ToM <types.TypeMask> mask))
|
|
(ExpandInt16x32 x mask) => (VPEXPANDWMasked512 x (VPMOVVec16x32ToM <types.TypeMask> mask))
|
|
(ExpandInt32x4 x mask) => (VPEXPANDDMasked128 x (VPMOVVec32x4ToM <types.TypeMask> mask))
|
|
(ExpandInt32x8 x mask) => (VPEXPANDDMasked256 x (VPMOVVec32x8ToM <types.TypeMask> mask))
|
|
(ExpandInt32x16 x mask) => (VPEXPANDDMasked512 x (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(ExpandInt64x2 x mask) => (VPEXPANDQMasked128 x (VPMOVVec64x2ToM <types.TypeMask> mask))
|
|
(ExpandInt64x4 x mask) => (VPEXPANDQMasked256 x (VPMOVVec64x4ToM <types.TypeMask> mask))
|
|
(ExpandInt64x8 x mask) => (VPEXPANDQMasked512 x (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(ExpandUint8x16 x mask) => (VPEXPANDBMasked128 x (VPMOVVec8x16ToM <types.TypeMask> mask))
|
|
(ExpandUint8x32 x mask) => (VPEXPANDBMasked256 x (VPMOVVec8x32ToM <types.TypeMask> mask))
|
|
(ExpandUint8x64 x mask) => (VPEXPANDBMasked512 x (VPMOVVec8x64ToM <types.TypeMask> mask))
|
|
(ExpandUint16x8 x mask) => (VPEXPANDWMasked128 x (VPMOVVec16x8ToM <types.TypeMask> mask))
|
|
(ExpandUint16x16 x mask) => (VPEXPANDWMasked256 x (VPMOVVec16x16ToM <types.TypeMask> mask))
|
|
(ExpandUint16x32 x mask) => (VPEXPANDWMasked512 x (VPMOVVec16x32ToM <types.TypeMask> mask))
|
|
(ExpandUint32x4 x mask) => (VPEXPANDDMasked128 x (VPMOVVec32x4ToM <types.TypeMask> mask))
|
|
(ExpandUint32x8 x mask) => (VPEXPANDDMasked256 x (VPMOVVec32x8ToM <types.TypeMask> mask))
|
|
(ExpandUint32x16 x mask) => (VPEXPANDDMasked512 x (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(ExpandUint64x2 x mask) => (VPEXPANDQMasked128 x (VPMOVVec64x2ToM <types.TypeMask> mask))
|
|
(ExpandUint64x4 x mask) => (VPEXPANDQMasked256 x (VPMOVVec64x4ToM <types.TypeMask> mask))
|
|
(ExpandUint64x8 x mask) => (VPEXPANDQMasked512 x (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(FloorFloat32x4 x) => (VROUNDPS128 [1] x)
|
|
(FloorFloat32x8 x) => (VROUNDPS256 [1] x)
|
|
(FloorFloat64x2 x) => (VROUNDPD128 [1] x)
|
|
(FloorFloat64x4 x) => (VROUNDPD256 [1] x)
|
|
(FloorScaledFloat32x4 [a] x) => (VRNDSCALEPS128 [a+1] x)
|
|
(FloorScaledFloat32x8 [a] x) => (VRNDSCALEPS256 [a+1] x)
|
|
(FloorScaledFloat32x16 [a] x) => (VRNDSCALEPS512 [a+1] x)
|
|
(FloorScaledFloat64x2 [a] x) => (VRNDSCALEPD128 [a+1] x)
|
|
(FloorScaledFloat64x4 [a] x) => (VRNDSCALEPD256 [a+1] x)
|
|
(FloorScaledFloat64x8 [a] x) => (VRNDSCALEPD512 [a+1] x)
|
|
(FloorScaledResidueFloat32x4 [a] x) => (VREDUCEPS128 [a+1] x)
|
|
(FloorScaledResidueFloat32x8 [a] x) => (VREDUCEPS256 [a+1] x)
|
|
(FloorScaledResidueFloat32x16 [a] x) => (VREDUCEPS512 [a+1] x)
|
|
(FloorScaledResidueFloat64x2 [a] x) => (VREDUCEPD128 [a+1] x)
|
|
(FloorScaledResidueFloat64x4 [a] x) => (VREDUCEPD256 [a+1] x)
|
|
(FloorScaledResidueFloat64x8 [a] x) => (VREDUCEPD512 [a+1] x)
|
|
(GaloisFieldAffineTransformUint8x16 ...) => (VGF2P8AFFINEQB128 ...)
|
|
(GaloisFieldAffineTransformUint8x32 ...) => (VGF2P8AFFINEQB256 ...)
|
|
(GaloisFieldAffineTransformUint8x64 ...) => (VGF2P8AFFINEQB512 ...)
|
|
(GaloisFieldAffineTransformInverseUint8x16 ...) => (VGF2P8AFFINEINVQB128 ...)
|
|
(GaloisFieldAffineTransformInverseUint8x32 ...) => (VGF2P8AFFINEINVQB256 ...)
|
|
(GaloisFieldAffineTransformInverseUint8x64 ...) => (VGF2P8AFFINEINVQB512 ...)
|
|
(GaloisFieldMulUint8x16 ...) => (VGF2P8MULB128 ...)
|
|
(GaloisFieldMulUint8x32 ...) => (VGF2P8MULB256 ...)
|
|
(GaloisFieldMulUint8x64 ...) => (VGF2P8MULB512 ...)
|
|
(GetElemFloat32x4 ...) => (VPEXTRD128 ...)
|
|
(GetElemFloat64x2 ...) => (VPEXTRQ128 ...)
|
|
(GetElemInt8x16 ...) => (VPEXTRB128 ...)
|
|
(GetElemInt16x8 ...) => (VPEXTRW128 ...)
|
|
(GetElemInt32x4 ...) => (VPEXTRD128 ...)
|
|
(GetElemInt64x2 ...) => (VPEXTRQ128 ...)
|
|
(GetElemUint8x16 ...) => (VPEXTRB128 ...)
|
|
(GetElemUint16x8 ...) => (VPEXTRW128 ...)
|
|
(GetElemUint32x4 ...) => (VPEXTRD128 ...)
|
|
(GetElemUint64x2 ...) => (VPEXTRQ128 ...)
|
|
(GetHiFloat32x8 x) => (VEXTRACTF128128 [1] x)
|
|
(GetHiFloat32x16 x) => (VEXTRACTF64X4256 [1] x)
|
|
(GetHiFloat64x4 x) => (VEXTRACTF128128 [1] x)
|
|
(GetHiFloat64x8 x) => (VEXTRACTF64X4256 [1] x)
|
|
(GetHiInt8x32 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiInt8x64 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiInt16x16 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiInt16x32 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiInt32x8 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiInt32x16 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiInt64x4 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiInt64x8 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiUint8x32 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiUint8x64 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiUint16x16 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiUint16x32 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiUint32x8 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiUint32x16 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetHiUint64x4 x) => (VEXTRACTI128128 [1] x)
|
|
(GetHiUint64x8 x) => (VEXTRACTI64X4256 [1] x)
|
|
(GetLoFloat32x8 x) => (VEXTRACTF128128 [0] x)
|
|
(GetLoFloat32x16 x) => (VEXTRACTF64X4256 [0] x)
|
|
(GetLoFloat64x4 x) => (VEXTRACTF128128 [0] x)
|
|
(GetLoFloat64x8 x) => (VEXTRACTF64X4256 [0] x)
|
|
(GetLoInt8x32 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoInt8x64 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoInt16x16 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoInt16x32 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoInt32x8 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoInt32x16 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoInt64x4 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoInt64x8 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoUint8x32 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoUint8x64 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoUint16x16 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoUint16x32 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoUint32x8 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoUint32x16 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GetLoUint64x4 x) => (VEXTRACTI128128 [0] x)
|
|
(GetLoUint64x8 x) => (VEXTRACTI64X4256 [0] x)
|
|
(GreaterFloat32x4 x y) => (VCMPPS128 [14] x y)
|
|
(GreaterFloat32x8 x y) => (VCMPPS256 [14] x y)
|
|
(GreaterFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [14] x y))
|
|
(GreaterFloat64x2 x y) => (VCMPPD128 [14] x y)
|
|
(GreaterFloat64x4 x y) => (VCMPPD256 [14] x y)
|
|
(GreaterFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [14] x y))
|
|
(GreaterInt8x16 ...) => (VPCMPGTB128 ...)
|
|
(GreaterInt8x32 ...) => (VPCMPGTB256 ...)
|
|
(GreaterInt8x64 x y) => (VPMOVMToVec8x64 (VPCMPGTB512 x y))
|
|
(GreaterInt16x8 ...) => (VPCMPGTW128 ...)
|
|
(GreaterInt16x16 ...) => (VPCMPGTW256 ...)
|
|
(GreaterInt16x32 x y) => (VPMOVMToVec16x32 (VPCMPGTW512 x y))
|
|
(GreaterInt32x4 ...) => (VPCMPGTD128 ...)
|
|
(GreaterInt32x8 ...) => (VPCMPGTD256 ...)
|
|
(GreaterInt32x16 x y) => (VPMOVMToVec32x16 (VPCMPGTD512 x y))
|
|
(GreaterInt64x2 ...) => (VPCMPGTQ128 ...)
|
|
(GreaterInt64x4 ...) => (VPCMPGTQ256 ...)
|
|
(GreaterInt64x8 x y) => (VPMOVMToVec64x8 (VPCMPGTQ512 x y))
|
|
(GreaterUint8x64 x y) => (VPMOVMToVec8x64 (VPCMPUB512 [14] x y))
|
|
(GreaterUint16x32 x y) => (VPMOVMToVec16x32 (VPCMPUW512 [14] x y))
|
|
(GreaterUint32x16 x y) => (VPMOVMToVec32x16 (VPCMPUD512 [14] x y))
|
|
(GreaterUint64x8 x y) => (VPMOVMToVec64x8 (VPCMPUQ512 [14] x y))
|
|
(GreaterEqualFloat32x4 x y) => (VCMPPS128 [13] x y)
|
|
(GreaterEqualFloat32x8 x y) => (VCMPPS256 [13] x y)
|
|
(GreaterEqualFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [13] x y))
|
|
(GreaterEqualFloat64x2 x y) => (VCMPPD128 [13] x y)
|
|
(GreaterEqualFloat64x4 x y) => (VCMPPD256 [13] x y)
|
|
(GreaterEqualFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [13] x y))
|
|
(GreaterEqualInt8x64 x y) => (VPMOVMToVec8x64 (VPCMPB512 [13] x y))
|
|
(GreaterEqualInt16x32 x y) => (VPMOVMToVec16x32 (VPCMPW512 [13] x y))
|
|
(GreaterEqualInt32x16 x y) => (VPMOVMToVec32x16 (VPCMPD512 [13] x y))
|
|
(GreaterEqualInt64x8 x y) => (VPMOVMToVec64x8 (VPCMPQ512 [13] x y))
|
|
(GreaterEqualUint8x64 x y) => (VPMOVMToVec8x64 (VPCMPUB512 [13] x y))
|
|
(GreaterEqualUint16x32 x y) => (VPMOVMToVec16x32 (VPCMPUW512 [13] x y))
|
|
(GreaterEqualUint32x16 x y) => (VPMOVMToVec32x16 (VPCMPUD512 [13] x y))
|
|
(GreaterEqualUint64x8 x y) => (VPMOVMToVec64x8 (VPCMPUQ512 [13] x y))
|
|
(InterleaveHiInt16x8 ...) => (VPUNPCKHWD128 ...)
|
|
(InterleaveHiInt32x4 ...) => (VPUNPCKHDQ128 ...)
|
|
(InterleaveHiInt64x2 ...) => (VPUNPCKHQDQ128 ...)
|
|
(InterleaveHiUint16x8 ...) => (VPUNPCKHWD128 ...)
|
|
(InterleaveHiUint32x4 ...) => (VPUNPCKHDQ128 ...)
|
|
(InterleaveHiUint64x2 ...) => (VPUNPCKHQDQ128 ...)
|
|
(InterleaveHiGroupedInt16x16 ...) => (VPUNPCKHWD256 ...)
|
|
(InterleaveHiGroupedInt16x32 ...) => (VPUNPCKHWD512 ...)
|
|
(InterleaveHiGroupedInt32x8 ...) => (VPUNPCKHDQ256 ...)
|
|
(InterleaveHiGroupedInt32x16 ...) => (VPUNPCKHDQ512 ...)
|
|
(InterleaveHiGroupedInt64x4 ...) => (VPUNPCKHQDQ256 ...)
|
|
(InterleaveHiGroupedInt64x8 ...) => (VPUNPCKHQDQ512 ...)
|
|
(InterleaveHiGroupedUint16x16 ...) => (VPUNPCKHWD256 ...)
|
|
(InterleaveHiGroupedUint16x32 ...) => (VPUNPCKHWD512 ...)
|
|
(InterleaveHiGroupedUint32x8 ...) => (VPUNPCKHDQ256 ...)
|
|
(InterleaveHiGroupedUint32x16 ...) => (VPUNPCKHDQ512 ...)
|
|
(InterleaveHiGroupedUint64x4 ...) => (VPUNPCKHQDQ256 ...)
|
|
(InterleaveHiGroupedUint64x8 ...) => (VPUNPCKHQDQ512 ...)
|
|
(InterleaveLoInt16x8 ...) => (VPUNPCKLWD128 ...)
|
|
(InterleaveLoInt32x4 ...) => (VPUNPCKLDQ128 ...)
|
|
(InterleaveLoInt64x2 ...) => (VPUNPCKLQDQ128 ...)
|
|
(InterleaveLoUint16x8 ...) => (VPUNPCKLWD128 ...)
|
|
(InterleaveLoUint32x4 ...) => (VPUNPCKLDQ128 ...)
|
|
(InterleaveLoUint64x2 ...) => (VPUNPCKLQDQ128 ...)
|
|
(InterleaveLoGroupedInt16x16 ...) => (VPUNPCKLWD256 ...)
|
|
(InterleaveLoGroupedInt16x32 ...) => (VPUNPCKLWD512 ...)
|
|
(InterleaveLoGroupedInt32x8 ...) => (VPUNPCKLDQ256 ...)
|
|
(InterleaveLoGroupedInt32x16 ...) => (VPUNPCKLDQ512 ...)
|
|
(InterleaveLoGroupedInt64x4 ...) => (VPUNPCKLQDQ256 ...)
|
|
(InterleaveLoGroupedInt64x8 ...) => (VPUNPCKLQDQ512 ...)
|
|
(InterleaveLoGroupedUint16x16 ...) => (VPUNPCKLWD256 ...)
|
|
(InterleaveLoGroupedUint16x32 ...) => (VPUNPCKLWD512 ...)
|
|
(InterleaveLoGroupedUint32x8 ...) => (VPUNPCKLDQ256 ...)
|
|
(InterleaveLoGroupedUint32x16 ...) => (VPUNPCKLDQ512 ...)
|
|
(InterleaveLoGroupedUint64x4 ...) => (VPUNPCKLQDQ256 ...)
|
|
(InterleaveLoGroupedUint64x8 ...) => (VPUNPCKLQDQ512 ...)
|
|
(IsNanFloat32x4 x y) => (VCMPPS128 [3] x y)
|
|
(IsNanFloat32x8 x y) => (VCMPPS256 [3] x y)
|
|
(IsNanFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [3] x y))
|
|
(IsNanFloat64x2 x y) => (VCMPPD128 [3] x y)
|
|
(IsNanFloat64x4 x y) => (VCMPPD256 [3] x y)
|
|
(IsNanFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [3] x y))
|
|
(LeadingZerosInt32x4 ...) => (VPLZCNTD128 ...)
|
|
(LeadingZerosInt32x8 ...) => (VPLZCNTD256 ...)
|
|
(LeadingZerosInt32x16 ...) => (VPLZCNTD512 ...)
|
|
(LeadingZerosInt64x2 ...) => (VPLZCNTQ128 ...)
|
|
(LeadingZerosInt64x4 ...) => (VPLZCNTQ256 ...)
|
|
(LeadingZerosInt64x8 ...) => (VPLZCNTQ512 ...)
|
|
(LeadingZerosUint32x4 ...) => (VPLZCNTD128 ...)
|
|
(LeadingZerosUint32x8 ...) => (VPLZCNTD256 ...)
|
|
(LeadingZerosUint32x16 ...) => (VPLZCNTD512 ...)
|
|
(LeadingZerosUint64x2 ...) => (VPLZCNTQ128 ...)
|
|
(LeadingZerosUint64x4 ...) => (VPLZCNTQ256 ...)
|
|
(LeadingZerosUint64x8 ...) => (VPLZCNTQ512 ...)
|
|
(LessFloat32x4 x y) => (VCMPPS128 [1] x y)
|
|
(LessFloat32x8 x y) => (VCMPPS256 [1] x y)
|
|
(LessFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [1] x y))
|
|
(LessFloat64x2 x y) => (VCMPPD128 [1] x y)
|
|
(LessFloat64x4 x y) => (VCMPPD256 [1] x y)
|
|
(LessFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [1] x y))
|
|
(LessInt8x64 x y) => (VPMOVMToVec8x64 (VPCMPB512 [1] x y))
|
|
(LessInt16x32 x y) => (VPMOVMToVec16x32 (VPCMPW512 [1] x y))
|
|
(LessInt32x16 x y) => (VPMOVMToVec32x16 (VPCMPD512 [1] x y))
|
|
(LessInt64x8 x y) => (VPMOVMToVec64x8 (VPCMPQ512 [1] x y))
|
|
(LessUint8x64 x y) => (VPMOVMToVec8x64 (VPCMPUB512 [1] x y))
|
|
(LessUint16x32 x y) => (VPMOVMToVec16x32 (VPCMPUW512 [1] x y))
|
|
(LessUint32x16 x y) => (VPMOVMToVec32x16 (VPCMPUD512 [1] x y))
|
|
(LessUint64x8 x y) => (VPMOVMToVec64x8 (VPCMPUQ512 [1] x y))
|
|
(LessEqualFloat32x4 x y) => (VCMPPS128 [2] x y)
|
|
(LessEqualFloat32x8 x y) => (VCMPPS256 [2] x y)
|
|
(LessEqualFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [2] x y))
|
|
(LessEqualFloat64x2 x y) => (VCMPPD128 [2] x y)
|
|
(LessEqualFloat64x4 x y) => (VCMPPD256 [2] x y)
|
|
(LessEqualFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [2] x y))
|
|
(LessEqualInt8x64 x y) => (VPMOVMToVec8x64 (VPCMPB512 [2] x y))
|
|
(LessEqualInt16x32 x y) => (VPMOVMToVec16x32 (VPCMPW512 [2] x y))
|
|
(LessEqualInt32x16 x y) => (VPMOVMToVec32x16 (VPCMPD512 [2] x y))
|
|
(LessEqualInt64x8 x y) => (VPMOVMToVec64x8 (VPCMPQ512 [2] x y))
|
|
(LessEqualUint8x64 x y) => (VPMOVMToVec8x64 (VPCMPUB512 [2] x y))
|
|
(LessEqualUint16x32 x y) => (VPMOVMToVec16x32 (VPCMPUW512 [2] x y))
|
|
(LessEqualUint32x16 x y) => (VPMOVMToVec32x16 (VPCMPUD512 [2] x y))
|
|
(LessEqualUint64x8 x y) => (VPMOVMToVec64x8 (VPCMPUQ512 [2] x y))
|
|
(MaxFloat32x4 ...) => (VMAXPS128 ...)
|
|
(MaxFloat32x8 ...) => (VMAXPS256 ...)
|
|
(MaxFloat32x16 ...) => (VMAXPS512 ...)
|
|
(MaxFloat64x2 ...) => (VMAXPD128 ...)
|
|
(MaxFloat64x4 ...) => (VMAXPD256 ...)
|
|
(MaxFloat64x8 ...) => (VMAXPD512 ...)
|
|
(MaxInt8x16 ...) => (VPMAXSB128 ...)
|
|
(MaxInt8x32 ...) => (VPMAXSB256 ...)
|
|
(MaxInt8x64 ...) => (VPMAXSB512 ...)
|
|
(MaxInt16x8 ...) => (VPMAXSW128 ...)
|
|
(MaxInt16x16 ...) => (VPMAXSW256 ...)
|
|
(MaxInt16x32 ...) => (VPMAXSW512 ...)
|
|
(MaxInt32x4 ...) => (VPMAXSD128 ...)
|
|
(MaxInt32x8 ...) => (VPMAXSD256 ...)
|
|
(MaxInt32x16 ...) => (VPMAXSD512 ...)
|
|
(MaxInt64x2 ...) => (VPMAXSQ128 ...)
|
|
(MaxInt64x4 ...) => (VPMAXSQ256 ...)
|
|
(MaxInt64x8 ...) => (VPMAXSQ512 ...)
|
|
(MaxUint8x16 ...) => (VPMAXUB128 ...)
|
|
(MaxUint8x32 ...) => (VPMAXUB256 ...)
|
|
(MaxUint8x64 ...) => (VPMAXUB512 ...)
|
|
(MaxUint16x8 ...) => (VPMAXUW128 ...)
|
|
(MaxUint16x16 ...) => (VPMAXUW256 ...)
|
|
(MaxUint16x32 ...) => (VPMAXUW512 ...)
|
|
(MaxUint32x4 ...) => (VPMAXUD128 ...)
|
|
(MaxUint32x8 ...) => (VPMAXUD256 ...)
|
|
(MaxUint32x16 ...) => (VPMAXUD512 ...)
|
|
(MaxUint64x2 ...) => (VPMAXUQ128 ...)
|
|
(MaxUint64x4 ...) => (VPMAXUQ256 ...)
|
|
(MaxUint64x8 ...) => (VPMAXUQ512 ...)
|
|
(MinFloat32x4 ...) => (VMINPS128 ...)
|
|
(MinFloat32x8 ...) => (VMINPS256 ...)
|
|
(MinFloat32x16 ...) => (VMINPS512 ...)
|
|
(MinFloat64x2 ...) => (VMINPD128 ...)
|
|
(MinFloat64x4 ...) => (VMINPD256 ...)
|
|
(MinFloat64x8 ...) => (VMINPD512 ...)
|
|
(MinInt8x16 ...) => (VPMINSB128 ...)
|
|
(MinInt8x32 ...) => (VPMINSB256 ...)
|
|
(MinInt8x64 ...) => (VPMINSB512 ...)
|
|
(MinInt16x8 ...) => (VPMINSW128 ...)
|
|
(MinInt16x16 ...) => (VPMINSW256 ...)
|
|
(MinInt16x32 ...) => (VPMINSW512 ...)
|
|
(MinInt32x4 ...) => (VPMINSD128 ...)
|
|
(MinInt32x8 ...) => (VPMINSD256 ...)
|
|
(MinInt32x16 ...) => (VPMINSD512 ...)
|
|
(MinInt64x2 ...) => (VPMINSQ128 ...)
|
|
(MinInt64x4 ...) => (VPMINSQ256 ...)
|
|
(MinInt64x8 ...) => (VPMINSQ512 ...)
|
|
(MinUint8x16 ...) => (VPMINUB128 ...)
|
|
(MinUint8x32 ...) => (VPMINUB256 ...)
|
|
(MinUint8x64 ...) => (VPMINUB512 ...)
|
|
(MinUint16x8 ...) => (VPMINUW128 ...)
|
|
(MinUint16x16 ...) => (VPMINUW256 ...)
|
|
(MinUint16x32 ...) => (VPMINUW512 ...)
|
|
(MinUint32x4 ...) => (VPMINUD128 ...)
|
|
(MinUint32x8 ...) => (VPMINUD256 ...)
|
|
(MinUint32x16 ...) => (VPMINUD512 ...)
|
|
(MinUint64x2 ...) => (VPMINUQ128 ...)
|
|
(MinUint64x4 ...) => (VPMINUQ256 ...)
|
|
(MinUint64x8 ...) => (VPMINUQ512 ...)
|
|
(MulFloat32x4 ...) => (VMULPS128 ...)
|
|
(MulFloat32x8 ...) => (VMULPS256 ...)
|
|
(MulFloat32x16 ...) => (VMULPS512 ...)
|
|
(MulFloat64x2 ...) => (VMULPD128 ...)
|
|
(MulFloat64x4 ...) => (VMULPD256 ...)
|
|
(MulFloat64x8 ...) => (VMULPD512 ...)
|
|
(MulInt16x8 ...) => (VPMULLW128 ...)
|
|
(MulInt16x16 ...) => (VPMULLW256 ...)
|
|
(MulInt16x32 ...) => (VPMULLW512 ...)
|
|
(MulInt32x4 ...) => (VPMULLD128 ...)
|
|
(MulInt32x8 ...) => (VPMULLD256 ...)
|
|
(MulInt32x16 ...) => (VPMULLD512 ...)
|
|
(MulInt64x2 ...) => (VPMULLQ128 ...)
|
|
(MulInt64x4 ...) => (VPMULLQ256 ...)
|
|
(MulInt64x8 ...) => (VPMULLQ512 ...)
|
|
(MulUint16x8 ...) => (VPMULLW128 ...)
|
|
(MulUint16x16 ...) => (VPMULLW256 ...)
|
|
(MulUint16x32 ...) => (VPMULLW512 ...)
|
|
(MulUint32x4 ...) => (VPMULLD128 ...)
|
|
(MulUint32x8 ...) => (VPMULLD256 ...)
|
|
(MulUint32x16 ...) => (VPMULLD512 ...)
|
|
(MulUint64x2 ...) => (VPMULLQ128 ...)
|
|
(MulUint64x4 ...) => (VPMULLQ256 ...)
|
|
(MulUint64x8 ...) => (VPMULLQ512 ...)
|
|
(MulAddFloat32x4 ...) => (VFMADD213PS128 ...)
|
|
(MulAddFloat32x8 ...) => (VFMADD213PS256 ...)
|
|
(MulAddFloat32x16 ...) => (VFMADD213PS512 ...)
|
|
(MulAddFloat64x2 ...) => (VFMADD213PD128 ...)
|
|
(MulAddFloat64x4 ...) => (VFMADD213PD256 ...)
|
|
(MulAddFloat64x8 ...) => (VFMADD213PD512 ...)
|
|
(MulAddSubFloat32x4 ...) => (VFMADDSUB213PS128 ...)
|
|
(MulAddSubFloat32x8 ...) => (VFMADDSUB213PS256 ...)
|
|
(MulAddSubFloat32x16 ...) => (VFMADDSUB213PS512 ...)
|
|
(MulAddSubFloat64x2 ...) => (VFMADDSUB213PD128 ...)
|
|
(MulAddSubFloat64x4 ...) => (VFMADDSUB213PD256 ...)
|
|
(MulAddSubFloat64x8 ...) => (VFMADDSUB213PD512 ...)
|
|
(MulEvenWidenInt32x4 ...) => (VPMULDQ128 ...)
|
|
(MulEvenWidenInt32x8 ...) => (VPMULDQ256 ...)
|
|
(MulEvenWidenUint32x4 ...) => (VPMULUDQ128 ...)
|
|
(MulEvenWidenUint32x8 ...) => (VPMULUDQ256 ...)
|
|
(MulHighInt16x8 ...) => (VPMULHW128 ...)
|
|
(MulHighInt16x16 ...) => (VPMULHW256 ...)
|
|
(MulHighInt16x32 ...) => (VPMULHW512 ...)
|
|
(MulHighUint16x8 ...) => (VPMULHUW128 ...)
|
|
(MulHighUint16x16 ...) => (VPMULHUW256 ...)
|
|
(MulHighUint16x32 ...) => (VPMULHUW512 ...)
|
|
(MulSubAddFloat32x4 ...) => (VFMSUBADD213PS128 ...)
|
|
(MulSubAddFloat32x8 ...) => (VFMSUBADD213PS256 ...)
|
|
(MulSubAddFloat32x16 ...) => (VFMSUBADD213PS512 ...)
|
|
(MulSubAddFloat64x2 ...) => (VFMSUBADD213PD128 ...)
|
|
(MulSubAddFloat64x4 ...) => (VFMSUBADD213PD256 ...)
|
|
(MulSubAddFloat64x8 ...) => (VFMSUBADD213PD512 ...)
|
|
(NotEqualFloat32x4 x y) => (VCMPPS128 [4] x y)
|
|
(NotEqualFloat32x8 x y) => (VCMPPS256 [4] x y)
|
|
(NotEqualFloat32x16 x y) => (VPMOVMToVec32x16 (VCMPPS512 [4] x y))
|
|
(NotEqualFloat64x2 x y) => (VCMPPD128 [4] x y)
|
|
(NotEqualFloat64x4 x y) => (VCMPPD256 [4] x y)
|
|
(NotEqualFloat64x8 x y) => (VPMOVMToVec64x8 (VCMPPD512 [4] x y))
|
|
(NotEqualInt8x64 x y) => (VPMOVMToVec8x64 (VPCMPB512 [4] x y))
|
|
(NotEqualInt16x32 x y) => (VPMOVMToVec16x32 (VPCMPW512 [4] x y))
|
|
(NotEqualInt32x16 x y) => (VPMOVMToVec32x16 (VPCMPD512 [4] x y))
|
|
(NotEqualInt64x8 x y) => (VPMOVMToVec64x8 (VPCMPQ512 [4] x y))
|
|
(NotEqualUint8x64 x y) => (VPMOVMToVec8x64 (VPCMPUB512 [4] x y))
|
|
(NotEqualUint16x32 x y) => (VPMOVMToVec16x32 (VPCMPUW512 [4] x y))
|
|
(NotEqualUint32x16 x y) => (VPMOVMToVec32x16 (VPCMPUD512 [4] x y))
|
|
(NotEqualUint64x8 x y) => (VPMOVMToVec64x8 (VPCMPUQ512 [4] x y))
|
|
(OnesCountInt8x16 ...) => (VPOPCNTB128 ...)
|
|
(OnesCountInt8x32 ...) => (VPOPCNTB256 ...)
|
|
(OnesCountInt8x64 ...) => (VPOPCNTB512 ...)
|
|
(OnesCountInt16x8 ...) => (VPOPCNTW128 ...)
|
|
(OnesCountInt16x16 ...) => (VPOPCNTW256 ...)
|
|
(OnesCountInt16x32 ...) => (VPOPCNTW512 ...)
|
|
(OnesCountInt32x4 ...) => (VPOPCNTD128 ...)
|
|
(OnesCountInt32x8 ...) => (VPOPCNTD256 ...)
|
|
(OnesCountInt32x16 ...) => (VPOPCNTD512 ...)
|
|
(OnesCountInt64x2 ...) => (VPOPCNTQ128 ...)
|
|
(OnesCountInt64x4 ...) => (VPOPCNTQ256 ...)
|
|
(OnesCountInt64x8 ...) => (VPOPCNTQ512 ...)
|
|
(OnesCountUint8x16 ...) => (VPOPCNTB128 ...)
|
|
(OnesCountUint8x32 ...) => (VPOPCNTB256 ...)
|
|
(OnesCountUint8x64 ...) => (VPOPCNTB512 ...)
|
|
(OnesCountUint16x8 ...) => (VPOPCNTW128 ...)
|
|
(OnesCountUint16x16 ...) => (VPOPCNTW256 ...)
|
|
(OnesCountUint16x32 ...) => (VPOPCNTW512 ...)
|
|
(OnesCountUint32x4 ...) => (VPOPCNTD128 ...)
|
|
(OnesCountUint32x8 ...) => (VPOPCNTD256 ...)
|
|
(OnesCountUint32x16 ...) => (VPOPCNTD512 ...)
|
|
(OnesCountUint64x2 ...) => (VPOPCNTQ128 ...)
|
|
(OnesCountUint64x4 ...) => (VPOPCNTQ256 ...)
|
|
(OnesCountUint64x8 ...) => (VPOPCNTQ512 ...)
|
|
(OrInt8x16 ...) => (VPOR128 ...)
|
|
(OrInt8x32 ...) => (VPOR256 ...)
|
|
(OrInt8x64 ...) => (VPORD512 ...)
|
|
(OrInt16x8 ...) => (VPOR128 ...)
|
|
(OrInt16x16 ...) => (VPOR256 ...)
|
|
(OrInt16x32 ...) => (VPORD512 ...)
|
|
(OrInt32x4 ...) => (VPOR128 ...)
|
|
(OrInt32x8 ...) => (VPOR256 ...)
|
|
(OrInt32x16 ...) => (VPORD512 ...)
|
|
(OrInt64x2 ...) => (VPOR128 ...)
|
|
(OrInt64x4 ...) => (VPOR256 ...)
|
|
(OrInt64x8 ...) => (VPORQ512 ...)
|
|
(OrUint8x16 ...) => (VPOR128 ...)
|
|
(OrUint8x32 ...) => (VPOR256 ...)
|
|
(OrUint8x64 ...) => (VPORD512 ...)
|
|
(OrUint16x8 ...) => (VPOR128 ...)
|
|
(OrUint16x16 ...) => (VPOR256 ...)
|
|
(OrUint16x32 ...) => (VPORD512 ...)
|
|
(OrUint32x4 ...) => (VPOR128 ...)
|
|
(OrUint32x8 ...) => (VPOR256 ...)
|
|
(OrUint32x16 ...) => (VPORD512 ...)
|
|
(OrUint64x2 ...) => (VPOR128 ...)
|
|
(OrUint64x4 ...) => (VPOR256 ...)
|
|
(OrUint64x8 ...) => (VPORQ512 ...)
|
|
(PermuteFloat32x8 ...) => (VPERMPS256 ...)
|
|
(PermuteFloat32x16 ...) => (VPERMPS512 ...)
|
|
(PermuteFloat64x4 ...) => (VPERMPD256 ...)
|
|
(PermuteFloat64x8 ...) => (VPERMPD512 ...)
|
|
(PermuteInt8x16 ...) => (VPSHUFB128 ...)
|
|
(PermuteInt8x32 ...) => (VPERMB256 ...)
|
|
(PermuteInt8x64 ...) => (VPERMB512 ...)
|
|
(PermuteInt16x8 ...) => (VPERMW128 ...)
|
|
(PermuteInt16x16 ...) => (VPERMW256 ...)
|
|
(PermuteInt16x32 ...) => (VPERMW512 ...)
|
|
(PermuteInt32x8 ...) => (VPERMD256 ...)
|
|
(PermuteInt32x16 ...) => (VPERMD512 ...)
|
|
(PermuteInt64x4 ...) => (VPERMQ256 ...)
|
|
(PermuteInt64x8 ...) => (VPERMQ512 ...)
|
|
(PermuteUint8x16 ...) => (VPSHUFB128 ...)
|
|
(PermuteUint8x32 ...) => (VPERMB256 ...)
|
|
(PermuteUint8x64 ...) => (VPERMB512 ...)
|
|
(PermuteUint16x8 ...) => (VPERMW128 ...)
|
|
(PermuteUint16x16 ...) => (VPERMW256 ...)
|
|
(PermuteUint16x32 ...) => (VPERMW512 ...)
|
|
(PermuteUint32x8 ...) => (VPERMD256 ...)
|
|
(PermuteUint32x16 ...) => (VPERMD512 ...)
|
|
(PermuteUint64x4 ...) => (VPERMQ256 ...)
|
|
(PermuteUint64x8 ...) => (VPERMQ512 ...)
|
|
(Permute2Float32x4 ...) => (VPERMI2PS128 ...)
|
|
(Permute2Float32x8 ...) => (VPERMI2PS256 ...)
|
|
(Permute2Float32x16 ...) => (VPERMI2PS512 ...)
|
|
(Permute2Float64x2 ...) => (VPERMI2PD128 ...)
|
|
(Permute2Float64x4 ...) => (VPERMI2PD256 ...)
|
|
(Permute2Float64x8 ...) => (VPERMI2PD512 ...)
|
|
(Permute2Int8x16 ...) => (VPERMI2B128 ...)
|
|
(Permute2Int8x32 ...) => (VPERMI2B256 ...)
|
|
(Permute2Int8x64 ...) => (VPERMI2B512 ...)
|
|
(Permute2Int16x8 ...) => (VPERMI2W128 ...)
|
|
(Permute2Int16x16 ...) => (VPERMI2W256 ...)
|
|
(Permute2Int16x32 ...) => (VPERMI2W512 ...)
|
|
(Permute2Int32x4 ...) => (VPERMI2D128 ...)
|
|
(Permute2Int32x8 ...) => (VPERMI2D256 ...)
|
|
(Permute2Int32x16 ...) => (VPERMI2D512 ...)
|
|
(Permute2Int64x2 ...) => (VPERMI2Q128 ...)
|
|
(Permute2Int64x4 ...) => (VPERMI2Q256 ...)
|
|
(Permute2Int64x8 ...) => (VPERMI2Q512 ...)
|
|
(Permute2Uint8x16 ...) => (VPERMI2B128 ...)
|
|
(Permute2Uint8x32 ...) => (VPERMI2B256 ...)
|
|
(Permute2Uint8x64 ...) => (VPERMI2B512 ...)
|
|
(Permute2Uint16x8 ...) => (VPERMI2W128 ...)
|
|
(Permute2Uint16x16 ...) => (VPERMI2W256 ...)
|
|
(Permute2Uint16x32 ...) => (VPERMI2W512 ...)
|
|
(Permute2Uint32x4 ...) => (VPERMI2D128 ...)
|
|
(Permute2Uint32x8 ...) => (VPERMI2D256 ...)
|
|
(Permute2Uint32x16 ...) => (VPERMI2D512 ...)
|
|
(Permute2Uint64x2 ...) => (VPERMI2Q128 ...)
|
|
(Permute2Uint64x4 ...) => (VPERMI2Q256 ...)
|
|
(Permute2Uint64x8 ...) => (VPERMI2Q512 ...)
|
|
(PermuteConstantInt32x4 ...) => (VPSHUFD128 ...)
|
|
(PermuteConstantUint32x4 ...) => (VPSHUFD128 ...)
|
|
(PermuteConstantGroupedInt32x8 ...) => (VPSHUFD256 ...)
|
|
(PermuteConstantGroupedInt32x16 ...) => (VPSHUFD512 ...)
|
|
(PermuteConstantGroupedUint32x8 ...) => (VPSHUFD256 ...)
|
|
(PermuteConstantGroupedUint32x16 ...) => (VPSHUFD512 ...)
|
|
(PermuteConstantHiInt16x8 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantHiInt32x4 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantHiUint16x8 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantHiUint32x4 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantHiGroupedInt16x16 ...) => (VPSHUFHW256 ...)
|
|
(PermuteConstantHiGroupedInt16x32 ...) => (VPSHUFHW512 ...)
|
|
(PermuteConstantHiGroupedUint16x16 ...) => (VPSHUFHW256 ...)
|
|
(PermuteConstantHiGroupedUint16x32 ...) => (VPSHUFHW512 ...)
|
|
(PermuteConstantLoInt16x8 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantLoInt32x4 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantLoUint16x8 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantLoUint32x4 ...) => (VPSHUFHW128 ...)
|
|
(PermuteConstantLoGroupedInt16x16 ...) => (VPSHUFHW256 ...)
|
|
(PermuteConstantLoGroupedInt16x32 ...) => (VPSHUFHW512 ...)
|
|
(PermuteConstantLoGroupedUint16x16 ...) => (VPSHUFHW256 ...)
|
|
(PermuteConstantLoGroupedUint16x32 ...) => (VPSHUFHW512 ...)
|
|
(PermuteGroupedInt8x32 ...) => (VPSHUFB256 ...)
|
|
(PermuteGroupedInt8x64 ...) => (VPSHUFB512 ...)
|
|
(PermuteGroupedUint8x32 ...) => (VPSHUFB256 ...)
|
|
(PermuteGroupedUint8x64 ...) => (VPSHUFB512 ...)
|
|
(ReciprocalFloat32x4 ...) => (VRCPPS128 ...)
|
|
(ReciprocalFloat32x8 ...) => (VRCPPS256 ...)
|
|
(ReciprocalFloat32x16 ...) => (VRCP14PS512 ...)
|
|
(ReciprocalFloat64x2 ...) => (VRCP14PD128 ...)
|
|
(ReciprocalFloat64x4 ...) => (VRCP14PD256 ...)
|
|
(ReciprocalFloat64x8 ...) => (VRCP14PD512 ...)
|
|
(ReciprocalSqrtFloat32x4 ...) => (VRSQRTPS128 ...)
|
|
(ReciprocalSqrtFloat32x8 ...) => (VRSQRTPS256 ...)
|
|
(ReciprocalSqrtFloat32x16 ...) => (VRSQRT14PS512 ...)
|
|
(ReciprocalSqrtFloat64x2 ...) => (VRSQRT14PD128 ...)
|
|
(ReciprocalSqrtFloat64x4 ...) => (VRSQRT14PD256 ...)
|
|
(ReciprocalSqrtFloat64x8 ...) => (VRSQRT14PD512 ...)
|
|
(RotateAllLeftInt32x4 ...) => (VPROLD128 ...)
|
|
(RotateAllLeftInt32x8 ...) => (VPROLD256 ...)
|
|
(RotateAllLeftInt32x16 ...) => (VPROLD512 ...)
|
|
(RotateAllLeftInt64x2 ...) => (VPROLQ128 ...)
|
|
(RotateAllLeftInt64x4 ...) => (VPROLQ256 ...)
|
|
(RotateAllLeftInt64x8 ...) => (VPROLQ512 ...)
|
|
(RotateAllLeftUint32x4 ...) => (VPROLD128 ...)
|
|
(RotateAllLeftUint32x8 ...) => (VPROLD256 ...)
|
|
(RotateAllLeftUint32x16 ...) => (VPROLD512 ...)
|
|
(RotateAllLeftUint64x2 ...) => (VPROLQ128 ...)
|
|
(RotateAllLeftUint64x4 ...) => (VPROLQ256 ...)
|
|
(RotateAllLeftUint64x8 ...) => (VPROLQ512 ...)
|
|
(RotateAllRightInt32x4 ...) => (VPRORD128 ...)
|
|
(RotateAllRightInt32x8 ...) => (VPRORD256 ...)
|
|
(RotateAllRightInt32x16 ...) => (VPRORD512 ...)
|
|
(RotateAllRightInt64x2 ...) => (VPRORQ128 ...)
|
|
(RotateAllRightInt64x4 ...) => (VPRORQ256 ...)
|
|
(RotateAllRightInt64x8 ...) => (VPRORQ512 ...)
|
|
(RotateAllRightUint32x4 ...) => (VPRORD128 ...)
|
|
(RotateAllRightUint32x8 ...) => (VPRORD256 ...)
|
|
(RotateAllRightUint32x16 ...) => (VPRORD512 ...)
|
|
(RotateAllRightUint64x2 ...) => (VPRORQ128 ...)
|
|
(RotateAllRightUint64x4 ...) => (VPRORQ256 ...)
|
|
(RotateAllRightUint64x8 ...) => (VPRORQ512 ...)
|
|
(RotateLeftInt32x4 ...) => (VPROLVD128 ...)
|
|
(RotateLeftInt32x8 ...) => (VPROLVD256 ...)
|
|
(RotateLeftInt32x16 ...) => (VPROLVD512 ...)
|
|
(RotateLeftInt64x2 ...) => (VPROLVQ128 ...)
|
|
(RotateLeftInt64x4 ...) => (VPROLVQ256 ...)
|
|
(RotateLeftInt64x8 ...) => (VPROLVQ512 ...)
|
|
(RotateLeftUint32x4 ...) => (VPROLVD128 ...)
|
|
(RotateLeftUint32x8 ...) => (VPROLVD256 ...)
|
|
(RotateLeftUint32x16 ...) => (VPROLVD512 ...)
|
|
(RotateLeftUint64x2 ...) => (VPROLVQ128 ...)
|
|
(RotateLeftUint64x4 ...) => (VPROLVQ256 ...)
|
|
(RotateLeftUint64x8 ...) => (VPROLVQ512 ...)
|
|
(RotateRightInt32x4 ...) => (VPRORVD128 ...)
|
|
(RotateRightInt32x8 ...) => (VPRORVD256 ...)
|
|
(RotateRightInt32x16 ...) => (VPRORVD512 ...)
|
|
(RotateRightInt64x2 ...) => (VPRORVQ128 ...)
|
|
(RotateRightInt64x4 ...) => (VPRORVQ256 ...)
|
|
(RotateRightInt64x8 ...) => (VPRORVQ512 ...)
|
|
(RotateRightUint32x4 ...) => (VPRORVD128 ...)
|
|
(RotateRightUint32x8 ...) => (VPRORVD256 ...)
|
|
(RotateRightUint32x16 ...) => (VPRORVD512 ...)
|
|
(RotateRightUint64x2 ...) => (VPRORVQ128 ...)
|
|
(RotateRightUint64x4 ...) => (VPRORVQ256 ...)
|
|
(RotateRightUint64x8 ...) => (VPRORVQ512 ...)
|
|
(RoundToEvenFloat32x4 x) => (VROUNDPS128 [0] x)
|
|
(RoundToEvenFloat32x8 x) => (VROUNDPS256 [0] x)
|
|
(RoundToEvenFloat64x2 x) => (VROUNDPD128 [0] x)
|
|
(RoundToEvenFloat64x4 x) => (VROUNDPD256 [0] x)
|
|
(RoundToEvenScaledFloat32x4 [a] x) => (VRNDSCALEPS128 [a+0] x)
|
|
(RoundToEvenScaledFloat32x8 [a] x) => (VRNDSCALEPS256 [a+0] x)
|
|
(RoundToEvenScaledFloat32x16 [a] x) => (VRNDSCALEPS512 [a+0] x)
|
|
(RoundToEvenScaledFloat64x2 [a] x) => (VRNDSCALEPD128 [a+0] x)
|
|
(RoundToEvenScaledFloat64x4 [a] x) => (VRNDSCALEPD256 [a+0] x)
|
|
(RoundToEvenScaledFloat64x8 [a] x) => (VRNDSCALEPD512 [a+0] x)
|
|
(RoundToEvenScaledResidueFloat32x4 [a] x) => (VREDUCEPS128 [a+0] x)
|
|
(RoundToEvenScaledResidueFloat32x8 [a] x) => (VREDUCEPS256 [a+0] x)
|
|
(RoundToEvenScaledResidueFloat32x16 [a] x) => (VREDUCEPS512 [a+0] x)
|
|
(RoundToEvenScaledResidueFloat64x2 [a] x) => (VREDUCEPD128 [a+0] x)
|
|
(RoundToEvenScaledResidueFloat64x4 [a] x) => (VREDUCEPD256 [a+0] x)
|
|
(RoundToEvenScaledResidueFloat64x8 [a] x) => (VREDUCEPD512 [a+0] x)
|
|
(SHA1Msg1Int32x4 ...) => (SHA1MSG1128 ...)
|
|
(SHA1Msg1Uint32x4 ...) => (SHA1MSG1128 ...)
|
|
(SHA1Msg2Int32x4 ...) => (SHA1MSG2128 ...)
|
|
(SHA1Msg2Uint32x4 ...) => (SHA1MSG2128 ...)
|
|
(SHA1NextEInt32x4 ...) => (SHA1NEXTE128 ...)
|
|
(SHA1NextEUint32x4 ...) => (SHA1NEXTE128 ...)
|
|
(SHA1Round4Int32x4 ...) => (SHA1RNDS4128 ...)
|
|
(SHA1Round4Uint32x4 ...) => (SHA1RNDS4128 ...)
|
|
(SHA256Msg1Int32x4 ...) => (SHA256MSG1128 ...)
|
|
(SHA256Msg1Uint32x4 ...) => (SHA256MSG1128 ...)
|
|
(SHA256Msg2Int32x4 ...) => (SHA256MSG1128 ...)
|
|
(SHA256Msg2Uint32x4 ...) => (SHA256MSG1128 ...)
|
|
(SHA256Rounds2Int32x4 ...) => (SHA256RNDS2128 ...)
|
|
(SHA256Rounds2Uint32x4 ...) => (SHA256RNDS2128 ...)
|
|
(ScaleFloat32x4 ...) => (VSCALEFPS128 ...)
|
|
(ScaleFloat32x8 ...) => (VSCALEFPS256 ...)
|
|
(ScaleFloat32x16 ...) => (VSCALEFPS512 ...)
|
|
(ScaleFloat64x2 ...) => (VSCALEFPD128 ...)
|
|
(ScaleFloat64x4 ...) => (VSCALEFPD256 ...)
|
|
(ScaleFloat64x8 ...) => (VSCALEFPD512 ...)
|
|
(Select128FromPairFloat32x8 ...) => (VPERM2F128256 ...)
|
|
(Select128FromPairFloat64x4 ...) => (VPERM2F128256 ...)
|
|
(Select128FromPairInt32x8 ...) => (VPERM2I128256 ...)
|
|
(Select128FromPairInt64x4 ...) => (VPERM2I128256 ...)
|
|
(Select128FromPairUint32x8 ...) => (VPERM2I128256 ...)
|
|
(Select128FromPairUint64x4 ...) => (VPERM2I128256 ...)
|
|
(SetElemFloat32x4 ...) => (VPINSRD128 ...)
|
|
(SetElemFloat64x2 ...) => (VPINSRQ128 ...)
|
|
(SetElemInt8x16 ...) => (VPINSRB128 ...)
|
|
(SetElemInt16x8 ...) => (VPINSRW128 ...)
|
|
(SetElemInt32x4 ...) => (VPINSRD128 ...)
|
|
(SetElemInt64x2 ...) => (VPINSRQ128 ...)
|
|
(SetElemUint8x16 ...) => (VPINSRB128 ...)
|
|
(SetElemUint16x8 ...) => (VPINSRW128 ...)
|
|
(SetElemUint32x4 ...) => (VPINSRD128 ...)
|
|
(SetElemUint64x2 ...) => (VPINSRQ128 ...)
|
|
(SetHiFloat32x8 x y) => (VINSERTF128256 [1] x y)
|
|
(SetHiFloat32x16 x y) => (VINSERTF64X4512 [1] x y)
|
|
(SetHiFloat64x4 x y) => (VINSERTF128256 [1] x y)
|
|
(SetHiFloat64x8 x y) => (VINSERTF64X4512 [1] x y)
|
|
(SetHiInt8x32 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiInt8x64 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiInt16x16 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiInt16x32 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiInt32x8 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiInt32x16 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiInt64x4 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiInt64x8 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiUint8x32 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiUint8x64 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiUint16x16 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiUint16x32 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiUint32x8 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiUint32x16 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetHiUint64x4 x y) => (VINSERTI128256 [1] x y)
|
|
(SetHiUint64x8 x y) => (VINSERTI64X4512 [1] x y)
|
|
(SetLoFloat32x8 x y) => (VINSERTF128256 [0] x y)
|
|
(SetLoFloat32x16 x y) => (VINSERTF64X4512 [0] x y)
|
|
(SetLoFloat64x4 x y) => (VINSERTF128256 [0] x y)
|
|
(SetLoFloat64x8 x y) => (VINSERTF64X4512 [0] x y)
|
|
(SetLoInt8x32 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoInt8x64 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoInt16x16 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoInt16x32 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoInt32x8 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoInt32x16 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoInt64x4 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoInt64x8 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoUint8x32 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoUint8x64 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoUint16x16 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoUint16x32 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoUint32x8 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoUint32x16 x y) => (VINSERTI64X4512 [0] x y)
|
|
(SetLoUint64x4 x y) => (VINSERTI128256 [0] x y)
|
|
(SetLoUint64x8 x y) => (VINSERTI64X4512 [0] x y)
|
|
(ShiftAllLeftInt16x8 ...) => (VPSLLW128 ...)
|
|
(VPSLLW128 x (MOVQconst [c])) => (VPSLLW128const [uint8(c)] x)
|
|
(ShiftAllLeftInt16x16 ...) => (VPSLLW256 ...)
|
|
(VPSLLW256 x (MOVQconst [c])) => (VPSLLW256const [uint8(c)] x)
|
|
(ShiftAllLeftInt16x32 ...) => (VPSLLW512 ...)
|
|
(VPSLLW512 x (MOVQconst [c])) => (VPSLLW512const [uint8(c)] x)
|
|
(ShiftAllLeftInt32x4 ...) => (VPSLLD128 ...)
|
|
(VPSLLD128 x (MOVQconst [c])) => (VPSLLD128const [uint8(c)] x)
|
|
(ShiftAllLeftInt32x8 ...) => (VPSLLD256 ...)
|
|
(VPSLLD256 x (MOVQconst [c])) => (VPSLLD256const [uint8(c)] x)
|
|
(ShiftAllLeftInt32x16 ...) => (VPSLLD512 ...)
|
|
(VPSLLD512 x (MOVQconst [c])) => (VPSLLD512const [uint8(c)] x)
|
|
(ShiftAllLeftInt64x2 ...) => (VPSLLQ128 ...)
|
|
(VPSLLQ128 x (MOVQconst [c])) => (VPSLLQ128const [uint8(c)] x)
|
|
(ShiftAllLeftInt64x4 ...) => (VPSLLQ256 ...)
|
|
(VPSLLQ256 x (MOVQconst [c])) => (VPSLLQ256const [uint8(c)] x)
|
|
(ShiftAllLeftInt64x8 ...) => (VPSLLQ512 ...)
|
|
(VPSLLQ512 x (MOVQconst [c])) => (VPSLLQ512const [uint8(c)] x)
|
|
(ShiftAllLeftUint16x8 ...) => (VPSLLW128 ...)
|
|
(ShiftAllLeftUint16x16 ...) => (VPSLLW256 ...)
|
|
(ShiftAllLeftUint16x32 ...) => (VPSLLW512 ...)
|
|
(ShiftAllLeftUint32x4 ...) => (VPSLLD128 ...)
|
|
(ShiftAllLeftUint32x8 ...) => (VPSLLD256 ...)
|
|
(ShiftAllLeftUint32x16 ...) => (VPSLLD512 ...)
|
|
(ShiftAllLeftUint64x2 ...) => (VPSLLQ128 ...)
|
|
(ShiftAllLeftUint64x4 ...) => (VPSLLQ256 ...)
|
|
(ShiftAllLeftUint64x8 ...) => (VPSLLQ512 ...)
|
|
(ShiftAllLeftConcatInt16x8 ...) => (VPSHLDW128 ...)
|
|
(ShiftAllLeftConcatInt16x16 ...) => (VPSHLDW256 ...)
|
|
(ShiftAllLeftConcatInt16x32 ...) => (VPSHLDW512 ...)
|
|
(ShiftAllLeftConcatInt32x4 ...) => (VPSHLDD128 ...)
|
|
(ShiftAllLeftConcatInt32x8 ...) => (VPSHLDD256 ...)
|
|
(ShiftAllLeftConcatInt32x16 ...) => (VPSHLDD512 ...)
|
|
(ShiftAllLeftConcatInt64x2 ...) => (VPSHLDQ128 ...)
|
|
(ShiftAllLeftConcatInt64x4 ...) => (VPSHLDQ256 ...)
|
|
(ShiftAllLeftConcatInt64x8 ...) => (VPSHLDQ512 ...)
|
|
(ShiftAllLeftConcatUint16x8 ...) => (VPSHLDW128 ...)
|
|
(ShiftAllLeftConcatUint16x16 ...) => (VPSHLDW256 ...)
|
|
(ShiftAllLeftConcatUint16x32 ...) => (VPSHLDW512 ...)
|
|
(ShiftAllLeftConcatUint32x4 ...) => (VPSHLDD128 ...)
|
|
(ShiftAllLeftConcatUint32x8 ...) => (VPSHLDD256 ...)
|
|
(ShiftAllLeftConcatUint32x16 ...) => (VPSHLDD512 ...)
|
|
(ShiftAllLeftConcatUint64x2 ...) => (VPSHLDQ128 ...)
|
|
(ShiftAllLeftConcatUint64x4 ...) => (VPSHLDQ256 ...)
|
|
(ShiftAllLeftConcatUint64x8 ...) => (VPSHLDQ512 ...)
|
|
(VPSLLWMasked128 x (MOVQconst [c]) mask) => (VPSLLWMasked128const [uint8(c)] x mask)
|
|
(VPSLLWMasked256 x (MOVQconst [c]) mask) => (VPSLLWMasked256const [uint8(c)] x mask)
|
|
(VPSLLWMasked512 x (MOVQconst [c]) mask) => (VPSLLWMasked512const [uint8(c)] x mask)
|
|
(VPSLLDMasked128 x (MOVQconst [c]) mask) => (VPSLLDMasked128const [uint8(c)] x mask)
|
|
(VPSLLDMasked256 x (MOVQconst [c]) mask) => (VPSLLDMasked256const [uint8(c)] x mask)
|
|
(VPSLLDMasked512 x (MOVQconst [c]) mask) => (VPSLLDMasked512const [uint8(c)] x mask)
|
|
(VPSLLQMasked128 x (MOVQconst [c]) mask) => (VPSLLQMasked128const [uint8(c)] x mask)
|
|
(VPSLLQMasked256 x (MOVQconst [c]) mask) => (VPSLLQMasked256const [uint8(c)] x mask)
|
|
(VPSLLQMasked512 x (MOVQconst [c]) mask) => (VPSLLQMasked512const [uint8(c)] x mask)
|
|
(ShiftAllRightInt16x8 ...) => (VPSRAW128 ...)
|
|
(VPSRAW128 x (MOVQconst [c])) => (VPSRAW128const [uint8(c)] x)
|
|
(ShiftAllRightInt16x16 ...) => (VPSRAW256 ...)
|
|
(VPSRAW256 x (MOVQconst [c])) => (VPSRAW256const [uint8(c)] x)
|
|
(ShiftAllRightInt16x32 ...) => (VPSRAW512 ...)
|
|
(VPSRAW512 x (MOVQconst [c])) => (VPSRAW512const [uint8(c)] x)
|
|
(ShiftAllRightInt32x4 ...) => (VPSRAD128 ...)
|
|
(VPSRAD128 x (MOVQconst [c])) => (VPSRAD128const [uint8(c)] x)
|
|
(ShiftAllRightInt32x8 ...) => (VPSRAD256 ...)
|
|
(VPSRAD256 x (MOVQconst [c])) => (VPSRAD256const [uint8(c)] x)
|
|
(ShiftAllRightInt32x16 ...) => (VPSRAD512 ...)
|
|
(VPSRAD512 x (MOVQconst [c])) => (VPSRAD512const [uint8(c)] x)
|
|
(ShiftAllRightInt64x2 ...) => (VPSRAQ128 ...)
|
|
(VPSRAQ128 x (MOVQconst [c])) => (VPSRAQ128const [uint8(c)] x)
|
|
(ShiftAllRightInt64x4 ...) => (VPSRAQ256 ...)
|
|
(VPSRAQ256 x (MOVQconst [c])) => (VPSRAQ256const [uint8(c)] x)
|
|
(ShiftAllRightInt64x8 ...) => (VPSRAQ512 ...)
|
|
(VPSRAQ512 x (MOVQconst [c])) => (VPSRAQ512const [uint8(c)] x)
|
|
(ShiftAllRightUint16x8 ...) => (VPSRLW128 ...)
|
|
(ShiftAllRightUint16x16 ...) => (VPSRLW256 ...)
|
|
(ShiftAllRightUint16x32 ...) => (VPSRLW512 ...)
|
|
(ShiftAllRightUint32x4 ...) => (VPSRLD128 ...)
|
|
(ShiftAllRightUint32x8 ...) => (VPSRLD256 ...)
|
|
(ShiftAllRightUint32x16 ...) => (VPSRLD512 ...)
|
|
(ShiftAllRightUint64x2 ...) => (VPSRLQ128 ...)
|
|
(ShiftAllRightUint64x4 ...) => (VPSRLQ256 ...)
|
|
(ShiftAllRightUint64x8 ...) => (VPSRLQ512 ...)
|
|
(ShiftAllRightConcatInt16x8 ...) => (VPSHRDW128 ...)
|
|
(ShiftAllRightConcatInt16x16 ...) => (VPSHRDW256 ...)
|
|
(ShiftAllRightConcatInt16x32 ...) => (VPSHRDW512 ...)
|
|
(ShiftAllRightConcatInt32x4 ...) => (VPSHRDD128 ...)
|
|
(ShiftAllRightConcatInt32x8 ...) => (VPSHRDD256 ...)
|
|
(ShiftAllRightConcatInt32x16 ...) => (VPSHRDD512 ...)
|
|
(ShiftAllRightConcatInt64x2 ...) => (VPSHRDQ128 ...)
|
|
(ShiftAllRightConcatInt64x4 ...) => (VPSHRDQ256 ...)
|
|
(ShiftAllRightConcatInt64x8 ...) => (VPSHRDQ512 ...)
|
|
(ShiftAllRightConcatUint16x8 ...) => (VPSHRDW128 ...)
|
|
(ShiftAllRightConcatUint16x16 ...) => (VPSHRDW256 ...)
|
|
(ShiftAllRightConcatUint16x32 ...) => (VPSHRDW512 ...)
|
|
(ShiftAllRightConcatUint32x4 ...) => (VPSHRDD128 ...)
|
|
(ShiftAllRightConcatUint32x8 ...) => (VPSHRDD256 ...)
|
|
(ShiftAllRightConcatUint32x16 ...) => (VPSHRDD512 ...)
|
|
(ShiftAllRightConcatUint64x2 ...) => (VPSHRDQ128 ...)
|
|
(ShiftAllRightConcatUint64x4 ...) => (VPSHRDQ256 ...)
|
|
(ShiftAllRightConcatUint64x8 ...) => (VPSHRDQ512 ...)
|
|
(VPSRAWMasked128 x (MOVQconst [c]) mask) => (VPSRAWMasked128const [uint8(c)] x mask)
|
|
(VPSRAWMasked256 x (MOVQconst [c]) mask) => (VPSRAWMasked256const [uint8(c)] x mask)
|
|
(VPSRAWMasked512 x (MOVQconst [c]) mask) => (VPSRAWMasked512const [uint8(c)] x mask)
|
|
(VPSRADMasked128 x (MOVQconst [c]) mask) => (VPSRADMasked128const [uint8(c)] x mask)
|
|
(VPSRADMasked256 x (MOVQconst [c]) mask) => (VPSRADMasked256const [uint8(c)] x mask)
|
|
(VPSRADMasked512 x (MOVQconst [c]) mask) => (VPSRADMasked512const [uint8(c)] x mask)
|
|
(VPSRAQMasked128 x (MOVQconst [c]) mask) => (VPSRAQMasked128const [uint8(c)] x mask)
|
|
(VPSRAQMasked256 x (MOVQconst [c]) mask) => (VPSRAQMasked256const [uint8(c)] x mask)
|
|
(VPSRAQMasked512 x (MOVQconst [c]) mask) => (VPSRAQMasked512const [uint8(c)] x mask)
|
|
(ShiftLeftInt16x8 ...) => (VPSLLVW128 ...)
|
|
(ShiftLeftInt16x16 ...) => (VPSLLVW256 ...)
|
|
(ShiftLeftInt16x32 ...) => (VPSLLVW512 ...)
|
|
(ShiftLeftInt32x4 ...) => (VPSLLVD128 ...)
|
|
(ShiftLeftInt32x8 ...) => (VPSLLVD256 ...)
|
|
(ShiftLeftInt32x16 ...) => (VPSLLVD512 ...)
|
|
(ShiftLeftInt64x2 ...) => (VPSLLVQ128 ...)
|
|
(ShiftLeftInt64x4 ...) => (VPSLLVQ256 ...)
|
|
(ShiftLeftInt64x8 ...) => (VPSLLVQ512 ...)
|
|
(ShiftLeftUint16x8 ...) => (VPSLLVW128 ...)
|
|
(ShiftLeftUint16x16 ...) => (VPSLLVW256 ...)
|
|
(ShiftLeftUint16x32 ...) => (VPSLLVW512 ...)
|
|
(ShiftLeftUint32x4 ...) => (VPSLLVD128 ...)
|
|
(ShiftLeftUint32x8 ...) => (VPSLLVD256 ...)
|
|
(ShiftLeftUint32x16 ...) => (VPSLLVD512 ...)
|
|
(ShiftLeftUint64x2 ...) => (VPSLLVQ128 ...)
|
|
(ShiftLeftUint64x4 ...) => (VPSLLVQ256 ...)
|
|
(ShiftLeftUint64x8 ...) => (VPSLLVQ512 ...)
|
|
(ShiftLeftConcatInt16x8 ...) => (VPSHLDVW128 ...)
|
|
(ShiftLeftConcatInt16x16 ...) => (VPSHLDVW256 ...)
|
|
(ShiftLeftConcatInt16x32 ...) => (VPSHLDVW512 ...)
|
|
(ShiftLeftConcatInt32x4 ...) => (VPSHLDVD128 ...)
|
|
(ShiftLeftConcatInt32x8 ...) => (VPSHLDVD256 ...)
|
|
(ShiftLeftConcatInt32x16 ...) => (VPSHLDVD512 ...)
|
|
(ShiftLeftConcatInt64x2 ...) => (VPSHLDVQ128 ...)
|
|
(ShiftLeftConcatInt64x4 ...) => (VPSHLDVQ256 ...)
|
|
(ShiftLeftConcatInt64x8 ...) => (VPSHLDVQ512 ...)
|
|
(ShiftLeftConcatUint16x8 ...) => (VPSHLDVW128 ...)
|
|
(ShiftLeftConcatUint16x16 ...) => (VPSHLDVW256 ...)
|
|
(ShiftLeftConcatUint16x32 ...) => (VPSHLDVW512 ...)
|
|
(ShiftLeftConcatUint32x4 ...) => (VPSHLDVD128 ...)
|
|
(ShiftLeftConcatUint32x8 ...) => (VPSHLDVD256 ...)
|
|
(ShiftLeftConcatUint32x16 ...) => (VPSHLDVD512 ...)
|
|
(ShiftLeftConcatUint64x2 ...) => (VPSHLDVQ128 ...)
|
|
(ShiftLeftConcatUint64x4 ...) => (VPSHLDVQ256 ...)
|
|
(ShiftLeftConcatUint64x8 ...) => (VPSHLDVQ512 ...)
|
|
(ShiftRightInt16x8 ...) => (VPSRAVW128 ...)
|
|
(ShiftRightInt16x16 ...) => (VPSRAVW256 ...)
|
|
(ShiftRightInt16x32 ...) => (VPSRAVW512 ...)
|
|
(ShiftRightInt32x4 ...) => (VPSRAVD128 ...)
|
|
(ShiftRightInt32x8 ...) => (VPSRAVD256 ...)
|
|
(ShiftRightInt32x16 ...) => (VPSRAVD512 ...)
|
|
(ShiftRightInt64x2 ...) => (VPSRAVQ128 ...)
|
|
(ShiftRightInt64x4 ...) => (VPSRAVQ256 ...)
|
|
(ShiftRightInt64x8 ...) => (VPSRAVQ512 ...)
|
|
(ShiftRightUint16x8 ...) => (VPSRLVW128 ...)
|
|
(ShiftRightUint16x16 ...) => (VPSRLVW256 ...)
|
|
(ShiftRightUint16x32 ...) => (VPSRLVW512 ...)
|
|
(ShiftRightUint32x4 ...) => (VPSRLVD128 ...)
|
|
(ShiftRightUint32x8 ...) => (VPSRLVD256 ...)
|
|
(ShiftRightUint32x16 ...) => (VPSRLVD512 ...)
|
|
(ShiftRightUint64x2 ...) => (VPSRLVQ128 ...)
|
|
(ShiftRightUint64x4 ...) => (VPSRLVQ256 ...)
|
|
(ShiftRightUint64x8 ...) => (VPSRLVQ512 ...)
|
|
(ShiftRightConcatInt16x8 ...) => (VPSHRDVW128 ...)
|
|
(ShiftRightConcatInt16x16 ...) => (VPSHRDVW256 ...)
|
|
(ShiftRightConcatInt16x32 ...) => (VPSHRDVW512 ...)
|
|
(ShiftRightConcatInt32x4 ...) => (VPSHRDVD128 ...)
|
|
(ShiftRightConcatInt32x8 ...) => (VPSHRDVD256 ...)
|
|
(ShiftRightConcatInt32x16 ...) => (VPSHRDVD512 ...)
|
|
(ShiftRightConcatInt64x2 ...) => (VPSHRDVQ128 ...)
|
|
(ShiftRightConcatInt64x4 ...) => (VPSHRDVQ256 ...)
|
|
(ShiftRightConcatInt64x8 ...) => (VPSHRDVQ512 ...)
|
|
(ShiftRightConcatUint16x8 ...) => (VPSHRDVW128 ...)
|
|
(ShiftRightConcatUint16x16 ...) => (VPSHRDVW256 ...)
|
|
(ShiftRightConcatUint16x32 ...) => (VPSHRDVW512 ...)
|
|
(ShiftRightConcatUint32x4 ...) => (VPSHRDVD128 ...)
|
|
(ShiftRightConcatUint32x8 ...) => (VPSHRDVD256 ...)
|
|
(ShiftRightConcatUint32x16 ...) => (VPSHRDVD512 ...)
|
|
(ShiftRightConcatUint64x2 ...) => (VPSHRDVQ128 ...)
|
|
(ShiftRightConcatUint64x4 ...) => (VPSHRDVQ256 ...)
|
|
(ShiftRightConcatUint64x8 ...) => (VPSHRDVQ512 ...)
|
|
(SqrtFloat32x4 ...) => (VSQRTPS128 ...)
|
|
(SqrtFloat32x8 ...) => (VSQRTPS256 ...)
|
|
(SqrtFloat32x16 ...) => (VSQRTPS512 ...)
|
|
(SqrtFloat64x2 ...) => (VSQRTPD128 ...)
|
|
(SqrtFloat64x4 ...) => (VSQRTPD256 ...)
|
|
(SqrtFloat64x8 ...) => (VSQRTPD512 ...)
|
|
(SubFloat32x4 ...) => (VSUBPS128 ...)
|
|
(SubFloat32x8 ...) => (VSUBPS256 ...)
|
|
(SubFloat32x16 ...) => (VSUBPS512 ...)
|
|
(SubFloat64x2 ...) => (VSUBPD128 ...)
|
|
(SubFloat64x4 ...) => (VSUBPD256 ...)
|
|
(SubFloat64x8 ...) => (VSUBPD512 ...)
|
|
(SubInt8x16 ...) => (VPSUBB128 ...)
|
|
(SubInt8x32 ...) => (VPSUBB256 ...)
|
|
(SubInt8x64 ...) => (VPSUBB512 ...)
|
|
(SubInt16x8 ...) => (VPSUBW128 ...)
|
|
(SubInt16x16 ...) => (VPSUBW256 ...)
|
|
(SubInt16x32 ...) => (VPSUBW512 ...)
|
|
(SubInt32x4 ...) => (VPSUBD128 ...)
|
|
(SubInt32x8 ...) => (VPSUBD256 ...)
|
|
(SubInt32x16 ...) => (VPSUBD512 ...)
|
|
(SubInt64x2 ...) => (VPSUBQ128 ...)
|
|
(SubInt64x4 ...) => (VPSUBQ256 ...)
|
|
(SubInt64x8 ...) => (VPSUBQ512 ...)
|
|
(SubUint8x16 ...) => (VPSUBB128 ...)
|
|
(SubUint8x32 ...) => (VPSUBB256 ...)
|
|
(SubUint8x64 ...) => (VPSUBB512 ...)
|
|
(SubUint16x8 ...) => (VPSUBW128 ...)
|
|
(SubUint16x16 ...) => (VPSUBW256 ...)
|
|
(SubUint16x32 ...) => (VPSUBW512 ...)
|
|
(SubUint32x4 ...) => (VPSUBD128 ...)
|
|
(SubUint32x8 ...) => (VPSUBD256 ...)
|
|
(SubUint32x16 ...) => (VPSUBD512 ...)
|
|
(SubUint64x2 ...) => (VPSUBQ128 ...)
|
|
(SubUint64x4 ...) => (VPSUBQ256 ...)
|
|
(SubUint64x8 ...) => (VPSUBQ512 ...)
|
|
(SubPairsFloat32x4 ...) => (VHSUBPS128 ...)
|
|
(SubPairsFloat32x8 ...) => (VHSUBPS256 ...)
|
|
(SubPairsFloat64x2 ...) => (VHSUBPD128 ...)
|
|
(SubPairsFloat64x4 ...) => (VHSUBPD256 ...)
|
|
(SubPairsInt16x8 ...) => (VPHSUBW128 ...)
|
|
(SubPairsInt16x16 ...) => (VPHSUBW256 ...)
|
|
(SubPairsInt32x4 ...) => (VPHSUBD128 ...)
|
|
(SubPairsInt32x8 ...) => (VPHSUBD256 ...)
|
|
(SubPairsUint16x8 ...) => (VPHSUBW128 ...)
|
|
(SubPairsUint16x16 ...) => (VPHSUBW256 ...)
|
|
(SubPairsUint32x4 ...) => (VPHSUBD128 ...)
|
|
(SubPairsUint32x8 ...) => (VPHSUBD256 ...)
|
|
(SubPairsSaturatedInt16x8 ...) => (VPHSUBSW128 ...)
|
|
(SubPairsSaturatedInt16x16 ...) => (VPHSUBSW256 ...)
|
|
(SubSaturatedInt8x16 ...) => (VPSUBSB128 ...)
|
|
(SubSaturatedInt8x32 ...) => (VPSUBSB256 ...)
|
|
(SubSaturatedInt8x64 ...) => (VPSUBSB512 ...)
|
|
(SubSaturatedInt16x8 ...) => (VPSUBSW128 ...)
|
|
(SubSaturatedInt16x16 ...) => (VPSUBSW256 ...)
|
|
(SubSaturatedInt16x32 ...) => (VPSUBSW512 ...)
|
|
(SubSaturatedUint8x16 ...) => (VPSUBUSB128 ...)
|
|
(SubSaturatedUint8x32 ...) => (VPSUBUSB256 ...)
|
|
(SubSaturatedUint8x64 ...) => (VPSUBUSB512 ...)
|
|
(SubSaturatedUint16x8 ...) => (VPSUBUSW128 ...)
|
|
(SubSaturatedUint16x16 ...) => (VPSUBUSW256 ...)
|
|
(SubSaturatedUint16x32 ...) => (VPSUBUSW512 ...)
|
|
(SumAbsDiffUint8x16 ...) => (VPSADBW128 ...)
|
|
(SumAbsDiffUint8x32 ...) => (VPSADBW256 ...)
|
|
(SumAbsDiffUint8x64 ...) => (VPSADBW512 ...)
|
|
(TruncFloat32x4 x) => (VROUNDPS128 [3] x)
|
|
(TruncFloat32x8 x) => (VROUNDPS256 [3] x)
|
|
(TruncFloat64x2 x) => (VROUNDPD128 [3] x)
|
|
(TruncFloat64x4 x) => (VROUNDPD256 [3] x)
|
|
(TruncScaledFloat32x4 [a] x) => (VRNDSCALEPS128 [a+3] x)
|
|
(TruncScaledFloat32x8 [a] x) => (VRNDSCALEPS256 [a+3] x)
|
|
(TruncScaledFloat32x16 [a] x) => (VRNDSCALEPS512 [a+3] x)
|
|
(TruncScaledFloat64x2 [a] x) => (VRNDSCALEPD128 [a+3] x)
|
|
(TruncScaledFloat64x4 [a] x) => (VRNDSCALEPD256 [a+3] x)
|
|
(TruncScaledFloat64x8 [a] x) => (VRNDSCALEPD512 [a+3] x)
|
|
(TruncScaledResidueFloat32x4 [a] x) => (VREDUCEPS128 [a+3] x)
|
|
(TruncScaledResidueFloat32x8 [a] x) => (VREDUCEPS256 [a+3] x)
|
|
(TruncScaledResidueFloat32x16 [a] x) => (VREDUCEPS512 [a+3] x)
|
|
(TruncScaledResidueFloat64x2 [a] x) => (VREDUCEPD128 [a+3] x)
|
|
(TruncScaledResidueFloat64x4 [a] x) => (VREDUCEPD256 [a+3] x)
|
|
(TruncScaledResidueFloat64x8 [a] x) => (VREDUCEPD512 [a+3] x)
|
|
(XorInt8x16 ...) => (VPXOR128 ...)
|
|
(XorInt8x32 ...) => (VPXOR256 ...)
|
|
(XorInt8x64 ...) => (VPXORD512 ...)
|
|
(XorInt16x8 ...) => (VPXOR128 ...)
|
|
(XorInt16x16 ...) => (VPXOR256 ...)
|
|
(XorInt16x32 ...) => (VPXORD512 ...)
|
|
(XorInt32x4 ...) => (VPXOR128 ...)
|
|
(XorInt32x8 ...) => (VPXOR256 ...)
|
|
(XorInt32x16 ...) => (VPXORD512 ...)
|
|
(XorInt64x2 ...) => (VPXOR128 ...)
|
|
(XorInt64x4 ...) => (VPXOR256 ...)
|
|
(XorInt64x8 ...) => (VPXORQ512 ...)
|
|
(XorUint8x16 ...) => (VPXOR128 ...)
|
|
(XorUint8x32 ...) => (VPXOR256 ...)
|
|
(XorUint8x64 ...) => (VPXORD512 ...)
|
|
(XorUint16x8 ...) => (VPXOR128 ...)
|
|
(XorUint16x16 ...) => (VPXOR256 ...)
|
|
(XorUint16x32 ...) => (VPXORD512 ...)
|
|
(XorUint32x4 ...) => (VPXOR128 ...)
|
|
(XorUint32x8 ...) => (VPXOR256 ...)
|
|
(XorUint32x16 ...) => (VPXORD512 ...)
|
|
(XorUint64x2 ...) => (VPXOR128 ...)
|
|
(XorUint64x4 ...) => (VPXOR256 ...)
|
|
(XorUint64x8 ...) => (VPXORQ512 ...)
|
|
(blendInt8x16 ...) => (VPBLENDVB128 ...)
|
|
(blendInt8x32 ...) => (VPBLENDVB256 ...)
|
|
(blendMaskedInt8x64 x y mask) => (VPBLENDMBMasked512 x y (VPMOVVec8x64ToM <types.TypeMask> mask))
|
|
(blendMaskedInt16x32 x y mask) => (VPBLENDMWMasked512 x y (VPMOVVec16x32ToM <types.TypeMask> mask))
|
|
(blendMaskedInt32x16 x y mask) => (VPBLENDMDMasked512 x y (VPMOVVec32x16ToM <types.TypeMask> mask))
|
|
(blendMaskedInt64x8 x y mask) => (VPBLENDMQMasked512 x y (VPMOVVec64x8ToM <types.TypeMask> mask))
|
|
(concatSelectedConstantFloat32x4 ...) => (VSHUFPS128 ...)
|
|
(concatSelectedConstantFloat64x2 ...) => (VSHUFPD128 ...)
|
|
(concatSelectedConstantInt32x4 ...) => (VSHUFPS128 ...)
|
|
(concatSelectedConstantInt64x2 ...) => (VSHUFPD128 ...)
|
|
(concatSelectedConstantUint32x4 ...) => (VSHUFPS128 ...)
|
|
(concatSelectedConstantUint64x2 ...) => (VSHUFPD128 ...)
|
|
(concatSelectedConstantGroupedFloat32x8 ...) => (VSHUFPS256 ...)
|
|
(concatSelectedConstantGroupedFloat32x16 ...) => (VSHUFPS512 ...)
|
|
(concatSelectedConstantGroupedFloat64x4 ...) => (VSHUFPD256 ...)
|
|
(concatSelectedConstantGroupedFloat64x8 ...) => (VSHUFPD512 ...)
|
|
(concatSelectedConstantGroupedInt32x8 ...) => (VSHUFPS256 ...)
|
|
(concatSelectedConstantGroupedInt32x16 ...) => (VSHUFPS512 ...)
|
|
(concatSelectedConstantGroupedInt64x4 ...) => (VSHUFPD256 ...)
|
|
(concatSelectedConstantGroupedInt64x8 ...) => (VSHUFPD512 ...)
|
|
(concatSelectedConstantGroupedUint32x8 ...) => (VSHUFPS256 ...)
|
|
(concatSelectedConstantGroupedUint32x16 ...) => (VSHUFPS512 ...)
|
|
(concatSelectedConstantGroupedUint64x4 ...) => (VSHUFPD256 ...)
|
|
(concatSelectedConstantGroupedUint64x8 ...) => (VSHUFPD512 ...)
|
|
(ternInt32x4 ...) => (VPTERNLOGD128 ...)
|
|
(ternInt32x8 ...) => (VPTERNLOGD256 ...)
|
|
(ternInt32x16 ...) => (VPTERNLOGD512 ...)
|
|
(ternInt64x2 ...) => (VPTERNLOGQ128 ...)
|
|
(ternInt64x4 ...) => (VPTERNLOGQ256 ...)
|
|
(ternInt64x8 ...) => (VPTERNLOGQ512 ...)
|
|
(ternUint32x4 ...) => (VPTERNLOGD128 ...)
|
|
(ternUint32x8 ...) => (VPTERNLOGD256 ...)
|
|
(ternUint32x16 ...) => (VPTERNLOGD512 ...)
|
|
(ternUint64x2 ...) => (VPTERNLOGQ128 ...)
|
|
(ternUint64x4 ...) => (VPTERNLOGQ256 ...)
|
|
(ternUint64x8 ...) => (VPTERNLOGQ512 ...)
|
|
(VMOVDQU8Masked128 (VPABSB128 x) mask) => (VPABSBMasked128 x mask)
|
|
(VMOVDQU8Masked256 (VPABSB256 x) mask) => (VPABSBMasked256 x mask)
|
|
(VMOVDQU8Masked512 (VPABSB512 x) mask) => (VPABSBMasked512 x mask)
|
|
(VMOVDQU16Masked128 (VPABSW128 x) mask) => (VPABSWMasked128 x mask)
|
|
(VMOVDQU16Masked256 (VPABSW256 x) mask) => (VPABSWMasked256 x mask)
|
|
(VMOVDQU16Masked512 (VPABSW512 x) mask) => (VPABSWMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VPABSD128 x) mask) => (VPABSDMasked128 x mask)
|
|
(VMOVDQU32Masked256 (VPABSD256 x) mask) => (VPABSDMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VPABSD512 x) mask) => (VPABSDMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VPABSQ128 x) mask) => (VPABSQMasked128 x mask)
|
|
(VMOVDQU64Masked256 (VPABSQ256 x) mask) => (VPABSQMasked256 x mask)
|
|
(VMOVDQU64Masked512 (VPABSQ512 x) mask) => (VPABSQMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VPDPBUSD128 x y z) mask) => (VPDPBUSDMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VPDPBUSD256 x y z) mask) => (VPDPBUSDMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VPDPBUSD512 x y z) mask) => (VPDPBUSDMasked512 x y z mask)
|
|
(VMOVDQU32Masked128 (VPDPBUSDS128 x y z) mask) => (VPDPBUSDSMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VPDPBUSDS256 x y z) mask) => (VPDPBUSDSMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VPDPBUSDS512 x y z) mask) => (VPDPBUSDSMasked512 x y z mask)
|
|
(VMOVDQU32Masked128 (VADDPS128 x y) mask) => (VADDPSMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VADDPS256 x y) mask) => (VADDPSMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VADDPS512 x y) mask) => (VADDPSMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VADDPD128 x y) mask) => (VADDPDMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VADDPD256 x y) mask) => (VADDPDMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VADDPD512 x y) mask) => (VADDPDMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPADDB128 x y) mask) => (VPADDBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPADDB256 x y) mask) => (VPADDBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPADDB512 x y) mask) => (VPADDBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPADDW128 x y) mask) => (VPADDWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPADDW256 x y) mask) => (VPADDWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPADDW512 x y) mask) => (VPADDWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPADDD128 x y) mask) => (VPADDDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPADDD256 x y) mask) => (VPADDDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPADDD512 x y) mask) => (VPADDDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPADDQ128 x y) mask) => (VPADDQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPADDQ256 x y) mask) => (VPADDQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPADDQ512 x y) mask) => (VPADDQMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPADDSB128 x y) mask) => (VPADDSBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPADDSB256 x y) mask) => (VPADDSBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPADDSB512 x y) mask) => (VPADDSBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPADDSW128 x y) mask) => (VPADDSWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPADDSW256 x y) mask) => (VPADDSWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPADDSW512 x y) mask) => (VPADDSWMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPADDUSB128 x y) mask) => (VPADDUSBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPADDUSB256 x y) mask) => (VPADDUSBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPADDUSB512 x y) mask) => (VPADDUSBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPADDUSW128 x y) mask) => (VPADDUSWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPADDUSW256 x y) mask) => (VPADDUSWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPADDUSW512 x y) mask) => (VPADDUSWMasked512 x y mask)
|
|
(VMOVDQU32Masked512 (VPANDD512 x y) mask) => (VPANDDMasked512 x y mask)
|
|
(VMOVDQU64Masked512 (VPANDQ512 x y) mask) => (VPANDQMasked512 x y mask)
|
|
(VMOVDQU32Masked512 (VPANDND512 x y) mask) => (VPANDNDMasked512 x y mask)
|
|
(VMOVDQU64Masked512 (VPANDNQ512 x y) mask) => (VPANDNQMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPAVGB128 x y) mask) => (VPAVGBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPAVGB256 x y) mask) => (VPAVGBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPAVGB512 x y) mask) => (VPAVGBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPAVGW128 x y) mask) => (VPAVGWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPAVGW256 x y) mask) => (VPAVGWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPAVGW512 x y) mask) => (VPAVGWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VBROADCASTSS128 x) mask) => (VBROADCASTSSMasked128 x mask)
|
|
(VMOVDQU64Masked128 (VPBROADCASTQ128 x) mask) => (VPBROADCASTQMasked128 x mask)
|
|
(VMOVDQU8Masked128 (VPBROADCASTB128 x) mask) => (VPBROADCASTBMasked128 x mask)
|
|
(VMOVDQU16Masked128 (VPBROADCASTW128 x) mask) => (VPBROADCASTWMasked128 x mask)
|
|
(VMOVDQU32Masked128 (VPBROADCASTD128 x) mask) => (VPBROADCASTDMasked128 x mask)
|
|
(VMOVDQU32Masked256 (VBROADCASTSS256 x) mask) => (VBROADCASTSSMasked256 x mask)
|
|
(VMOVDQU64Masked256 (VBROADCASTSD256 x) mask) => (VBROADCASTSDMasked256 x mask)
|
|
(VMOVDQU8Masked256 (VPBROADCASTB256 x) mask) => (VPBROADCASTBMasked256 x mask)
|
|
(VMOVDQU16Masked256 (VPBROADCASTW256 x) mask) => (VPBROADCASTWMasked256 x mask)
|
|
(VMOVDQU32Masked256 (VPBROADCASTD256 x) mask) => (VPBROADCASTDMasked256 x mask)
|
|
(VMOVDQU64Masked256 (VPBROADCASTQ256 x) mask) => (VPBROADCASTQMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VBROADCASTSS512 x) mask) => (VBROADCASTSSMasked512 x mask)
|
|
(VMOVDQU64Masked512 (VBROADCASTSD512 x) mask) => (VBROADCASTSDMasked512 x mask)
|
|
(VMOVDQU8Masked512 (VPBROADCASTB512 x) mask) => (VPBROADCASTBMasked512 x mask)
|
|
(VMOVDQU16Masked512 (VPBROADCASTW512 x) mask) => (VPBROADCASTWMasked512 x mask)
|
|
(VMOVDQU32Masked512 (VPBROADCASTD512 x) mask) => (VPBROADCASTDMasked512 x mask)
|
|
(VMOVDQU64Masked512 (VPBROADCASTQ512 x) mask) => (VPBROADCASTQMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VRNDSCALEPS128 [a] x) mask) => (VRNDSCALEPSMasked128 [a] x mask)
|
|
(VMOVDQU32Masked256 (VRNDSCALEPS256 [a] x) mask) => (VRNDSCALEPSMasked256 [a] x mask)
|
|
(VMOVDQU32Masked512 (VRNDSCALEPS512 [a] x) mask) => (VRNDSCALEPSMasked512 [a] x mask)
|
|
(VMOVDQU64Masked128 (VRNDSCALEPD128 [a] x) mask) => (VRNDSCALEPDMasked128 [a] x mask)
|
|
(VMOVDQU64Masked256 (VRNDSCALEPD256 [a] x) mask) => (VRNDSCALEPDMasked256 [a] x mask)
|
|
(VMOVDQU64Masked512 (VRNDSCALEPD512 [a] x) mask) => (VRNDSCALEPDMasked512 [a] x mask)
|
|
(VMOVDQU32Masked128 (VREDUCEPS128 [a] x) mask) => (VREDUCEPSMasked128 [a] x mask)
|
|
(VMOVDQU32Masked256 (VREDUCEPS256 [a] x) mask) => (VREDUCEPSMasked256 [a] x mask)
|
|
(VMOVDQU32Masked512 (VREDUCEPS512 [a] x) mask) => (VREDUCEPSMasked512 [a] x mask)
|
|
(VMOVDQU64Masked128 (VREDUCEPD128 [a] x) mask) => (VREDUCEPDMasked128 [a] x mask)
|
|
(VMOVDQU64Masked256 (VREDUCEPD256 [a] x) mask) => (VREDUCEPDMasked256 [a] x mask)
|
|
(VMOVDQU64Masked512 (VREDUCEPD512 [a] x) mask) => (VREDUCEPDMasked512 [a] x mask)
|
|
(VMOVDQU8Masked256 (VPALIGNR256 [a] x y) mask) => (VPALIGNRMasked256 [a] x y mask)
|
|
(VMOVDQU8Masked512 (VPALIGNR512 [a] x y) mask) => (VPALIGNRMasked512 [a] x y mask)
|
|
(VMOVDQU8Masked128 (VPALIGNR128 [a] x y) mask) => (VPALIGNRMasked128 [a] x y mask)
|
|
(VMOVDQU16Masked128 (VPMOVWB128_128 x) mask) => (VPMOVWBMasked128_128 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVWB128_256 x) mask) => (VPMOVWBMasked128_256 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVWB256 x) mask) => (VPMOVWBMasked256 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVDB128_128 x) mask) => (VPMOVDBMasked128_128 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVDB128_256 x) mask) => (VPMOVDBMasked128_256 x mask)
|
|
(VMOVDQU32Masked512 (VPMOVDB128_512 x) mask) => (VPMOVDBMasked128_512 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVQB128_128 x) mask) => (VPMOVQBMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVQB128_256 x) mask) => (VPMOVQBMasked128_256 x mask)
|
|
(VMOVDQU64Masked512 (VPMOVQB128_512 x) mask) => (VPMOVQBMasked128_512 x mask)
|
|
(VMOVDQU16Masked128 (VPMOVSWB128_128 x) mask) => (VPMOVSWBMasked128_128 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVSWB128_256 x) mask) => (VPMOVSWBMasked128_256 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVSWB256 x) mask) => (VPMOVSWBMasked256 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVSDB128_128 x) mask) => (VPMOVSDBMasked128_128 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVSDB128_256 x) mask) => (VPMOVSDBMasked128_256 x mask)
|
|
(VMOVDQU32Masked512 (VPMOVSDB128_512 x) mask) => (VPMOVSDBMasked128_512 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVSQB128_128 x) mask) => (VPMOVSQBMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVSQB128_256 x) mask) => (VPMOVSQBMasked128_256 x mask)
|
|
(VMOVDQU64Masked512 (VPMOVSQB128_512 x) mask) => (VPMOVSQBMasked128_512 x mask)
|
|
(VMOVDQU8Masked256 (VPMOVSXBW256 x) mask) => (VPMOVSXBWMasked256 x mask)
|
|
(VMOVDQU8Masked512 (VPMOVSXBW512 x) mask) => (VPMOVSXBWMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVDW128_128 x) mask) => (VPMOVDWMasked128_128 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVDW128_256 x) mask) => (VPMOVDWMasked128_256 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVDW256 x) mask) => (VPMOVDWMasked256 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVQW128_128 x) mask) => (VPMOVQWMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVQW128_256 x) mask) => (VPMOVQWMasked128_256 x mask)
|
|
(VMOVDQU64Masked512 (VPMOVQW128_512 x) mask) => (VPMOVQWMasked128_512 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVSDW128_128 x) mask) => (VPMOVSDWMasked128_128 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVSDW128_256 x) mask) => (VPMOVSDWMasked128_256 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVSDW256 x) mask) => (VPMOVSDWMasked256 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVSQW128_128 x) mask) => (VPMOVSQWMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVSQW128_256 x) mask) => (VPMOVSQWMasked128_256 x mask)
|
|
(VMOVDQU64Masked512 (VPMOVSQW128_512 x) mask) => (VPMOVSQWMasked128_512 x mask)
|
|
(VMOVDQU32Masked128 (VPACKSSDW128 x y) mask) => (VPACKSSDWMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPACKSSDW256 x y) mask) => (VPACKSSDWMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPACKSSDW512 x y) mask) => (VPACKSSDWMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPMOVSXBW128 x) mask) => (VPMOVSXBWMasked128 x mask)
|
|
(VMOVDQU32Masked128 (VCVTTPS2DQ128 x) mask) => (VCVTTPS2DQMasked128 x mask)
|
|
(VMOVDQU32Masked256 (VCVTTPS2DQ256 x) mask) => (VCVTTPS2DQMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VCVTTPS2DQ512 x) mask) => (VCVTTPS2DQMasked512 x mask)
|
|
(VMOVDQU8Masked512 (VPMOVSXBD512 x) mask) => (VPMOVSXBDMasked512 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVSXWD256 x) mask) => (VPMOVSXWDMasked256 x mask)
|
|
(VMOVDQU16Masked512 (VPMOVSXWD512 x) mask) => (VPMOVSXWDMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVQD128_128 x) mask) => (VPMOVQDMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVQD128_256 x) mask) => (VPMOVQDMasked128_256 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVQD256 x) mask) => (VPMOVQDMasked256 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVSQD128_128 x) mask) => (VPMOVSQDMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVSQD128_256 x) mask) => (VPMOVSQDMasked128_256 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVSQD256 x) mask) => (VPMOVSQDMasked256 x mask)
|
|
(VMOVDQU8Masked128 (VPMOVSXBD128 x) mask) => (VPMOVSXBDMasked128 x mask)
|
|
(VMOVDQU16Masked128 (VPMOVSXWD128 x) mask) => (VPMOVSXWDMasked128 x mask)
|
|
(VMOVDQU8Masked256 (VPMOVSXBD256 x) mask) => (VPMOVSXBDMasked256 x mask)
|
|
(VMOVDQU16Masked512 (VPMOVSXWQ512 x) mask) => (VPMOVSXWQMasked512 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVSXDQ256 x) mask) => (VPMOVSXDQMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VPMOVSXDQ512 x) mask) => (VPMOVSXDQMasked512 x mask)
|
|
(VMOVDQU8Masked128 (VPMOVSXBQ128 x) mask) => (VPMOVSXBQMasked128 x mask)
|
|
(VMOVDQU16Masked128 (VPMOVSXWQ128 x) mask) => (VPMOVSXWQMasked128 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVSXDQ128 x) mask) => (VPMOVSXDQMasked128 x mask)
|
|
(VMOVDQU8Masked256 (VPMOVSXBQ256 x) mask) => (VPMOVSXBQMasked256 x mask)
|
|
(VMOVDQU8Masked512 (VPMOVSXBQ512 x) mask) => (VPMOVSXBQMasked512 x mask)
|
|
(VMOVDQU16Masked128 (VPMOVUSWB128_128 x) mask) => (VPMOVUSWBMasked128_128 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVUSWB128_256 x) mask) => (VPMOVUSWBMasked128_256 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVUSWB256 x) mask) => (VPMOVUSWBMasked256 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVUSDB128_128 x) mask) => (VPMOVUSDBMasked128_128 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVUSDB128_256 x) mask) => (VPMOVUSDBMasked128_256 x mask)
|
|
(VMOVDQU32Masked512 (VPMOVUSDB128_512 x) mask) => (VPMOVUSDBMasked128_512 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVUSQB128_128 x) mask) => (VPMOVUSQBMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVUSQB128_256 x) mask) => (VPMOVUSQBMasked128_256 x mask)
|
|
(VMOVDQU64Masked512 (VPMOVUSQB128_512 x) mask) => (VPMOVUSQBMasked128_512 x mask)
|
|
(VMOVDQU8Masked256 (VPMOVZXBW256 x) mask) => (VPMOVZXBWMasked256 x mask)
|
|
(VMOVDQU8Masked512 (VPMOVZXBW512 x) mask) => (VPMOVZXBWMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVUSDW128_128 x) mask) => (VPMOVUSDWMasked128_128 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVUSDW128_256 x) mask) => (VPMOVUSDWMasked128_256 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVUSDW256 x) mask) => (VPMOVUSDWMasked256 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVUSQW128_128 x) mask) => (VPMOVUSQWMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVUSQW128_256 x) mask) => (VPMOVUSQWMasked128_256 x mask)
|
|
(VMOVDQU64Masked512 (VPMOVUSQW128_512 x) mask) => (VPMOVUSQWMasked128_512 x mask)
|
|
(VMOVDQU32Masked128 (VPACKUSDW128 x y) mask) => (VPACKUSDWMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPACKUSDW256 x y) mask) => (VPACKUSDWMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPACKUSDW512 x y) mask) => (VPACKUSDWMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPMOVZXBW128 x) mask) => (VPMOVZXBWMasked128 x mask)
|
|
(VMOVDQU32Masked128 (VCVTPS2UDQ128 x) mask) => (VCVTPS2UDQMasked128 x mask)
|
|
(VMOVDQU32Masked256 (VCVTPS2UDQ256 x) mask) => (VCVTPS2UDQMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VCVTPS2UDQ512 x) mask) => (VCVTPS2UDQMasked512 x mask)
|
|
(VMOVDQU8Masked512 (VPMOVZXBD512 x) mask) => (VPMOVZXBDMasked512 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVZXWD256 x) mask) => (VPMOVZXWDMasked256 x mask)
|
|
(VMOVDQU16Masked512 (VPMOVZXWD512 x) mask) => (VPMOVZXWDMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VPMOVUSQD128_128 x) mask) => (VPMOVUSQDMasked128_128 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVUSQD128_256 x) mask) => (VPMOVUSQDMasked128_256 x mask)
|
|
(VMOVDQU64Masked256 (VPMOVUSQD256 x) mask) => (VPMOVUSQDMasked256 x mask)
|
|
(VMOVDQU8Masked128 (VPMOVZXBD128 x) mask) => (VPMOVZXBDMasked128 x mask)
|
|
(VMOVDQU16Masked128 (VPMOVZXWD128 x) mask) => (VPMOVZXWDMasked128 x mask)
|
|
(VMOVDQU8Masked256 (VPMOVZXBD256 x) mask) => (VPMOVZXBDMasked256 x mask)
|
|
(VMOVDQU16Masked512 (VPMOVZXWQ512 x) mask) => (VPMOVZXWQMasked512 x mask)
|
|
(VMOVDQU32Masked256 (VPMOVZXDQ256 x) mask) => (VPMOVZXDQMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VPMOVZXDQ512 x) mask) => (VPMOVZXDQMasked512 x mask)
|
|
(VMOVDQU8Masked128 (VPMOVZXBQ128 x) mask) => (VPMOVZXBQMasked128 x mask)
|
|
(VMOVDQU16Masked128 (VPMOVZXWQ128 x) mask) => (VPMOVZXWQMasked128 x mask)
|
|
(VMOVDQU32Masked128 (VPMOVZXDQ128 x) mask) => (VPMOVZXDQMasked128 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVSXWQ256 x) mask) => (VPMOVSXWQMasked256 x mask)
|
|
(VMOVDQU8Masked256 (VPMOVZXBQ256 x) mask) => (VPMOVZXBQMasked256 x mask)
|
|
(VMOVDQU16Masked256 (VPMOVZXWQ256 x) mask) => (VPMOVZXWQMasked256 x mask)
|
|
(VMOVDQU8Masked512 (VPMOVZXBQ512 x) mask) => (VPMOVZXBQMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VDIVPS128 x y) mask) => (VDIVPSMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VDIVPS256 x y) mask) => (VDIVPSMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VDIVPS512 x y) mask) => (VDIVPSMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VDIVPD128 x y) mask) => (VDIVPDMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VDIVPD256 x y) mask) => (VDIVPDMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VDIVPD512 x y) mask) => (VDIVPDMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMADDWD128 x y) mask) => (VPMADDWDMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMADDWD256 x y) mask) => (VPMADDWDMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMADDWD512 x y) mask) => (VPMADDWDMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMADDUBSW128 x y) mask) => (VPMADDUBSWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMADDUBSW256 x y) mask) => (VPMADDUBSWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMADDUBSW512 x y) mask) => (VPMADDUBSWMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VGF2P8AFFINEINVQB128 [a] x y) mask) => (VGF2P8AFFINEINVQBMasked128 [a] x y mask)
|
|
(VMOVDQU8Masked256 (VGF2P8AFFINEINVQB256 [a] x y) mask) => (VGF2P8AFFINEINVQBMasked256 [a] x y mask)
|
|
(VMOVDQU8Masked512 (VGF2P8AFFINEINVQB512 [a] x y) mask) => (VGF2P8AFFINEINVQBMasked512 [a] x y mask)
|
|
(VMOVDQU8Masked128 (VGF2P8AFFINEQB128 [a] x y) mask) => (VGF2P8AFFINEQBMasked128 [a] x y mask)
|
|
(VMOVDQU8Masked256 (VGF2P8AFFINEQB256 [a] x y) mask) => (VGF2P8AFFINEQBMasked256 [a] x y mask)
|
|
(VMOVDQU8Masked512 (VGF2P8AFFINEQB512 [a] x y) mask) => (VGF2P8AFFINEQBMasked512 [a] x y mask)
|
|
(VMOVDQU8Masked128 (VGF2P8MULB128 x y) mask) => (VGF2P8MULBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VGF2P8MULB256 x y) mask) => (VGF2P8MULBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VGF2P8MULB512 x y) mask) => (VGF2P8MULBMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPLZCNTD128 x) mask) => (VPLZCNTDMasked128 x mask)
|
|
(VMOVDQU32Masked256 (VPLZCNTD256 x) mask) => (VPLZCNTDMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VPLZCNTD512 x) mask) => (VPLZCNTDMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VPLZCNTQ128 x) mask) => (VPLZCNTQMasked128 x mask)
|
|
(VMOVDQU64Masked256 (VPLZCNTQ256 x) mask) => (VPLZCNTQMasked256 x mask)
|
|
(VMOVDQU64Masked512 (VPLZCNTQ512 x) mask) => (VPLZCNTQMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VMAXPS128 x y) mask) => (VMAXPSMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VMAXPS256 x y) mask) => (VMAXPSMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VMAXPS512 x y) mask) => (VMAXPSMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VMAXPD128 x y) mask) => (VMAXPDMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VMAXPD256 x y) mask) => (VMAXPDMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VMAXPD512 x y) mask) => (VMAXPDMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPMAXSB128 x y) mask) => (VPMAXSBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPMAXSB256 x y) mask) => (VPMAXSBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPMAXSB512 x y) mask) => (VPMAXSBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMAXSW128 x y) mask) => (VPMAXSWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMAXSW256 x y) mask) => (VPMAXSWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMAXSW512 x y) mask) => (VPMAXSWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPMAXSD128 x y) mask) => (VPMAXSDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPMAXSD256 x y) mask) => (VPMAXSDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPMAXSD512 x y) mask) => (VPMAXSDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPMAXSQ128 x y) mask) => (VPMAXSQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPMAXSQ256 x y) mask) => (VPMAXSQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPMAXSQ512 x y) mask) => (VPMAXSQMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPMAXUB128 x y) mask) => (VPMAXUBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPMAXUB256 x y) mask) => (VPMAXUBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPMAXUB512 x y) mask) => (VPMAXUBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMAXUW128 x y) mask) => (VPMAXUWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMAXUW256 x y) mask) => (VPMAXUWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMAXUW512 x y) mask) => (VPMAXUWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPMAXUD128 x y) mask) => (VPMAXUDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPMAXUD256 x y) mask) => (VPMAXUDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPMAXUD512 x y) mask) => (VPMAXUDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPMAXUQ128 x y) mask) => (VPMAXUQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPMAXUQ256 x y) mask) => (VPMAXUQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPMAXUQ512 x y) mask) => (VPMAXUQMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VMINPS128 x y) mask) => (VMINPSMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VMINPS256 x y) mask) => (VMINPSMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VMINPS512 x y) mask) => (VMINPSMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VMINPD128 x y) mask) => (VMINPDMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VMINPD256 x y) mask) => (VMINPDMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VMINPD512 x y) mask) => (VMINPDMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPMINSB128 x y) mask) => (VPMINSBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPMINSB256 x y) mask) => (VPMINSBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPMINSB512 x y) mask) => (VPMINSBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMINSW128 x y) mask) => (VPMINSWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMINSW256 x y) mask) => (VPMINSWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMINSW512 x y) mask) => (VPMINSWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPMINSD128 x y) mask) => (VPMINSDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPMINSD256 x y) mask) => (VPMINSDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPMINSD512 x y) mask) => (VPMINSDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPMINSQ128 x y) mask) => (VPMINSQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPMINSQ256 x y) mask) => (VPMINSQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPMINSQ512 x y) mask) => (VPMINSQMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPMINUB128 x y) mask) => (VPMINUBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPMINUB256 x y) mask) => (VPMINUBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPMINUB512 x y) mask) => (VPMINUBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMINUW128 x y) mask) => (VPMINUWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMINUW256 x y) mask) => (VPMINUWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMINUW512 x y) mask) => (VPMINUWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPMINUD128 x y) mask) => (VPMINUDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPMINUD256 x y) mask) => (VPMINUDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPMINUD512 x y) mask) => (VPMINUDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPMINUQ128 x y) mask) => (VPMINUQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPMINUQ256 x y) mask) => (VPMINUQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPMINUQ512 x y) mask) => (VPMINUQMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VFMADD213PS128 x y z) mask) => (VFMADD213PSMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VFMADD213PS256 x y z) mask) => (VFMADD213PSMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VFMADD213PS512 x y z) mask) => (VFMADD213PSMasked512 x y z mask)
|
|
(VMOVDQU64Masked128 (VFMADD213PD128 x y z) mask) => (VFMADD213PDMasked128 x y z mask)
|
|
(VMOVDQU64Masked256 (VFMADD213PD256 x y z) mask) => (VFMADD213PDMasked256 x y z mask)
|
|
(VMOVDQU64Masked512 (VFMADD213PD512 x y z) mask) => (VFMADD213PDMasked512 x y z mask)
|
|
(VMOVDQU32Masked128 (VFMADDSUB213PS128 x y z) mask) => (VFMADDSUB213PSMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VFMADDSUB213PS256 x y z) mask) => (VFMADDSUB213PSMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VFMADDSUB213PS512 x y z) mask) => (VFMADDSUB213PSMasked512 x y z mask)
|
|
(VMOVDQU64Masked128 (VFMADDSUB213PD128 x y z) mask) => (VFMADDSUB213PDMasked128 x y z mask)
|
|
(VMOVDQU64Masked256 (VFMADDSUB213PD256 x y z) mask) => (VFMADDSUB213PDMasked256 x y z mask)
|
|
(VMOVDQU64Masked512 (VFMADDSUB213PD512 x y z) mask) => (VFMADDSUB213PDMasked512 x y z mask)
|
|
(VMOVDQU16Masked128 (VPMULHW128 x y) mask) => (VPMULHWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMULHW256 x y) mask) => (VPMULHWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMULHW512 x y) mask) => (VPMULHWMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMULHUW128 x y) mask) => (VPMULHUWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMULHUW256 x y) mask) => (VPMULHUWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMULHUW512 x y) mask) => (VPMULHUWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VMULPS128 x y) mask) => (VMULPSMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VMULPS256 x y) mask) => (VMULPSMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VMULPS512 x y) mask) => (VMULPSMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VMULPD128 x y) mask) => (VMULPDMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VMULPD256 x y) mask) => (VMULPDMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VMULPD512 x y) mask) => (VMULPDMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPMULLW128 x y) mask) => (VPMULLWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPMULLW256 x y) mask) => (VPMULLWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPMULLW512 x y) mask) => (VPMULLWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPMULLD128 x y) mask) => (VPMULLDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPMULLD256 x y) mask) => (VPMULLDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPMULLD512 x y) mask) => (VPMULLDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPMULLQ128 x y) mask) => (VPMULLQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPMULLQ256 x y) mask) => (VPMULLQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPMULLQ512 x y) mask) => (VPMULLQMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VFMSUBADD213PS128 x y z) mask) => (VFMSUBADD213PSMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VFMSUBADD213PS256 x y z) mask) => (VFMSUBADD213PSMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VFMSUBADD213PS512 x y z) mask) => (VFMSUBADD213PSMasked512 x y z mask)
|
|
(VMOVDQU64Masked128 (VFMSUBADD213PD128 x y z) mask) => (VFMSUBADD213PDMasked128 x y z mask)
|
|
(VMOVDQU64Masked256 (VFMSUBADD213PD256 x y z) mask) => (VFMSUBADD213PDMasked256 x y z mask)
|
|
(VMOVDQU64Masked512 (VFMSUBADD213PD512 x y z) mask) => (VFMSUBADD213PDMasked512 x y z mask)
|
|
(VMOVDQU8Masked128 (VPOPCNTB128 x) mask) => (VPOPCNTBMasked128 x mask)
|
|
(VMOVDQU8Masked256 (VPOPCNTB256 x) mask) => (VPOPCNTBMasked256 x mask)
|
|
(VMOVDQU8Masked512 (VPOPCNTB512 x) mask) => (VPOPCNTBMasked512 x mask)
|
|
(VMOVDQU16Masked128 (VPOPCNTW128 x) mask) => (VPOPCNTWMasked128 x mask)
|
|
(VMOVDQU16Masked256 (VPOPCNTW256 x) mask) => (VPOPCNTWMasked256 x mask)
|
|
(VMOVDQU16Masked512 (VPOPCNTW512 x) mask) => (VPOPCNTWMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VPOPCNTD128 x) mask) => (VPOPCNTDMasked128 x mask)
|
|
(VMOVDQU32Masked256 (VPOPCNTD256 x) mask) => (VPOPCNTDMasked256 x mask)
|
|
(VMOVDQU32Masked512 (VPOPCNTD512 x) mask) => (VPOPCNTDMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VPOPCNTQ128 x) mask) => (VPOPCNTQMasked128 x mask)
|
|
(VMOVDQU64Masked256 (VPOPCNTQ256 x) mask) => (VPOPCNTQMasked256 x mask)
|
|
(VMOVDQU64Masked512 (VPOPCNTQ512 x) mask) => (VPOPCNTQMasked512 x mask)
|
|
(VMOVDQU32Masked512 (VPORD512 x y) mask) => (VPORDMasked512 x y mask)
|
|
(VMOVDQU64Masked512 (VPORQ512 x y) mask) => (VPORQMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPERMI2B128 x y z) mask) => (VPERMI2BMasked128 x y z mask)
|
|
(VMOVDQU8Masked256 (VPERMI2B256 x y z) mask) => (VPERMI2BMasked256 x y z mask)
|
|
(VMOVDQU8Masked512 (VPERMI2B512 x y z) mask) => (VPERMI2BMasked512 x y z mask)
|
|
(VMOVDQU16Masked128 (VPERMI2W128 x y z) mask) => (VPERMI2WMasked128 x y z mask)
|
|
(VMOVDQU16Masked256 (VPERMI2W256 x y z) mask) => (VPERMI2WMasked256 x y z mask)
|
|
(VMOVDQU16Masked512 (VPERMI2W512 x y z) mask) => (VPERMI2WMasked512 x y z mask)
|
|
(VMOVDQU32Masked128 (VPERMI2PS128 x y z) mask) => (VPERMI2PSMasked128 x y z mask)
|
|
(VMOVDQU32Masked128 (VPERMI2D128 x y z) mask) => (VPERMI2DMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VPERMI2PS256 x y z) mask) => (VPERMI2PSMasked256 x y z mask)
|
|
(VMOVDQU32Masked256 (VPERMI2D256 x y z) mask) => (VPERMI2DMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VPERMI2PS512 x y z) mask) => (VPERMI2PSMasked512 x y z mask)
|
|
(VMOVDQU32Masked512 (VPERMI2D512 x y z) mask) => (VPERMI2DMasked512 x y z mask)
|
|
(VMOVDQU64Masked128 (VPERMI2PD128 x y z) mask) => (VPERMI2PDMasked128 x y z mask)
|
|
(VMOVDQU64Masked128 (VPERMI2Q128 x y z) mask) => (VPERMI2QMasked128 x y z mask)
|
|
(VMOVDQU64Masked256 (VPERMI2PD256 x y z) mask) => (VPERMI2PDMasked256 x y z mask)
|
|
(VMOVDQU64Masked256 (VPERMI2Q256 x y z) mask) => (VPERMI2QMasked256 x y z mask)
|
|
(VMOVDQU64Masked512 (VPERMI2PD512 x y z) mask) => (VPERMI2PDMasked512 x y z mask)
|
|
(VMOVDQU64Masked512 (VPERMI2Q512 x y z) mask) => (VPERMI2QMasked512 x y z mask)
|
|
(VMOVDQU32Masked256 (VPSHUFD256 [a] x) mask) => (VPSHUFDMasked256 [a] x mask)
|
|
(VMOVDQU32Masked512 (VPSHUFD512 [a] x) mask) => (VPSHUFDMasked512 [a] x mask)
|
|
(VMOVDQU16Masked256 (VPSHUFHW256 [a] x) mask) => (VPSHUFHWMasked256 [a] x mask)
|
|
(VMOVDQU16Masked512 (VPSHUFHW512 [a] x) mask) => (VPSHUFHWMasked512 [a] x mask)
|
|
(VMOVDQU16Masked128 (VPSHUFHW128 [a] x) mask) => (VPSHUFHWMasked128 [a] x mask)
|
|
(VMOVDQU32Masked128 (VPSHUFD128 [a] x) mask) => (VPSHUFDMasked128 [a] x mask)
|
|
(VMOVDQU8Masked256 (VPSHUFB256 x y) mask) => (VPSHUFBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPSHUFB512 x y) mask) => (VPSHUFBMasked512 x y mask)
|
|
(VMOVDQU8Masked128 (VPSHUFB128 x y) mask) => (VPSHUFBMasked128 x y mask)
|
|
(VMOVDQU8Masked256 (VPERMB256 x y) mask) => (VPERMBMasked256 x y mask)
|
|
(VMOVDQU8Masked512 (VPERMB512 x y) mask) => (VPERMBMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPERMW128 x y) mask) => (VPERMWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPERMW256 x y) mask) => (VPERMWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPERMW512 x y) mask) => (VPERMWMasked512 x y mask)
|
|
(VMOVDQU32Masked256 (VPERMPS256 x y) mask) => (VPERMPSMasked256 x y mask)
|
|
(VMOVDQU32Masked256 (VPERMD256 x y) mask) => (VPERMDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPERMPS512 x y) mask) => (VPERMPSMasked512 x y mask)
|
|
(VMOVDQU32Masked512 (VPERMD512 x y) mask) => (VPERMDMasked512 x y mask)
|
|
(VMOVDQU64Masked256 (VPERMPD256 x y) mask) => (VPERMPDMasked256 x y mask)
|
|
(VMOVDQU64Masked256 (VPERMQ256 x y) mask) => (VPERMQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPERMPD512 x y) mask) => (VPERMPDMasked512 x y mask)
|
|
(VMOVDQU64Masked512 (VPERMQ512 x y) mask) => (VPERMQMasked512 x y mask)
|
|
(VMOVDQU32Masked512 (VRCP14PS512 x) mask) => (VRCP14PSMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VRCP14PD128 x) mask) => (VRCP14PDMasked128 x mask)
|
|
(VMOVDQU64Masked256 (VRCP14PD256 x) mask) => (VRCP14PDMasked256 x mask)
|
|
(VMOVDQU64Masked512 (VRCP14PD512 x) mask) => (VRCP14PDMasked512 x mask)
|
|
(VMOVDQU32Masked512 (VRSQRT14PS512 x) mask) => (VRSQRT14PSMasked512 x mask)
|
|
(VMOVDQU64Masked128 (VRSQRT14PD128 x) mask) => (VRSQRT14PDMasked128 x mask)
|
|
(VMOVDQU64Masked256 (VRSQRT14PD256 x) mask) => (VRSQRT14PDMasked256 x mask)
|
|
(VMOVDQU64Masked512 (VRSQRT14PD512 x) mask) => (VRSQRT14PDMasked512 x mask)
|
|
(VMOVDQU32Masked128 (VPROLD128 [a] x) mask) => (VPROLDMasked128 [a] x mask)
|
|
(VMOVDQU32Masked256 (VPROLD256 [a] x) mask) => (VPROLDMasked256 [a] x mask)
|
|
(VMOVDQU32Masked512 (VPROLD512 [a] x) mask) => (VPROLDMasked512 [a] x mask)
|
|
(VMOVDQU64Masked128 (VPROLQ128 [a] x) mask) => (VPROLQMasked128 [a] x mask)
|
|
(VMOVDQU64Masked256 (VPROLQ256 [a] x) mask) => (VPROLQMasked256 [a] x mask)
|
|
(VMOVDQU64Masked512 (VPROLQ512 [a] x) mask) => (VPROLQMasked512 [a] x mask)
|
|
(VMOVDQU32Masked128 (VPRORD128 [a] x) mask) => (VPRORDMasked128 [a] x mask)
|
|
(VMOVDQU32Masked256 (VPRORD256 [a] x) mask) => (VPRORDMasked256 [a] x mask)
|
|
(VMOVDQU32Masked512 (VPRORD512 [a] x) mask) => (VPRORDMasked512 [a] x mask)
|
|
(VMOVDQU64Masked128 (VPRORQ128 [a] x) mask) => (VPRORQMasked128 [a] x mask)
|
|
(VMOVDQU64Masked256 (VPRORQ256 [a] x) mask) => (VPRORQMasked256 [a] x mask)
|
|
(VMOVDQU64Masked512 (VPRORQ512 [a] x) mask) => (VPRORQMasked512 [a] x mask)
|
|
(VMOVDQU32Masked128 (VPROLVD128 x y) mask) => (VPROLVDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPROLVD256 x y) mask) => (VPROLVDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPROLVD512 x y) mask) => (VPROLVDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPROLVQ128 x y) mask) => (VPROLVQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPROLVQ256 x y) mask) => (VPROLVQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPROLVQ512 x y) mask) => (VPROLVQMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPRORVD128 x y) mask) => (VPRORVDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPRORVD256 x y) mask) => (VPRORVDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPRORVD512 x y) mask) => (VPRORVDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPRORVQ128 x y) mask) => (VPRORVQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPRORVQ256 x y) mask) => (VPRORVQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPRORVQ512 x y) mask) => (VPRORVQMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VSCALEFPS128 x y) mask) => (VSCALEFPSMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VSCALEFPS256 x y) mask) => (VSCALEFPSMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VSCALEFPS512 x y) mask) => (VSCALEFPSMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VSCALEFPD128 x y) mask) => (VSCALEFPDMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VSCALEFPD256 x y) mask) => (VSCALEFPDMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VSCALEFPD512 x y) mask) => (VSCALEFPDMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPSHLDW128 [a] x y) mask) => (VPSHLDWMasked128 [a] x y mask)
|
|
(VMOVDQU16Masked256 (VPSHLDW256 [a] x y) mask) => (VPSHLDWMasked256 [a] x y mask)
|
|
(VMOVDQU16Masked512 (VPSHLDW512 [a] x y) mask) => (VPSHLDWMasked512 [a] x y mask)
|
|
(VMOVDQU32Masked128 (VPSHLDD128 [a] x y) mask) => (VPSHLDDMasked128 [a] x y mask)
|
|
(VMOVDQU32Masked256 (VPSHLDD256 [a] x y) mask) => (VPSHLDDMasked256 [a] x y mask)
|
|
(VMOVDQU32Masked512 (VPSHLDD512 [a] x y) mask) => (VPSHLDDMasked512 [a] x y mask)
|
|
(VMOVDQU64Masked128 (VPSHLDQ128 [a] x y) mask) => (VPSHLDQMasked128 [a] x y mask)
|
|
(VMOVDQU64Masked256 (VPSHLDQ256 [a] x y) mask) => (VPSHLDQMasked256 [a] x y mask)
|
|
(VMOVDQU64Masked512 (VPSHLDQ512 [a] x y) mask) => (VPSHLDQMasked512 [a] x y mask)
|
|
(VMOVDQU16Masked128 (VPSLLW128 x y) mask) => (VPSLLWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPSLLW256 x y) mask) => (VPSLLWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPSLLW512 x y) mask) => (VPSLLWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPSLLD128 x y) mask) => (VPSLLDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPSLLD256 x y) mask) => (VPSLLDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPSLLD512 x y) mask) => (VPSLLDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPSLLQ128 x y) mask) => (VPSLLQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPSLLQ256 x y) mask) => (VPSLLQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPSLLQ512 x y) mask) => (VPSLLQMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPSHRDW128 [a] x y) mask) => (VPSHRDWMasked128 [a] x y mask)
|
|
(VMOVDQU16Masked256 (VPSHRDW256 [a] x y) mask) => (VPSHRDWMasked256 [a] x y mask)
|
|
(VMOVDQU16Masked512 (VPSHRDW512 [a] x y) mask) => (VPSHRDWMasked512 [a] x y mask)
|
|
(VMOVDQU32Masked128 (VPSHRDD128 [a] x y) mask) => (VPSHRDDMasked128 [a] x y mask)
|
|
(VMOVDQU32Masked256 (VPSHRDD256 [a] x y) mask) => (VPSHRDDMasked256 [a] x y mask)
|
|
(VMOVDQU32Masked512 (VPSHRDD512 [a] x y) mask) => (VPSHRDDMasked512 [a] x y mask)
|
|
(VMOVDQU64Masked128 (VPSHRDQ128 [a] x y) mask) => (VPSHRDQMasked128 [a] x y mask)
|
|
(VMOVDQU64Masked256 (VPSHRDQ256 [a] x y) mask) => (VPSHRDQMasked256 [a] x y mask)
|
|
(VMOVDQU64Masked512 (VPSHRDQ512 [a] x y) mask) => (VPSHRDQMasked512 [a] x y mask)
|
|
(VMOVDQU16Masked128 (VPSRAW128 x y) mask) => (VPSRAWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPSRAW256 x y) mask) => (VPSRAWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPSRAW512 x y) mask) => (VPSRAWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPSRAD128 x y) mask) => (VPSRADMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPSRAD256 x y) mask) => (VPSRADMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPSRAD512 x y) mask) => (VPSRADMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPSRAQ128 x y) mask) => (VPSRAQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPSRAQ256 x y) mask) => (VPSRAQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPSRAQ512 x y) mask) => (VPSRAQMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPSRLW128 x y) mask) => (VPSRLWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPSRLW256 x y) mask) => (VPSRLWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPSRLW512 x y) mask) => (VPSRLWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPSRLD128 x y) mask) => (VPSRLDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPSRLD256 x y) mask) => (VPSRLDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPSRLD512 x y) mask) => (VPSRLDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPSRLQ128 x y) mask) => (VPSRLQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPSRLQ256 x y) mask) => (VPSRLQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPSRLQ512 x y) mask) => (VPSRLQMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPSHLDVW128 x y z) mask) => (VPSHLDVWMasked128 x y z mask)
|
|
(VMOVDQU16Masked256 (VPSHLDVW256 x y z) mask) => (VPSHLDVWMasked256 x y z mask)
|
|
(VMOVDQU16Masked512 (VPSHLDVW512 x y z) mask) => (VPSHLDVWMasked512 x y z mask)
|
|
(VMOVDQU32Masked128 (VPSHLDVD128 x y z) mask) => (VPSHLDVDMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VPSHLDVD256 x y z) mask) => (VPSHLDVDMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VPSHLDVD512 x y z) mask) => (VPSHLDVDMasked512 x y z mask)
|
|
(VMOVDQU64Masked128 (VPSHLDVQ128 x y z) mask) => (VPSHLDVQMasked128 x y z mask)
|
|
(VMOVDQU64Masked256 (VPSHLDVQ256 x y z) mask) => (VPSHLDVQMasked256 x y z mask)
|
|
(VMOVDQU64Masked512 (VPSHLDVQ512 x y z) mask) => (VPSHLDVQMasked512 x y z mask)
|
|
(VMOVDQU16Masked128 (VPSLLVW128 x y) mask) => (VPSLLVWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPSLLVW256 x y) mask) => (VPSLLVWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPSLLVW512 x y) mask) => (VPSLLVWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPSLLVD128 x y) mask) => (VPSLLVDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPSLLVD256 x y) mask) => (VPSLLVDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPSLLVD512 x y) mask) => (VPSLLVDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPSLLVQ128 x y) mask) => (VPSLLVQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPSLLVQ256 x y) mask) => (VPSLLVQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPSLLVQ512 x y) mask) => (VPSLLVQMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPSHRDVW128 x y z) mask) => (VPSHRDVWMasked128 x y z mask)
|
|
(VMOVDQU16Masked256 (VPSHRDVW256 x y z) mask) => (VPSHRDVWMasked256 x y z mask)
|
|
(VMOVDQU16Masked512 (VPSHRDVW512 x y z) mask) => (VPSHRDVWMasked512 x y z mask)
|
|
(VMOVDQU32Masked128 (VPSHRDVD128 x y z) mask) => (VPSHRDVDMasked128 x y z mask)
|
|
(VMOVDQU32Masked256 (VPSHRDVD256 x y z) mask) => (VPSHRDVDMasked256 x y z mask)
|
|
(VMOVDQU32Masked512 (VPSHRDVD512 x y z) mask) => (VPSHRDVDMasked512 x y z mask)
|
|
(VMOVDQU64Masked128 (VPSHRDVQ128 x y z) mask) => (VPSHRDVQMasked128 x y z mask)
|
|
(VMOVDQU64Masked256 (VPSHRDVQ256 x y z) mask) => (VPSHRDVQMasked256 x y z mask)
|
|
(VMOVDQU64Masked512 (VPSHRDVQ512 x y z) mask) => (VPSHRDVQMasked512 x y z mask)
|
|
(VMOVDQU16Masked128 (VPSRAVW128 x y) mask) => (VPSRAVWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPSRAVW256 x y) mask) => (VPSRAVWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPSRAVW512 x y) mask) => (VPSRAVWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPSRAVD128 x y) mask) => (VPSRAVDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPSRAVD256 x y) mask) => (VPSRAVDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPSRAVD512 x y) mask) => (VPSRAVDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPSRAVQ128 x y) mask) => (VPSRAVQMasked128 x y mask)
|
|
(VMOVDQU64Masked256 (VPSRAVQ256 x y) mask) => (VPSRAVQMasked256 x y mask)
|
|
(VMOVDQU64Masked512 (VPSRAVQ512 x y) mask) => (VPSRAVQMasked512 x y mask)
|
|
(VMOVDQU16Masked128 (VPSRLVW128 x y) mask) => (VPSRLVWMasked128 x y mask)
|
|
(VMOVDQU16Masked256 (VPSRLVW256 x y) mask) => (VPSRLVWMasked256 x y mask)
|
|
(VMOVDQU16Masked512 (VPSRLVW512 x y) mask) => (VPSRLVWMasked512 x y mask)
|
|
(VMOVDQU32Masked128 (VPSRLVD128 x y) mask) => (VPSRLVDMasked128 x y mask)
|
|
(VMOVDQU32Masked256 (VPSRLVD256 x y) mask) => (VPSRLVDMasked256 x y mask)
|
|
(VMOVDQU32Masked512 (VPSRLVD512 x y) mask) => (VPSRLVDMasked512 x y mask)
|
|
(VMOVDQU64Masked128 (VPSRLVQ128 x y) mask) => (VPSRLVQMasked128 x y mask)
(VMOVDQU64Masked256 (VPSRLVQ256 x y) mask) => (VPSRLVQMasked256 x y mask)
(VMOVDQU64Masked512 (VPSRLVQ512 x y) mask) => (VPSRLVQMasked512 x y mask)
(VMOVDQU32Masked128 (VSQRTPS128 x) mask) => (VSQRTPSMasked128 x mask)
(VMOVDQU32Masked256 (VSQRTPS256 x) mask) => (VSQRTPSMasked256 x mask)
(VMOVDQU32Masked512 (VSQRTPS512 x) mask) => (VSQRTPSMasked512 x mask)
(VMOVDQU64Masked128 (VSQRTPD128 x) mask) => (VSQRTPDMasked128 x mask)
(VMOVDQU64Masked256 (VSQRTPD256 x) mask) => (VSQRTPDMasked256 x mask)
(VMOVDQU64Masked512 (VSQRTPD512 x) mask) => (VSQRTPDMasked512 x mask)
(VMOVDQU32Masked128 (VSUBPS128 x y) mask) => (VSUBPSMasked128 x y mask)
(VMOVDQU32Masked256 (VSUBPS256 x y) mask) => (VSUBPSMasked256 x y mask)
(VMOVDQU32Masked512 (VSUBPS512 x y) mask) => (VSUBPSMasked512 x y mask)
(VMOVDQU64Masked128 (VSUBPD128 x y) mask) => (VSUBPDMasked128 x y mask)
(VMOVDQU64Masked256 (VSUBPD256 x y) mask) => (VSUBPDMasked256 x y mask)
(VMOVDQU64Masked512 (VSUBPD512 x y) mask) => (VSUBPDMasked512 x y mask)
(VMOVDQU8Masked128 (VPSUBB128 x y) mask) => (VPSUBBMasked128 x y mask)
(VMOVDQU8Masked256 (VPSUBB256 x y) mask) => (VPSUBBMasked256 x y mask)
(VMOVDQU8Masked512 (VPSUBB512 x y) mask) => (VPSUBBMasked512 x y mask)
(VMOVDQU16Masked128 (VPSUBW128 x y) mask) => (VPSUBWMasked128 x y mask)
(VMOVDQU16Masked256 (VPSUBW256 x y) mask) => (VPSUBWMasked256 x y mask)
(VMOVDQU16Masked512 (VPSUBW512 x y) mask) => (VPSUBWMasked512 x y mask)
(VMOVDQU32Masked128 (VPSUBD128 x y) mask) => (VPSUBDMasked128 x y mask)
(VMOVDQU32Masked256 (VPSUBD256 x y) mask) => (VPSUBDMasked256 x y mask)
(VMOVDQU32Masked512 (VPSUBD512 x y) mask) => (VPSUBDMasked512 x y mask)
(VMOVDQU64Masked128 (VPSUBQ128 x y) mask) => (VPSUBQMasked128 x y mask)
(VMOVDQU64Masked256 (VPSUBQ256 x y) mask) => (VPSUBQMasked256 x y mask)
(VMOVDQU64Masked512 (VPSUBQ512 x y) mask) => (VPSUBQMasked512 x y mask)
(VMOVDQU8Masked128 (VPSUBSB128 x y) mask) => (VPSUBSBMasked128 x y mask)
(VMOVDQU8Masked256 (VPSUBSB256 x y) mask) => (VPSUBSBMasked256 x y mask)
(VMOVDQU8Masked512 (VPSUBSB512 x y) mask) => (VPSUBSBMasked512 x y mask)
(VMOVDQU16Masked128 (VPSUBSW128 x y) mask) => (VPSUBSWMasked128 x y mask)
(VMOVDQU16Masked256 (VPSUBSW256 x y) mask) => (VPSUBSWMasked256 x y mask)
(VMOVDQU16Masked512 (VPSUBSW512 x y) mask) => (VPSUBSWMasked512 x y mask)
(VMOVDQU8Masked128 (VPSUBUSB128 x y) mask) => (VPSUBUSBMasked128 x y mask)
(VMOVDQU8Masked256 (VPSUBUSB256 x y) mask) => (VPSUBUSBMasked256 x y mask)
(VMOVDQU8Masked512 (VPSUBUSB512 x y) mask) => (VPSUBUSBMasked512 x y mask)
(VMOVDQU16Masked128 (VPSUBUSW128 x y) mask) => (VPSUBUSWMasked128 x y mask)
(VMOVDQU16Masked256 (VPSUBUSW256 x y) mask) => (VPSUBUSWMasked256 x y mask)
(VMOVDQU16Masked512 (VPSUBUSW512 x y) mask) => (VPSUBUSWMasked512 x y mask)
(VMOVDQU32Masked512 (VPXORD512 x y) mask) => (VPXORDMasked512 x y mask)
(VMOVDQU64Masked512 (VPXORQ512 x y) mask) => (VPXORQMasked512 x y mask)
(VMOVDQU16Masked128 (VPSLLW128const [a] x) mask) => (VPSLLWMasked128const [a] x mask)
(VMOVDQU16Masked256 (VPSLLW256const [a] x) mask) => (VPSLLWMasked256const [a] x mask)
(VMOVDQU16Masked512 (VPSLLW512const [a] x) mask) => (VPSLLWMasked512const [a] x mask)
(VMOVDQU32Masked128 (VPSLLD128const [a] x) mask) => (VPSLLDMasked128const [a] x mask)
(VMOVDQU32Masked256 (VPSLLD256const [a] x) mask) => (VPSLLDMasked256const [a] x mask)
(VMOVDQU32Masked512 (VPSLLD512const [a] x) mask) => (VPSLLDMasked512const [a] x mask)
(VMOVDQU64Masked128 (VPSLLQ128const [a] x) mask) => (VPSLLQMasked128const [a] x mask)
(VMOVDQU64Masked256 (VPSLLQ256const [a] x) mask) => (VPSLLQMasked256const [a] x mask)
(VMOVDQU64Masked512 (VPSLLQ512const [a] x) mask) => (VPSLLQMasked512const [a] x mask)
(VMOVDQU16Masked128 (VPSRAW128const [a] x) mask) => (VPSRAWMasked128const [a] x mask)
(VMOVDQU16Masked256 (VPSRAW256const [a] x) mask) => (VPSRAWMasked256const [a] x mask)
(VMOVDQU16Masked512 (VPSRAW512const [a] x) mask) => (VPSRAWMasked512const [a] x mask)
(VMOVDQU32Masked128 (VPSRAD128const [a] x) mask) => (VPSRADMasked128const [a] x mask)
(VMOVDQU32Masked256 (VPSRAD256const [a] x) mask) => (VPSRADMasked256const [a] x mask)
(VMOVDQU32Masked512 (VPSRAD512const [a] x) mask) => (VPSRADMasked512const [a] x mask)
(VMOVDQU64Masked128 (VPSRAQ128const [a] x) mask) => (VPSRAQMasked128const [a] x mask)
(VMOVDQU64Masked256 (VPSRAQ256const [a] x) mask) => (VPSRAQMasked256const [a] x mask)
(VMOVDQU64Masked512 (VPSRAQ512const [a] x) mask) => (VPSRAQMasked512const [a] x mask)
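// The rules below fold a 512-bit merging blend (VPBLENDM{B,W,D,Q}Masked512) of an unmasked
// operation into that operation's merging masked form, eliminating the separate blend.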
(VPBLENDMBMasked512 dst (VGF2P8MULB512 x y) mask) => (VGF2P8MULBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPABSB512 x) mask) => (VPABSBMasked512Merging dst x mask)
(VPBLENDMBMasked512 dst (VPADDB512 x y) mask) => (VPADDBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPADDSB512 x y) mask) => (VPADDSBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPADDUSB512 x y) mask) => (VPADDUSBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPALIGNR512 [a] x y) mask) => (VPALIGNRMasked512Merging dst [a] x y mask)
(VPBLENDMBMasked512 dst (VPAVGB512 x y) mask) => (VPAVGBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPMAXSB512 x y) mask) => (VPMAXSBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPMAXUB512 x y) mask) => (VPMAXUBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPMINSB512 x y) mask) => (VPMINSBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPMINUB512 x y) mask) => (VPMINUBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPOPCNTB512 x) mask) => (VPOPCNTBMasked512Merging dst x mask)
(VPBLENDMBMasked512 dst (VPSHUFB512 x y) mask) => (VPSHUFBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPSUBB512 x y) mask) => (VPSUBBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPSUBSB512 x y) mask) => (VPSUBSBMasked512Merging dst x y mask)
(VPBLENDMBMasked512 dst (VPSUBUSB512 x y) mask) => (VPSUBUSBMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VADDPS512 x y) mask) => (VADDPSMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VCVTPS2UDQ512 x) mask) => (VCVTPS2UDQMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VCVTTPS2DQ512 x) mask) => (VCVTTPS2DQMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VDIVPS512 x y) mask) => (VDIVPSMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VMAXPS512 x y) mask) => (VMAXPSMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VMINPS512 x y) mask) => (VMINPSMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VMULPS512 x y) mask) => (VMULPSMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPABSD512 x) mask) => (VPABSDMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VPACKSSDW512 x y) mask) => (VPACKSSDWMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPACKUSDW512 x y) mask) => (VPACKUSDWMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPADDD512 x y) mask) => (VPADDDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPANDD512 x y) mask) => (VPANDDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPLZCNTD512 x) mask) => (VPLZCNTDMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMAXSD512 x y) mask) => (VPMAXSDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPMAXUD512 x y) mask) => (VPMAXUDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPMINSD512 x y) mask) => (VPMINSDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPMINUD512 x y) mask) => (VPMINUDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPMOVDB128_512 x) mask) => (VPMOVDBMasked128_512Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMOVDW256 x) mask) => (VPMOVDWMasked256Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMOVSDB128_512 x) mask) => (VPMOVSDBMasked128_512Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMOVSDW256 x) mask) => (VPMOVSDWMasked256Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMOVUSDB128_512 x) mask) => (VPMOVUSDBMasked128_512Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMOVUSDW256 x) mask) => (VPMOVUSDWMasked256Merging dst x mask)
(VPBLENDMDMasked512 dst (VPMULLD512 x y) mask) => (VPMULLDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPOPCNTD512 x) mask) => (VPOPCNTDMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VPORD512 x y) mask) => (VPORDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPROLD512 [a] x) mask) => (VPROLDMasked512Merging dst [a] x mask)
(VPBLENDMDMasked512 dst (VPROLVD512 x y) mask) => (VPROLVDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPRORD512 [a] x) mask) => (VPRORDMasked512Merging dst [a] x mask)
(VPBLENDMDMasked512 dst (VPRORVD512 x y) mask) => (VPRORVDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPSHLDD512 [a] x y) mask) => (VPSHLDDMasked512Merging dst [a] x y mask)
(VPBLENDMDMasked512 dst (VPSHRDD512 [a] x y) mask) => (VPSHRDDMasked512Merging dst [a] x y mask)
(VPBLENDMDMasked512 dst (VPSHUFD512 [a] x) mask) => (VPSHUFDMasked512Merging dst [a] x mask)
(VPBLENDMDMasked512 dst (VPSLLD512const [a] x) mask) => (VPSLLDMasked512constMerging dst [a] x mask)
(VPBLENDMDMasked512 dst (VPSLLVD512 x y) mask) => (VPSLLVDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPSRAD512const [a] x) mask) => (VPSRADMasked512constMerging dst [a] x mask)
(VPBLENDMDMasked512 dst (VPSRAVD512 x y) mask) => (VPSRAVDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPSRLVD512 x y) mask) => (VPSRLVDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPSUBD512 x y) mask) => (VPSUBDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VPXORD512 x y) mask) => (VPXORDMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VRCP14PS512 x) mask) => (VRCP14PSMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VREDUCEPS512 [a] x) mask) => (VREDUCEPSMasked512Merging dst [a] x mask)
(VPBLENDMDMasked512 dst (VRNDSCALEPS512 [a] x) mask) => (VRNDSCALEPSMasked512Merging dst [a] x mask)
(VPBLENDMDMasked512 dst (VRSQRT14PS512 x) mask) => (VRSQRT14PSMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VSCALEFPS512 x y) mask) => (VSCALEFPSMasked512Merging dst x y mask)
(VPBLENDMDMasked512 dst (VSQRTPS512 x) mask) => (VSQRTPSMasked512Merging dst x mask)
(VPBLENDMDMasked512 dst (VSUBPS512 x y) mask) => (VSUBPSMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VADDPD512 x y) mask) => (VADDPDMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VDIVPD512 x y) mask) => (VDIVPDMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VMAXPD512 x y) mask) => (VMAXPDMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VMINPD512 x y) mask) => (VMINPDMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VMULPD512 x y) mask) => (VMULPDMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPABSQ512 x) mask) => (VPABSQMasked512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPADDQ512 x y) mask) => (VPADDQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPANDQ512 x y) mask) => (VPANDQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPLZCNTQ512 x) mask) => (VPLZCNTQMasked512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMAXSQ512 x y) mask) => (VPMAXSQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPMAXUQ512 x y) mask) => (VPMAXUQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPMINSQ512 x y) mask) => (VPMINSQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPMINUQ512 x y) mask) => (VPMINUQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPMOVQB128_512 x) mask) => (VPMOVQBMasked128_512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVQD256 x) mask) => (VPMOVQDMasked256Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVQW128_512 x) mask) => (VPMOVQWMasked128_512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVSQB128_512 x) mask) => (VPMOVSQBMasked128_512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVSQD256 x) mask) => (VPMOVSQDMasked256Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVSQW128_512 x) mask) => (VPMOVSQWMasked128_512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVUSQB128_512 x) mask) => (VPMOVUSQBMasked128_512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVUSQD256 x) mask) => (VPMOVUSQDMasked256Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMOVUSQW128_512 x) mask) => (VPMOVUSQWMasked128_512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPMULLQ512 x y) mask) => (VPMULLQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPOPCNTQ512 x) mask) => (VPOPCNTQMasked512Merging dst x mask)
(VPBLENDMQMasked512 dst (VPORQ512 x y) mask) => (VPORQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPROLQ512 [a] x) mask) => (VPROLQMasked512Merging dst [a] x mask)
(VPBLENDMQMasked512 dst (VPROLVQ512 x y) mask) => (VPROLVQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPRORQ512 [a] x) mask) => (VPRORQMasked512Merging dst [a] x mask)
(VPBLENDMQMasked512 dst (VPRORVQ512 x y) mask) => (VPRORVQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPSHLDQ512 [a] x y) mask) => (VPSHLDQMasked512Merging dst [a] x y mask)
(VPBLENDMQMasked512 dst (VPSHRDQ512 [a] x y) mask) => (VPSHRDQMasked512Merging dst [a] x y mask)
(VPBLENDMQMasked512 dst (VPSLLQ512const [a] x) mask) => (VPSLLQMasked512constMerging dst [a] x mask)
(VPBLENDMQMasked512 dst (VPSLLVQ512 x y) mask) => (VPSLLVQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPSRAQ512const [a] x) mask) => (VPSRAQMasked512constMerging dst [a] x mask)
(VPBLENDMQMasked512 dst (VPSRAVQ512 x y) mask) => (VPSRAVQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPSRLVQ512 x y) mask) => (VPSRLVQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPSUBQ512 x y) mask) => (VPSUBQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VPXORQ512 x y) mask) => (VPXORQMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VRCP14PD512 x) mask) => (VRCP14PDMasked512Merging dst x mask)
(VPBLENDMQMasked512 dst (VREDUCEPD512 [a] x) mask) => (VREDUCEPDMasked512Merging dst [a] x mask)
(VPBLENDMQMasked512 dst (VRNDSCALEPD512 [a] x) mask) => (VRNDSCALEPDMasked512Merging dst [a] x mask)
(VPBLENDMQMasked512 dst (VRSQRT14PD512 x) mask) => (VRSQRT14PDMasked512Merging dst x mask)
(VPBLENDMQMasked512 dst (VSCALEFPD512 x y) mask) => (VSCALEFPDMasked512Merging dst x y mask)
(VPBLENDMQMasked512 dst (VSQRTPD512 x) mask) => (VSQRTPDMasked512Merging dst x mask)
(VPBLENDMQMasked512 dst (VSUBPD512 x y) mask) => (VSUBPDMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPABSW512 x) mask) => (VPABSWMasked512Merging dst x mask)
(VPBLENDMWMasked512 dst (VPADDSW512 x y) mask) => (VPADDSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPADDUSW512 x y) mask) => (VPADDUSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPADDW512 x y) mask) => (VPADDWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPAVGW512 x y) mask) => (VPAVGWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMADDUBSW512 x y) mask) => (VPMADDUBSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMADDWD512 x y) mask) => (VPMADDWDMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMAXSW512 x y) mask) => (VPMAXSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMAXUW512 x y) mask) => (VPMAXUWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMINSW512 x y) mask) => (VPMINSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMINUW512 x y) mask) => (VPMINUWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMOVSWB256 x) mask) => (VPMOVSWBMasked256Merging dst x mask)
(VPBLENDMWMasked512 dst (VPMOVUSWB256 x) mask) => (VPMOVUSWBMasked256Merging dst x mask)
(VPBLENDMWMasked512 dst (VPMOVWB256 x) mask) => (VPMOVWBMasked256Merging dst x mask)
(VPBLENDMWMasked512 dst (VPMULHUW512 x y) mask) => (VPMULHUWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMULHW512 x y) mask) => (VPMULHWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPMULLW512 x y) mask) => (VPMULLWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPOPCNTW512 x) mask) => (VPOPCNTWMasked512Merging dst x mask)
(VPBLENDMWMasked512 dst (VPSHLDW512 [a] x y) mask) => (VPSHLDWMasked512Merging dst [a] x y mask)
(VPBLENDMWMasked512 dst (VPSHRDW512 [a] x y) mask) => (VPSHRDWMasked512Merging dst [a] x y mask)
(VPBLENDMWMasked512 dst (VPSHUFHW512 [a] x) mask) => (VPSHUFHWMasked512Merging dst [a] x mask)
(VPBLENDMWMasked512 dst (VPSLLVW512 x y) mask) => (VPSLLVWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPSLLW512const [a] x) mask) => (VPSLLWMasked512constMerging dst [a] x mask)
(VPBLENDMWMasked512 dst (VPSRAVW512 x y) mask) => (VPSRAVWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPSRAW512const [a] x) mask) => (VPSRAWMasked512constMerging dst [a] x mask)
(VPBLENDMWMasked512 dst (VPSRLVW512 x y) mask) => (VPSRLVWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPSUBSW512 x y) mask) => (VPSUBSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPSUBUSW512 x y) mask) => (VPSUBUSWMasked512Merging dst x y mask)
(VPBLENDMWMasked512 dst (VPSUBW512 x y) mask) => (VPSUBWMasked512Merging dst x y mask)
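// 128-bit merges use VPBLENDVB128 with a byte-vector mask. When AVX-512 is available, the
// blend of an unmasked operation is folded into the operation's merging masked form, with the
// vector mask converted to a mask value by the matching VPMOVVec*ToM op.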
(VPBLENDVB128 dst (VADDPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VADDPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VADDPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VADDPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VBROADCASTSD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VBROADCASTSDMasked256Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VBROADCASTSD512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VBROADCASTSDMasked512Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VBROADCASTSS128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VBROADCASTSSMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VBROADCASTSS256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VBROADCASTSSMasked256Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VBROADCASTSS512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VBROADCASTSSMasked512Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VCVTPS2UDQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VCVTPS2UDQMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VCVTTPS2DQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VCVTTPS2DQMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VDIVPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VDIVPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VDIVPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VDIVPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VGF2P8MULB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VGF2P8MULBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VMAXPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMAXPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VMAXPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMAXPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VMINPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMINPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VMINPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMINPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VMULPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMULPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VMULPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMULPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPABSB128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSBMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPABSD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSDMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPABSQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSQMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPABSW128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSWMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPACKSSDW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPACKSSDWMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPACKUSDW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPACKUSDWMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDSB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDSBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDUSB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDUSBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDUSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDUSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPADDW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPALIGNR128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPALIGNRMasked128Merging dst [a] x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPAVGB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPAVGBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPAVGW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPAVGWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTB128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTBMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTB256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTBMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTB512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTBMasked512Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTDMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTDMasked256Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTD512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTDMasked512Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTQMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTQMasked256Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTQMasked512Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTW128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTWMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTW256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTWMasked256Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPBROADCASTW512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPBROADCASTWMasked512Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPLZCNTD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPLZCNTDMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPLZCNTQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPLZCNTQMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMADDUBSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMADDUBSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMADDWD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMADDWDMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXSB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXSD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXSQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXUB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXUD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXUQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMAXUW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINSB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINSD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINSQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINUB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINUD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINUQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMINUW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVDB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVDBMasked128_128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVDW128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVDWMasked128_128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVQB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVQBMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVQD128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVQDMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVQW128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVQWMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSDB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSDBMasked128_128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSDW128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSDWMasked128_128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSQB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSQBMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSQD128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSQDMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSQW128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSQWMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSWB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSWBMasked128_128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBDMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBDMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBD512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBDMasked512Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBQMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBQMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBQMasked512Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBW128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBWMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXBW256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBWMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXDQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXDQMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXDQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXDQMasked256Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXWD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXWDMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXWD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXWDMasked256Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXWQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXWQMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXWQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXWQMasked256Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVSXWQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXWQMasked512Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVUSDB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSDBMasked128_128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVUSDW128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSDWMasked128_128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVUSQB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSQBMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVUSQD128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSQDMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVUSQW128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSQWMasked128_128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVUSWB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSWBMasked128_128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVWB128_128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVWBMasked128_128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBDMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBDMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBD512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBDMasked512Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBQMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBQMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBQMasked512Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBW128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBWMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXBW256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBWMasked256Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXDQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXDQMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXDQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXDQMasked256Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXWD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXWDMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXWD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXWDMasked256Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXWQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXWQMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXWQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXWQMasked256Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMOVZXWQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXWQMasked512Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMULHUW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULHUWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMULHW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULHWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMULLD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULLDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMULLQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULLQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPMULLW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULLWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPOPCNTB128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTBMasked128Merging dst x (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPOPCNTD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTDMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPOPCNTQ128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTQMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPOPCNTW128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTWMasked128Merging dst x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPROLD128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLDMasked128Merging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPROLQ128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLQMasked128Merging dst [a] x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPROLVD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLVDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPROLVQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLVQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPRORD128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORDMasked128Merging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPRORQ128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORQMasked128Merging dst [a] x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPRORVD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORVDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPRORVQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORVQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHLDD128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHLDDMasked128Merging dst [a] x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHLDQ128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHLDQMasked128Merging dst [a] x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHLDW128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHLDWMasked128Merging dst [a] x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHRDD128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHRDDMasked128Merging dst [a] x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHRDQ128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHRDQMasked128Merging dst [a] x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHRDW128 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHRDWMasked128Merging dst [a] x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHUFB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHUFBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHUFD128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHUFDMasked128Merging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSHUFHW128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHUFHWMasked128Merging dst [a] x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSLLD128const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLDMasked128constMerging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSLLQ128const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLQMasked128constMerging dst [a] x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSLLVD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLVDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSLLVQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLVQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSLLVW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLVWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSLLW128const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLWMasked128constMerging dst [a] x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRAD128const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRADMasked128constMerging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRAQ128const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAQMasked128constMerging dst [a] x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRAVD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAVDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRAVQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAVQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRAVW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAVWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRAW128const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAWMasked128constMerging dst [a] x (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRLVD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRLVDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRLVQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRLVQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSRLVW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRLVWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBDMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBQ128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBQMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBSB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBSBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBUSB128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBUSBMasked128Merging dst x y (VPMOVVec8x16ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBUSW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBUSWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VPSUBW128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBWMasked128Merging dst x y (VPMOVVec16x8ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VRCP14PD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRCP14PDMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VREDUCEPD128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VREDUCEPDMasked128Merging dst [a] x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VREDUCEPS128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VREDUCEPSMasked128Merging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VRNDSCALEPD128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRNDSCALEPDMasked128Merging dst [a] x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VRNDSCALEPS128 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRNDSCALEPSMasked128Merging dst [a] x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VRSQRT14PD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRSQRT14PDMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VSCALEFPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSCALEFPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VSCALEFPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSCALEFPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VSQRTPD128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSQRTPDMasked128Merging dst x (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VSQRTPS128 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSQRTPSMasked128Merging dst x (VPMOVVec32x4ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VSUBPD128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSUBPDMasked128Merging dst x y (VPMOVVec64x2ToM <types.TypeMask> mask))
(VPBLENDVB128 dst (VSUBPS128 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSUBPSMasked128Merging dst x y (VPMOVVec32x4ToM <types.TypeMask> mask))
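// Same folding for 256-bit merges: a VPBLENDVB256 of an unmasked operation becomes that
// operation's merging masked form when AVX-512 is available.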
(VPBLENDVB256 dst (VADDPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VADDPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VADDPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VADDPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VCVTPS2UDQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VCVTPS2UDQMasked256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VCVTTPS2DQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VCVTTPS2DQMasked256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VDIVPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VDIVPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VDIVPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VDIVPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VGF2P8MULB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VGF2P8MULBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VMAXPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMAXPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VMAXPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMAXPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VMINPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMINPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VMINPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMINPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VMULPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMULPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VMULPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VMULPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPABSB256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSBMasked256Merging dst x (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPABSD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSDMasked256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPABSQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSQMasked256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPABSW256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPABSWMasked256Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPACKSSDW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPACKSSDWMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPACKUSDW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPACKUSDWMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDSB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDSBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDUSB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDUSBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDUSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDUSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPADDW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPADDWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPALIGNR256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPALIGNRMasked256Merging dst [a] x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPAVGB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPAVGBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPAVGW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPAVGWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPLZCNTD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPLZCNTDMasked256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPLZCNTQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPLZCNTQMasked256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMADDUBSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMADDUBSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMADDWD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMADDWDMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXSB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXSD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXSQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXUB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXUD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXUQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMAXUW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMAXUWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINSB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINSD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINSQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINUB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINUD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINUQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMINUW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMINUWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVDB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVDBMasked128_256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVDW128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVDWMasked128_256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVQB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVQBMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVQD128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVQDMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVQW128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVQWMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSDB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSDBMasked128_256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSDW128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSDWMasked128_256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSQB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSQBMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSQD128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSQDMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSQW128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSQWMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSWB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSWBMasked128_256Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSXBW512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXBWMasked512Merging dst x (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSXDQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXDQMasked512Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVSXWD512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVSXWDMasked512Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVUSDB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSDBMasked128_256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVUSDW128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSDWMasked128_256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVUSQB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSQBMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVUSQD128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSQDMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVUSQW128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSQWMasked128_256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVUSWB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVUSWBMasked128_256Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVWB128_256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVWBMasked128_256Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVZXBW512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXBWMasked512Merging dst x (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVZXDQ512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXDQMasked512Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMOVZXWD512 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMOVZXWDMasked512Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMULHUW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULHUWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMULHW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULHWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMULLD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULLDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMULLQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULLQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPMULLW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPMULLWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPOPCNTB256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTBMasked256Merging dst x (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPOPCNTD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTDMasked256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPOPCNTQ256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTQMasked256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPOPCNTW256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPOPCNTWMasked256Merging dst x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPROLD256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLDMasked256Merging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPROLQ256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLQMasked256Merging dst [a] x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPROLVD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLVDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPROLVQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPROLVQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPRORD256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORDMasked256Merging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPRORQ256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORQMasked256Merging dst [a] x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPRORVD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORVDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPRORVQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPRORVQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHLDD256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHLDDMasked256Merging dst [a] x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHLDQ256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHLDQMasked256Merging dst [a] x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHLDW256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHLDWMasked256Merging dst [a] x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHRDD256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHRDDMasked256Merging dst [a] x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHRDQ256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHRDQMasked256Merging dst [a] x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHRDW256 [a] x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHRDWMasked256Merging dst [a] x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHUFB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHUFBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHUFD256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHUFDMasked256Merging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSHUFHW256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSHUFHWMasked256Merging dst [a] x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSLLD256const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLDMasked256constMerging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSLLQ256const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLQMasked256constMerging dst [a] x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSLLVD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLVDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSLLVQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLVQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSLLVW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLVWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSLLW256const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSLLWMasked256constMerging dst [a] x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRAD256const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRADMasked256constMerging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRAQ256const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAQMasked256constMerging dst [a] x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRAVD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAVDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRAVQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAVQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRAVW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAVWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRAW256const [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRAWMasked256constMerging dst [a] x (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRLVD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRLVDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRLVQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRLVQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSRLVW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSRLVWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBDMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBQ256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBQMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBSB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBSBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBUSB256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBUSBMasked256Merging dst x y (VPMOVVec8x32ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBUSW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBUSWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VPSUBW256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VPSUBWMasked256Merging dst x y (VPMOVVec16x16ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VRCP14PD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRCP14PDMasked256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VREDUCEPD256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VREDUCEPDMasked256Merging dst [a] x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VREDUCEPS256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VREDUCEPSMasked256Merging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VRNDSCALEPD256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRNDSCALEPDMasked256Merging dst [a] x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VRNDSCALEPS256 [a] x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRNDSCALEPSMasked256Merging dst [a] x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VRSQRT14PD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VRSQRT14PDMasked256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VSCALEFPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSCALEFPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VSCALEFPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSCALEFPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VSQRTPD256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSQRTPDMasked256Merging dst x (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VSQRTPS256 x) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSQRTPSMasked256Merging dst x (VPMOVVec32x8ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VSUBPD256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSUBPDMasked256Merging dst x y (VPMOVVec64x4ToM <types.TypeMask> mask))
(VPBLENDVB256 dst (VSUBPS256 x y) mask) && v.Block.CPUfeatures.hasFeature(CPUavx512) => (VSUBPSMasked256Merging dst x y (VPMOVVec32x8ToM <types.TypeMask> mask))
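// The rules below fold a full-width vector load (VMOVDQUload128/256/512) into the
// consuming instruction's memory-operand ("...load") form: they fire only when
// canMergeLoad reports the load may be merged into this use, and clobber then marks
// the original load dead.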
(VPABSD512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPABSD512load {sym} [off] ptr mem)
(VPABSQ128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPABSQ128load {sym} [off] ptr mem)
(VPABSQ256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPABSQ256load {sym} [off] ptr mem)
(VPABSQ512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPABSQ512load {sym} [off] ptr mem)
(VPABSDMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPABSDMasked128load {sym} [off] ptr mask mem)
(VPABSDMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPABSDMasked256load {sym} [off] ptr mask mem)
(VPABSDMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPABSDMasked512load {sym} [off] ptr mask mem)
(VPABSQMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPABSQMasked128load {sym} [off] ptr mask mem)
(VPABSQMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPABSQMasked256load {sym} [off] ptr mask mem)
(VPABSQMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPABSQMasked512load {sym} [off] ptr mask mem)
(VADDPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VADDPS512load {sym} [off] x ptr mem)
(VADDPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VADDPD512load {sym} [off] x ptr mem)
(VPADDD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPADDD512load {sym} [off] x ptr mem)
(VPADDQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPADDQ512load {sym} [off] x ptr mem)
(VPDPWSSD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPDPWSSD512load {sym} [off] x y ptr mem)
(VPDPWSSDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPWSSDMasked128load {sym} [off] x y ptr mask mem)
(VPDPWSSDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPWSSDMasked256load {sym} [off] x y ptr mask mem)
(VPDPWSSDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPWSSDMasked512load {sym} [off] x y ptr mask mem)
(VPDPBUSD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSD512load {sym} [off] x y ptr mem)
(VPDPBUSDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDMasked128load {sym} [off] x y ptr mask mem)
(VPDPBUSDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDMasked256load {sym} [off] x y ptr mask mem)
(VPDPBUSDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDMasked512load {sym} [off] x y ptr mask mem)
(VPDPBUSDS512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDS512load {sym} [off] x y ptr mem)
(VPDPBUSDSMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDSMasked128load {sym} [off] x y ptr mask mem)
(VPDPBUSDSMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDSMasked256load {sym} [off] x y ptr mask mem)
(VPDPBUSDSMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPDPBUSDSMasked512load {sym} [off] x y ptr mask mem)
(VADDPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VADDPSMasked128load {sym} [off] x ptr mask mem)
(VADDPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VADDPSMasked256load {sym} [off] x ptr mask mem)
(VADDPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VADDPSMasked512load {sym} [off] x ptr mask mem)
(VADDPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VADDPDMasked128load {sym} [off] x ptr mask mem)
(VADDPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VADDPDMasked256load {sym} [off] x ptr mask mem)
(VADDPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VADDPDMasked512load {sym} [off] x ptr mask mem)
(VPADDDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPADDDMasked128load {sym} [off] x ptr mask mem)
(VPADDDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPADDDMasked256load {sym} [off] x ptr mask mem)
(VPADDDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPADDDMasked512load {sym} [off] x ptr mask mem)
(VPADDQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPADDQMasked128load {sym} [off] x ptr mask mem)
(VPADDQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPADDQMasked256load {sym} [off] x ptr mask mem)
(VPADDQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPADDQMasked512load {sym} [off] x ptr mask mem)
(VPANDD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPANDD512load {sym} [off] x ptr mem)
(VPANDQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPANDQ512load {sym} [off] x ptr mem)
(VPANDDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDDMasked128load {sym} [off] x ptr mask mem)
(VPANDDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDDMasked256load {sym} [off] x ptr mask mem)
(VPANDDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDDMasked512load {sym} [off] x ptr mask mem)
(VPANDQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDQMasked128load {sym} [off] x ptr mask mem)
(VPANDQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDQMasked256load {sym} [off] x ptr mask mem)
(VPANDQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDQMasked512load {sym} [off] x ptr mask mem)
(VPANDND512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPANDND512load {sym} [off] x ptr mem)
(VPANDNQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPANDNQ512load {sym} [off] x ptr mem)
(VPANDNDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDNDMasked128load {sym} [off] x ptr mask mem)
(VPANDNDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDNDMasked256load {sym} [off] x ptr mask mem)
(VPANDNDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDNDMasked512load {sym} [off] x ptr mask mem)
(VPANDNQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDNQMasked128load {sym} [off] x ptr mask mem)
(VPANDNQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDNQMasked256load {sym} [off] x ptr mask mem)
(VPANDNQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPANDNQMasked512load {sym} [off] x ptr mask mem)
(VRNDSCALEPS128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPS128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VRNDSCALEPS256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPS256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VRNDSCALEPS512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPS512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VRNDSCALEPD128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPD128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VRNDSCALEPD256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPD256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VRNDSCALEPD512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPD512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VRNDSCALEPSMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPSMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VRNDSCALEPSMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPSMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VRNDSCALEPSMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPSMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VRNDSCALEPDMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VRNDSCALEPDMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VRNDSCALEPDMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRNDSCALEPDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VREDUCEPS128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPS128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VREDUCEPS256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPS256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VREDUCEPS512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPS512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VREDUCEPD128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPD128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VREDUCEPD256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPD256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VREDUCEPD512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPD512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VREDUCEPSMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPSMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VREDUCEPSMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPSMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VREDUCEPSMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPSMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VREDUCEPDMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VREDUCEPDMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VREDUCEPDMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VREDUCEPDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPACKSSDW512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPACKSSDW512load {sym} [off] x ptr mem)
(VPACKSSDWMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPACKSSDWMasked128load {sym} [off] x ptr mask mem)
(VPACKSSDWMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPACKSSDWMasked256load {sym} [off] x ptr mask mem)
(VPACKSSDWMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPACKSSDWMasked512load {sym} [off] x ptr mask mem)
(VCVTTPS2DQ512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VCVTTPS2DQ512load {sym} [off] ptr mem)
(VCVTTPS2DQMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCVTTPS2DQMasked128load {sym} [off] ptr mask mem)
(VCVTTPS2DQMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCVTTPS2DQMasked256load {sym} [off] ptr mask mem)
(VCVTTPS2DQMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCVTTPS2DQMasked512load {sym} [off] ptr mask mem)
(VPACKUSDW512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPACKUSDW512load {sym} [off] x ptr mem)
(VPACKUSDWMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPACKUSDWMasked128load {sym} [off] x ptr mask mem)
(VPACKUSDWMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPACKUSDWMasked256load {sym} [off] x ptr mask mem)
(VPACKUSDWMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPACKUSDWMasked512load {sym} [off] x ptr mask mem)
(VCVTPS2UDQ128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VCVTPS2UDQ128load {sym} [off] ptr mem)
(VCVTPS2UDQ256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VCVTPS2UDQ256load {sym} [off] ptr mem)
(VCVTPS2UDQ512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VCVTPS2UDQ512load {sym} [off] ptr mem)
(VCVTPS2UDQMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCVTPS2UDQMasked128load {sym} [off] ptr mask mem)
(VCVTPS2UDQMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCVTPS2UDQMasked256load {sym} [off] ptr mask mem)
(VCVTPS2UDQMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCVTPS2UDQMasked512load {sym} [off] ptr mask mem)
(VDIVPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VDIVPS512load {sym} [off] x ptr mem)
(VDIVPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VDIVPD512load {sym} [off] x ptr mem)
(VDIVPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VDIVPSMasked128load {sym} [off] x ptr mask mem)
(VDIVPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VDIVPSMasked256load {sym} [off] x ptr mask mem)
(VDIVPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VDIVPSMasked512load {sym} [off] x ptr mask mem)
(VDIVPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VDIVPDMasked128load {sym} [off] x ptr mask mem)
(VDIVPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VDIVPDMasked256load {sym} [off] x ptr mask mem)
(VDIVPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VDIVPDMasked512load {sym} [off] x ptr mask mem)
(VPCMPEQD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPEQD512load {sym} [off] x ptr mem)
(VPCMPEQQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPEQQ512load {sym} [off] x ptr mem)
(VCMPPS512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VCMPPS512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VCMPPD512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VCMPPD512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VCMPPSMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCMPPSMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VCMPPSMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCMPPSMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VCMPPSMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCMPPSMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VCMPPDMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCMPPDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VCMPPDMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCMPPDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VCMPPDMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VCMPPDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPDMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPDMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPDMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPQMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPQMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPQMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPQMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPQMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPQMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPUDMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPUDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPUDMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPUDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPUDMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPUDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPUQMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPUQMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPUQMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPUQMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPUQMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPCMPUQMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VGF2P8AFFINEQB128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEQB128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VGF2P8AFFINEQB256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEQB256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VGF2P8AFFINEQB512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEQB512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VGF2P8AFFINEINVQB128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEINVQB128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VGF2P8AFFINEINVQB256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEINVQB256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VGF2P8AFFINEINVQB512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEINVQB512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VGF2P8AFFINEINVQBMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEINVQBMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VGF2P8AFFINEINVQBMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEINVQBMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VGF2P8AFFINEINVQBMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEINVQBMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VGF2P8AFFINEQBMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEQBMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VGF2P8AFFINEQBMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEQBMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VGF2P8AFFINEQBMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VGF2P8AFFINEQBMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPCMPGTD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPGTD512load {sym} [off] x ptr mem)
(VPCMPGTQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPGTQ512load {sym} [off] x ptr mem)
(VPCMPUD512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPUD512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPCMPUQ512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPUQ512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPCMPD512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPD512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPCMPQ512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPCMPQ512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPUNPCKHDQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPUNPCKHDQ512load {sym} [off] x ptr mem)
(VPUNPCKHQDQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPUNPCKHQDQ512load {sym} [off] x ptr mem)
(VPUNPCKLDQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPUNPCKLDQ512load {sym} [off] x ptr mem)
(VPUNPCKLQDQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPUNPCKLQDQ512load {sym} [off] x ptr mem)
(VPLZCNTD128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTD128load {sym} [off] ptr mem)
(VPLZCNTD256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTD256load {sym} [off] ptr mem)
(VPLZCNTD512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTD512load {sym} [off] ptr mem)
(VPLZCNTQ128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTQ128load {sym} [off] ptr mem)
(VPLZCNTQ256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTQ256load {sym} [off] ptr mem)
(VPLZCNTQ512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTQ512load {sym} [off] ptr mem)
(VPLZCNTDMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTDMasked128load {sym} [off] ptr mask mem)
(VPLZCNTDMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTDMasked256load {sym} [off] ptr mask mem)
(VPLZCNTDMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTDMasked512load {sym} [off] ptr mask mem)
(VPLZCNTQMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTQMasked128load {sym} [off] ptr mask mem)
(VPLZCNTQMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTQMasked256load {sym} [off] ptr mask mem)
(VPLZCNTQMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPLZCNTQMasked512load {sym} [off] ptr mask mem)
(VMAXPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VMAXPS512load {sym} [off] x ptr mem)
(VMAXPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VMAXPD512load {sym} [off] x ptr mem)
(VPMAXSD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXSD512load {sym} [off] x ptr mem)
(VPMAXSQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXSQ128load {sym} [off] x ptr mem)
(VPMAXSQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXSQ256load {sym} [off] x ptr mem)
(VPMAXSQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXSQ512load {sym} [off] x ptr mem)
(VPMAXUD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXUD512load {sym} [off] x ptr mem)
(VPMAXUQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXUQ128load {sym} [off] x ptr mem)
(VPMAXUQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXUQ256load {sym} [off] x ptr mem)
(VPMAXUQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMAXUQ512load {sym} [off] x ptr mem)
(VMAXPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMAXPSMasked128load {sym} [off] x ptr mask mem)
(VMAXPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMAXPSMasked256load {sym} [off] x ptr mask mem)
(VMAXPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMAXPSMasked512load {sym} [off] x ptr mask mem)
(VMAXPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMAXPDMasked128load {sym} [off] x ptr mask mem)
(VMAXPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMAXPDMasked256load {sym} [off] x ptr mask mem)
(VMAXPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMAXPDMasked512load {sym} [off] x ptr mask mem)
(VPMAXSDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXSDMasked128load {sym} [off] x ptr mask mem)
(VPMAXSDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXSDMasked256load {sym} [off] x ptr mask mem)
(VPMAXSDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXSDMasked512load {sym} [off] x ptr mask mem)
(VPMAXSQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXSQMasked128load {sym} [off] x ptr mask mem)
(VPMAXSQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXSQMasked256load {sym} [off] x ptr mask mem)
(VPMAXSQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXSQMasked512load {sym} [off] x ptr mask mem)
(VPMAXUDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXUDMasked128load {sym} [off] x ptr mask mem)
(VPMAXUDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXUDMasked256load {sym} [off] x ptr mask mem)
(VPMAXUDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXUDMasked512load {sym} [off] x ptr mask mem)
(VPMAXUQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXUQMasked128load {sym} [off] x ptr mask mem)
(VPMAXUQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXUQMasked256load {sym} [off] x ptr mask mem)
(VPMAXUQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMAXUQMasked512load {sym} [off] x ptr mask mem)
(VMINPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VMINPS512load {sym} [off] x ptr mem)
(VMINPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VMINPD512load {sym} [off] x ptr mem)
(VPMINSD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINSD512load {sym} [off] x ptr mem)
(VPMINSQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINSQ128load {sym} [off] x ptr mem)
(VPMINSQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINSQ256load {sym} [off] x ptr mem)
(VPMINSQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINSQ512load {sym} [off] x ptr mem)
(VPMINUD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINUD512load {sym} [off] x ptr mem)
(VPMINUQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINUQ128load {sym} [off] x ptr mem)
(VPMINUQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINUQ256load {sym} [off] x ptr mem)
(VPMINUQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMINUQ512load {sym} [off] x ptr mem)
(VMINPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMINPSMasked128load {sym} [off] x ptr mask mem)
(VMINPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMINPSMasked256load {sym} [off] x ptr mask mem)
(VMINPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMINPSMasked512load {sym} [off] x ptr mask mem)
(VMINPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMINPDMasked128load {sym} [off] x ptr mask mem)
(VMINPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMINPDMasked256load {sym} [off] x ptr mask mem)
(VMINPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMINPDMasked512load {sym} [off] x ptr mask mem)
(VPMINSDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINSDMasked128load {sym} [off] x ptr mask mem)
(VPMINSDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINSDMasked256load {sym} [off] x ptr mask mem)
(VPMINSDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINSDMasked512load {sym} [off] x ptr mask mem)
(VPMINSQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINSQMasked128load {sym} [off] x ptr mask mem)
(VPMINSQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINSQMasked256load {sym} [off] x ptr mask mem)
(VPMINSQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINSQMasked512load {sym} [off] x ptr mask mem)
(VPMINUDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINUDMasked128load {sym} [off] x ptr mask mem)
(VPMINUDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINUDMasked256load {sym} [off] x ptr mask mem)
(VPMINUDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINUDMasked512load {sym} [off] x ptr mask mem)
(VPMINUQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINUQMasked128load {sym} [off] x ptr mask mem)
(VPMINUQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINUQMasked256load {sym} [off] x ptr mask mem)
(VPMINUQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMINUQMasked512load {sym} [off] x ptr mask mem)
(VMULPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VMULPS512load {sym} [off] x ptr mem)
(VMULPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VMULPD512load {sym} [off] x ptr mem)
(VPMULLD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMULLD512load {sym} [off] x ptr mem)
(VPMULLQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMULLQ128load {sym} [off] x ptr mem)
(VPMULLQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMULLQ256load {sym} [off] x ptr mem)
(VPMULLQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPMULLQ512load {sym} [off] x ptr mem)
(VFMADD213PS128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PS128load {sym} [off] x y ptr mem)
(VFMADD213PS256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PS256load {sym} [off] x y ptr mem)
(VFMADD213PS512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PS512load {sym} [off] x y ptr mem)
(VFMADD213PD128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PD128load {sym} [off] x y ptr mem)
(VFMADD213PD256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PD256load {sym} [off] x y ptr mem)
(VFMADD213PD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PD512load {sym} [off] x y ptr mem)
(VFMADD213PSMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PSMasked128load {sym} [off] x y ptr mask mem)
(VFMADD213PSMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PSMasked256load {sym} [off] x y ptr mask mem)
(VFMADD213PSMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PSMasked512load {sym} [off] x y ptr mask mem)
(VFMADD213PDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PDMasked128load {sym} [off] x y ptr mask mem)
(VFMADD213PDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PDMasked256load {sym} [off] x y ptr mask mem)
(VFMADD213PDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADD213PDMasked512load {sym} [off] x y ptr mask mem)
(VFMADDSUB213PS128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PS128load {sym} [off] x y ptr mem)
(VFMADDSUB213PS256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PS256load {sym} [off] x y ptr mem)
(VFMADDSUB213PS512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PS512load {sym} [off] x y ptr mem)
(VFMADDSUB213PD128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PD128load {sym} [off] x y ptr mem)
(VFMADDSUB213PD256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PD256load {sym} [off] x y ptr mem)
(VFMADDSUB213PD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PD512load {sym} [off] x y ptr mem)
(VFMADDSUB213PSMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PSMasked128load {sym} [off] x y ptr mask mem)
(VFMADDSUB213PSMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PSMasked256load {sym} [off] x y ptr mask mem)
(VFMADDSUB213PSMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PSMasked512load {sym} [off] x y ptr mask mem)
(VFMADDSUB213PDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PDMasked128load {sym} [off] x y ptr mask mem)
(VFMADDSUB213PDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PDMasked256load {sym} [off] x y ptr mask mem)
(VFMADDSUB213PDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMADDSUB213PDMasked512load {sym} [off] x y ptr mask mem)
(VMULPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMULPSMasked128load {sym} [off] x ptr mask mem)
(VMULPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMULPSMasked256load {sym} [off] x ptr mask mem)
(VMULPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMULPSMasked512load {sym} [off] x ptr mask mem)
(VMULPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMULPDMasked128load {sym} [off] x ptr mask mem)
(VMULPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMULPDMasked256load {sym} [off] x ptr mask mem)
(VMULPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VMULPDMasked512load {sym} [off] x ptr mask mem)
(VPMULLDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMULLDMasked128load {sym} [off] x ptr mask mem)
(VPMULLDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMULLDMasked256load {sym} [off] x ptr mask mem)
(VPMULLDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMULLDMasked512load {sym} [off] x ptr mask mem)
(VPMULLQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMULLQMasked128load {sym} [off] x ptr mask mem)
(VPMULLQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMULLQMasked256load {sym} [off] x ptr mask mem)
(VPMULLQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPMULLQMasked512load {sym} [off] x ptr mask mem)
(VFMSUBADD213PS128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PS128load {sym} [off] x y ptr mem)
(VFMSUBADD213PS256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PS256load {sym} [off] x y ptr mem)
(VFMSUBADD213PS512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PS512load {sym} [off] x y ptr mem)
(VFMSUBADD213PD128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PD128load {sym} [off] x y ptr mem)
(VFMSUBADD213PD256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PD256load {sym} [off] x y ptr mem)
(VFMSUBADD213PD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PD512load {sym} [off] x y ptr mem)
(VFMSUBADD213PSMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PSMasked128load {sym} [off] x y ptr mask mem)
(VFMSUBADD213PSMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PSMasked256load {sym} [off] x y ptr mask mem)
(VFMSUBADD213PSMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PSMasked512load {sym} [off] x y ptr mask mem)
(VFMSUBADD213PDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PDMasked128load {sym} [off] x y ptr mask mem)
(VFMSUBADD213PDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PDMasked256load {sym} [off] x y ptr mask mem)
(VFMSUBADD213PDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VFMSUBADD213PDMasked512load {sym} [off] x y ptr mask mem)
(VPOPCNTD128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTD128load {sym} [off] ptr mem)
(VPOPCNTD256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTD256load {sym} [off] ptr mem)
(VPOPCNTD512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTD512load {sym} [off] ptr mem)
(VPOPCNTQ128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTQ128load {sym} [off] ptr mem)
(VPOPCNTQ256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTQ256load {sym} [off] ptr mem)
(VPOPCNTQ512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTQ512load {sym} [off] ptr mem)
(VPOPCNTDMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTDMasked128load {sym} [off] ptr mask mem)
(VPOPCNTDMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTDMasked256load {sym} [off] ptr mask mem)
(VPOPCNTDMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTDMasked512load {sym} [off] ptr mask mem)
(VPOPCNTQMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTQMasked128load {sym} [off] ptr mask mem)
(VPOPCNTQMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTQMasked256load {sym} [off] ptr mask mem)
(VPOPCNTQMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPOPCNTQMasked512load {sym} [off] ptr mask mem)
(VPORD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPORD512load {sym} [off] x ptr mem)
(VPORQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPORQ512load {sym} [off] x ptr mem)
(VPORDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPORDMasked128load {sym} [off] x ptr mask mem)
(VPORDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPORDMasked256load {sym} [off] x ptr mask mem)
(VPORDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPORDMasked512load {sym} [off] x ptr mask mem)
(VPORQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPORQMasked128load {sym} [off] x ptr mask mem)
(VPORQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPORQMasked256load {sym} [off] x ptr mask mem)
(VPORQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPORQMasked512load {sym} [off] x ptr mask mem)
(VPERMPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMPS512load {sym} [off] x ptr mem)
(VPERMD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMD512load {sym} [off] x ptr mem)
(VPERMPD256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMPD256load {sym} [off] x ptr mem)
(VPERMQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMQ256load {sym} [off] x ptr mem)
(VPERMPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMPD512load {sym} [off] x ptr mem)
(VPERMQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMQ512load {sym} [off] x ptr mem)
(VPERMI2PS128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PS128load {sym} [off] x y ptr mem)
(VPERMI2D128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2D128load {sym} [off] x y ptr mem)
(VPERMI2PS256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PS256load {sym} [off] x y ptr mem)
(VPERMI2D256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2D256load {sym} [off] x y ptr mem)
(VPERMI2PS512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PS512load {sym} [off] x y ptr mem)
(VPERMI2D512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2D512load {sym} [off] x y ptr mem)
(VPERMI2PD128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PD128load {sym} [off] x y ptr mem)
(VPERMI2Q128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2Q128load {sym} [off] x y ptr mem)
(VPERMI2PD256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PD256load {sym} [off] x y ptr mem)
(VPERMI2Q256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2Q256load {sym} [off] x y ptr mem)
(VPERMI2PD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PD512load {sym} [off] x y ptr mem)
(VPERMI2Q512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPERMI2Q512load {sym} [off] x y ptr mem)
(VPERMI2PSMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PSMasked128load {sym} [off] x y ptr mask mem)
(VPERMI2DMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2DMasked128load {sym} [off] x y ptr mask mem)
(VPERMI2PSMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PSMasked256load {sym} [off] x y ptr mask mem)
(VPERMI2DMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2DMasked256load {sym} [off] x y ptr mask mem)
(VPERMI2PSMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PSMasked512load {sym} [off] x y ptr mask mem)
(VPERMI2DMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2DMasked512load {sym} [off] x y ptr mask mem)
(VPERMI2PDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PDMasked128load {sym} [off] x y ptr mask mem)
(VPERMI2QMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2QMasked128load {sym} [off] x y ptr mask mem)
(VPERMI2PDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PDMasked256load {sym} [off] x y ptr mask mem)
(VPERMI2QMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2QMasked256load {sym} [off] x y ptr mask mem)
(VPERMI2PDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2PDMasked512load {sym} [off] x y ptr mask mem)
(VPERMI2QMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMI2QMasked512load {sym} [off] x y ptr mask mem)
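// For immediate-operand instructions, the 8-bit immediate c and the load offset are packed
// into a single auxInt via makeValAndOff(int32(int8(c)), off).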
(VPSHUFD512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHUFD512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSHUFDMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHUFDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSHUFDMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHUFDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSHUFDMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHUFDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPERMPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMPSMasked256load {sym} [off] x ptr mask mem)
(VPERMDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMDMasked256load {sym} [off] x ptr mask mem)
(VPERMPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMPSMasked512load {sym} [off] x ptr mask mem)
(VPERMDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMDMasked512load {sym} [off] x ptr mask mem)
(VPERMPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMPDMasked256load {sym} [off] x ptr mask mem)
(VPERMQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMQMasked256load {sym} [off] x ptr mask mem)
(VPERMPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMPDMasked512load {sym} [off] x ptr mask mem)
(VPERMQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPERMQMasked512load {sym} [off] x ptr mask mem)
(VRCP14PS512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRCP14PS512load {sym} [off] ptr mem)
(VRCP14PD128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRCP14PD128load {sym} [off] ptr mem)
(VRCP14PD256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRCP14PD256load {sym} [off] ptr mem)
(VRCP14PD512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRCP14PD512load {sym} [off] ptr mem)
(VRCP14PSMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRCP14PSMasked128load {sym} [off] ptr mask mem)
(VRCP14PSMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRCP14PSMasked256load {sym} [off] ptr mask mem)
(VRCP14PSMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRCP14PSMasked512load {sym} [off] ptr mask mem)
(VRCP14PDMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRCP14PDMasked128load {sym} [off] ptr mask mem)
(VRCP14PDMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRCP14PDMasked256load {sym} [off] ptr mask mem)
(VRCP14PDMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRCP14PDMasked512load {sym} [off] ptr mask mem)
(VRSQRT14PS512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PS512load {sym} [off] ptr mem)
(VRSQRT14PD128 l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PD128load {sym} [off] ptr mem)
(VRSQRT14PD256 l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PD256load {sym} [off] ptr mem)
(VRSQRT14PD512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PD512load {sym} [off] ptr mem)
(VRSQRT14PSMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PSMasked128load {sym} [off] ptr mask mem)
(VRSQRT14PSMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PSMasked256load {sym} [off] ptr mask mem)
(VRSQRT14PSMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PSMasked512load {sym} [off] ptr mask mem)
(VRSQRT14PDMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PDMasked128load {sym} [off] ptr mask mem)
(VRSQRT14PDMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PDMasked256load {sym} [off] ptr mask mem)
(VRSQRT14PDMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VRSQRT14PDMasked512load {sym} [off] ptr mask mem)
(VPROLD128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLD128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPROLD256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLD256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPROLD512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLD512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPROLQ128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLQ128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPROLQ256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLQ256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPROLQ512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLQ512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPROLDMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPROLDMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPROLDMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPROLQMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLQMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPROLQMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLQMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPROLQMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLQMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPRORD128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORD128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPRORD256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORD256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPRORD512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORD512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPRORQ128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORQ128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPRORQ256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORQ256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPRORQ512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORQ512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPRORDMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPRORDMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPRORDMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPRORQMasked128 [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORQMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPRORQMasked256 [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORQMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPRORQMasked512 [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORQMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPROLVD128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLVD128load {sym} [off] x ptr mem)
(VPROLVD256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLVD256load {sym} [off] x ptr mem)
(VPROLVD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLVD512load {sym} [off] x ptr mem)
(VPROLVQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLVQ128load {sym} [off] x ptr mem)
(VPROLVQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLVQ256load {sym} [off] x ptr mem)
(VPROLVQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPROLVQ512load {sym} [off] x ptr mem)
(VPROLVDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLVDMasked128load {sym} [off] x ptr mask mem)
(VPROLVDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLVDMasked256load {sym} [off] x ptr mask mem)
(VPROLVDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLVDMasked512load {sym} [off] x ptr mask mem)
(VPROLVQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLVQMasked128load {sym} [off] x ptr mask mem)
(VPROLVQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLVQMasked256load {sym} [off] x ptr mask mem)
(VPROLVQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPROLVQMasked512load {sym} [off] x ptr mask mem)
(VPRORVD128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORVD128load {sym} [off] x ptr mem)
(VPRORVD256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORVD256load {sym} [off] x ptr mem)
(VPRORVD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORVD512load {sym} [off] x ptr mem)
(VPRORVQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORVQ128load {sym} [off] x ptr mem)
(VPRORVQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORVQ256load {sym} [off] x ptr mem)
(VPRORVQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPRORVQ512load {sym} [off] x ptr mem)
(VPRORVDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORVDMasked128load {sym} [off] x ptr mask mem)
(VPRORVDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORVDMasked256load {sym} [off] x ptr mask mem)
(VPRORVDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORVDMasked512load {sym} [off] x ptr mask mem)
(VPRORVQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORVQMasked128load {sym} [off] x ptr mask mem)
(VPRORVQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORVQMasked256load {sym} [off] x ptr mask mem)
(VPRORVQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPRORVQMasked512load {sym} [off] x ptr mask mem)
(VSCALEFPS128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPS128load {sym} [off] x ptr mem)
(VSCALEFPS256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPS256load {sym} [off] x ptr mem)
(VSCALEFPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPS512load {sym} [off] x ptr mem)
(VSCALEFPD128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPD128load {sym} [off] x ptr mem)
(VSCALEFPD256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPD256load {sym} [off] x ptr mem)
(VSCALEFPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPD512load {sym} [off] x ptr mem)
(VSCALEFPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPSMasked128load {sym} [off] x ptr mask mem)
(VSCALEFPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPSMasked256load {sym} [off] x ptr mask mem)
(VSCALEFPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPSMasked512load {sym} [off] x ptr mask mem)
(VSCALEFPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPDMasked128load {sym} [off] x ptr mask mem)
(VSCALEFPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPDMasked256load {sym} [off] x ptr mask mem)
(VSCALEFPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSCALEFPDMasked512load {sym} [off] x ptr mask mem)
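// Immediate funnel shifts (VPSHLDD/Q, VPSHRDD/Q) keep x in a register and fold only the
// memory source; the shift count c is packed with the offset as above.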
(VPSHLDD128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDD128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHLDD256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDD256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHLDD512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDD512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHLDQ128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDQ128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHLDQ256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDQ256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHLDQ512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDQ512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHLDDMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHLDDMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHLDDMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHLDQMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDQMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHLDQMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDQMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHLDQMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDQMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHRDD128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDD128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHRDD256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDD256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHRDD512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDD512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHRDQ128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDQ128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHRDQ256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDQ256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHRDQ512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDQ512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VPSHRDDMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDDMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHRDDMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDDMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHRDDMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDDMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHRDQMasked128 [c] x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDQMasked128load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHRDQMasked256 [c] x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDQMasked256load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSHRDQMasked512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDQMasked512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mask mem)
(VPSLLVD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSLLVD512load {sym} [off] x ptr mem)
(VPSLLVQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSLLVQ512load {sym} [off] x ptr mem)
(VPSHLDVD128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVD128load {sym} [off] x y ptr mem)
(VPSHLDVD256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVD256load {sym} [off] x y ptr mem)
(VPSHLDVD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVD512load {sym} [off] x y ptr mem)
(VPSHLDVQ128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVQ128load {sym} [off] x y ptr mem)
(VPSHLDVQ256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVQ256load {sym} [off] x y ptr mem)
(VPSHLDVQ512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVQ512load {sym} [off] x y ptr mem)
(VPSHLDVDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVDMasked128load {sym} [off] x y ptr mask mem)
(VPSHLDVDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVDMasked256load {sym} [off] x y ptr mask mem)
(VPSHLDVDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVDMasked512load {sym} [off] x y ptr mask mem)
(VPSHLDVQMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVQMasked128load {sym} [off] x y ptr mask mem)
(VPSHLDVQMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVQMasked256load {sym} [off] x y ptr mask mem)
(VPSHLDVQMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHLDVQMasked512load {sym} [off] x y ptr mask mem)
(VPSLLVDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLVDMasked128load {sym} [off] x ptr mask mem)
(VPSLLVDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLVDMasked256load {sym} [off] x ptr mask mem)
(VPSLLVDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLVDMasked512load {sym} [off] x ptr mask mem)
(VPSLLVQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLVQMasked128load {sym} [off] x ptr mask mem)
(VPSLLVQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLVQMasked256load {sym} [off] x ptr mask mem)
(VPSLLVQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLVQMasked512load {sym} [off] x ptr mask mem)
(VPSRAVD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAVD512load {sym} [off] x ptr mem)
(VPSRAVQ128 x l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAVQ128load {sym} [off] x ptr mem)
(VPSRAVQ256 x l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAVQ256load {sym} [off] x ptr mem)
(VPSRAVQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAVQ512load {sym} [off] x ptr mem)
(VPSRLVD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRLVD512load {sym} [off] x ptr mem)
(VPSRLVQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRLVQ512load {sym} [off] x ptr mem)
(VPSHRDVD128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVD128load {sym} [off] x y ptr mem)
(VPSHRDVD256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVD256load {sym} [off] x y ptr mem)
(VPSHRDVD512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVD512load {sym} [off] x y ptr mem)
(VPSHRDVQ128 x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVQ128load {sym} [off] x y ptr mem)
(VPSHRDVQ256 x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVQ256load {sym} [off] x y ptr mem)
(VPSHRDVQ512 x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVQ512load {sym} [off] x y ptr mem)
(VPSHRDVDMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVDMasked128load {sym} [off] x y ptr mask mem)
(VPSHRDVDMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVDMasked256load {sym} [off] x y ptr mask mem)
(VPSHRDVDMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVDMasked512load {sym} [off] x y ptr mask mem)
(VPSHRDVQMasked128 x y l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVQMasked128load {sym} [off] x y ptr mask mem)
(VPSHRDVQMasked256 x y l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVQMasked256load {sym} [off] x y ptr mask mem)
(VPSHRDVQMasked512 x y l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSHRDVQMasked512load {sym} [off] x y ptr mask mem)
(VPSRAVDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAVDMasked128load {sym} [off] x ptr mask mem)
(VPSRAVDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAVDMasked256load {sym} [off] x ptr mask mem)
(VPSRAVDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAVDMasked512load {sym} [off] x ptr mask mem)
(VPSRAVQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAVQMasked128load {sym} [off] x ptr mask mem)
(VPSRAVQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAVQMasked256load {sym} [off] x ptr mask mem)
(VPSRAVQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAVQMasked512load {sym} [off] x ptr mask mem)
(VPSRLVDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLVDMasked128load {sym} [off] x ptr mask mem)
(VPSRLVDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLVDMasked256load {sym} [off] x ptr mask mem)
(VPSRLVDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLVDMasked512load {sym} [off] x ptr mask mem)
(VPSRLVQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLVQMasked128load {sym} [off] x ptr mask mem)
(VPSRLVQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLVQMasked256load {sym} [off] x ptr mask mem)
(VPSRLVQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLVQMasked512load {sym} [off] x ptr mask mem)
(VSQRTPS512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSQRTPS512load {sym} [off] ptr mem)
(VSQRTPD512 l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSQRTPD512load {sym} [off] ptr mem)
(VSQRTPSMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSQRTPSMasked128load {sym} [off] ptr mask mem)
(VSQRTPSMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSQRTPSMasked256load {sym} [off] ptr mask mem)
(VSQRTPSMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSQRTPSMasked512load {sym} [off] ptr mask mem)
(VSQRTPDMasked128 l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSQRTPDMasked128load {sym} [off] ptr mask mem)
(VSQRTPDMasked256 l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSQRTPDMasked256load {sym} [off] ptr mask mem)
(VSQRTPDMasked512 l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSQRTPDMasked512load {sym} [off] ptr mask mem)
(VSUBPS512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSUBPS512load {sym} [off] x ptr mem)
(VSUBPD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSUBPD512load {sym} [off] x ptr mem)
(VPSUBD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSUBD512load {sym} [off] x ptr mem)
(VPSUBQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSUBQ512load {sym} [off] x ptr mem)
(VSUBPSMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSUBPSMasked128load {sym} [off] x ptr mask mem)
(VSUBPSMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSUBPSMasked256load {sym} [off] x ptr mask mem)
(VSUBPSMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSUBPSMasked512load {sym} [off] x ptr mask mem)
(VSUBPDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSUBPDMasked128load {sym} [off] x ptr mask mem)
(VSUBPDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSUBPDMasked256load {sym} [off] x ptr mask mem)
(VSUBPDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VSUBPDMasked512load {sym} [off] x ptr mask mem)
(VPSUBDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSUBDMasked128load {sym} [off] x ptr mask mem)
(VPSUBDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSUBDMasked256load {sym} [off] x ptr mask mem)
(VPSUBDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSUBDMasked512load {sym} [off] x ptr mask mem)
(VPSUBQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSUBQMasked128load {sym} [off] x ptr mask mem)
(VPSUBQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSUBQMasked256load {sym} [off] x ptr mask mem)
(VPSUBQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSUBQMasked512load {sym} [off] x ptr mask mem)
(VPXORD512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPXORD512load {sym} [off] x ptr mem)
(VPXORQ512 x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPXORQ512load {sym} [off] x ptr mem)
(VPXORDMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPXORDMasked128load {sym} [off] x ptr mask mem)
(VPXORDMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPXORDMasked256load {sym} [off] x ptr mask mem)
(VPXORDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPXORDMasked512load {sym} [off] x ptr mask mem)
(VPXORQMasked128 x l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPXORQMasked128load {sym} [off] x ptr mask mem)
(VPXORQMasked256 x l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPXORQMasked256load {sym} [off] x ptr mask mem)
(VPXORQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPXORQMasked512load {sym} [off] x ptr mask mem)
(VPBLENDMDMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPBLENDMDMasked512load {sym} [off] x ptr mask mem)
(VPBLENDMQMasked512 x l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPBLENDMQMasked512load {sym} [off] x ptr mask mem)
(VSHUFPS512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSHUFPS512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
(VSHUFPD512 [c] x l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VSHUFPD512load {sym} [makeValAndOff(int32(int8(c)),off)] x ptr mem)
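// Constant-count shifts ("...const" ops) fold into the corresponding "...constload" forms,
// again packing the shift count with the load offset.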
(VPSLLD512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSLLD512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSLLQ512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSLLQ512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSLLDMasked128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLDMasked128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSLLDMasked256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLDMasked256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSLLDMasked512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLDMasked512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSLLQMasked128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLQMasked128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSLLQMasked256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLQMasked256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSLLQMasked512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSLLQMasked512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRLD512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRLD512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSRLQ512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRLQ512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSRAD512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAD512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSRAQ128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAQ128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSRAQ256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAQ256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSRAQ512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPSRAQ512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(VPSRLDMasked128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLDMasked128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRLDMasked256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLDMasked256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRLDMasked512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLDMasked512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRLQMasked128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLQMasked128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRLQMasked256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLQMasked256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRLQMasked512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRLQMasked512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRADMasked128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRADMasked128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRADMasked256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRADMasked256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRADMasked512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRADMasked512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRAQMasked128const [c] l:(VMOVDQUload128 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAQMasked128constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRAQMasked256const [c] l:(VMOVDQUload256 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAQMasked256constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
(VPSRAQMasked512const [c] l:(VMOVDQUload512 {sym} [off] ptr mem) mask) && canMergeLoad(v, l) && clobber(l) => (VPSRAQMasked512constload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mask mem)
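// VPTERNLOG keeps x and y in registers and folds only the third source from memory;
// the truth-table immediate c is packed with the offset.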
(VPTERNLOGD128 [c] x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPTERNLOGD128load {sym} [makeValAndOff(int32(int8(c)),off)] x y ptr mem)
(VPTERNLOGD256 [c] x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPTERNLOGD256load {sym} [makeValAndOff(int32(int8(c)),off)] x y ptr mem)
(VPTERNLOGD512 [c] x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPTERNLOGD512load {sym} [makeValAndOff(int32(int8(c)),off)] x y ptr mem)
(VPTERNLOGQ128 [c] x y l:(VMOVDQUload128 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPTERNLOGQ128load {sym} [makeValAndOff(int32(int8(c)),off)] x y ptr mem)
(VPTERNLOGQ256 [c] x y l:(VMOVDQUload256 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPTERNLOGQ256load {sym} [makeValAndOff(int32(int8(c)),off)] x y ptr mem)
(VPTERNLOGQ512 [c] x y l:(VMOVDQUload512 {sym} [off] ptr mem)) && canMergeLoad(v, l) && clobber(l) => (VPTERNLOGQ512load {sym} [makeValAndOff(int32(int8(c)),off)] x y ptr mem)