(VMULslfq QPR:$Qdn, QPR:$Qdn, DPR_VFP2:$Dm,
VectorIndex32:$lane, pred:$p)>;
+// VQADD (register) two-operand aliases.
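+// These allow the assembler to accept, e.g., "vqadd.s8 d16, d17" as
+// shorthand for "vqadd.s8 d16, d16, d17" (the destination doubles as the
+// first source operand), matching the other NEON two-operand aliases below.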
+def : NEONInstAlias<"vqadd${p}.s8 $Vdn, $Vm",
+ (VQADDsv8i8 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.s16 $Vdn, $Vm",
+ (VQADDsv4i16 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.s32 $Vdn, $Vm",
+ (VQADDsv2i32 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.s64 $Vdn, $Vm",
+ (VQADDsv1i64 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u8 $Vdn, $Vm",
+ (VQADDuv8i8 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u16 $Vdn, $Vm",
+ (VQADDuv4i16 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u32 $Vdn, $Vm",
+ (VQADDuv2i32 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u64 $Vdn, $Vm",
+ (VQADDuv1i64 DPR:$Vdn, DPR:$Vdn, DPR:$Vm, pred:$p)>;
+
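+// Q-register (128-bit) variants.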
+def : NEONInstAlias<"vqadd${p}.s8 $Vdn, $Vm",
+ (VQADDsv16i8 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.s16 $Vdn, $Vm",
+ (VQADDsv8i16 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.s32 $Vdn, $Vm",
+ (VQADDsv4i32 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.s64 $Vdn, $Vm",
+ (VQADDsv2i64 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u8 $Vdn, $Vm",
+ (VQADDuv16i8 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u16 $Vdn, $Vm",
+ (VQADDuv8i16 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u32 $Vdn, $Vm",
+ (VQADDuv4i32 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+def : NEONInstAlias<"vqadd${p}.u64 $Vdn, $Vm",
+ (VQADDuv2i64 QPR:$Vdn, QPR:$Vdn, QPR:$Vm, pred:$p)>;
+
// VSHL (immediate) two-operand aliases.
def : NEONInstAlias<"vshl${p}.i8 $Vdn, $imm",
(VSHLiv8i8 DPR:$Vdn, DPR:$Vdn, imm0_7:$imm, pred:$p)>;
@ CHECK: vrhadd.u32 q8, q8, q9 @ encoding: [0xe2,0x01,0x60,0xf3]
vrhadd.u32 q8, q8, q9
-@ CHECK: vqadd.s8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xf2]
vqadd.s8 d16, d16, d17
-@ CHECK: vqadd.s16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xf2]
vqadd.s16 d16, d16, d17
-@ CHECK: vqadd.s32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xf2]
vqadd.s32 d16, d16, d17
-@ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf2]
vqadd.s64 d16, d16, d17
-@ CHECK: vqadd.u8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xf3]
vqadd.u8 d16, d16, d17
-@ CHECK: vqadd.u16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xf3]
vqadd.u16 d16, d16, d17
-@ CHECK: vqadd.u32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xf3]
vqadd.u32 d16, d16, d17
-@ CHECK: vqadd.u64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf3]
vqadd.u64 d16, d16, d17
-@ CHECK: vqadd.s8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xf2]
+
+@ CHECK: vqadd.s8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xf2]
+@ CHECK: vqadd.s16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xf2]
+@ CHECK: vqadd.s32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xf2]
+@ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf2]
+@ CHECK: vqadd.u8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xf3]
+@ CHECK: vqadd.u16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xf3]
+@ CHECK: vqadd.u32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xf3]
+@ CHECK: vqadd.u64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf3]
+
vqadd.s8 q8, q8, q9
-@ CHECK: vqadd.s16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xf2]
vqadd.s16 q8, q8, q9
-@ CHECK: vqadd.s32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xf2]
vqadd.s32 q8, q8, q9
-@ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf2]
vqadd.s64 q8, q8, q9
-@ CHECK: vqadd.u8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xf3]
vqadd.u8 q8, q8, q9
-@ CHECK: vqadd.u16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xf3]
vqadd.u16 q8, q8, q9
-@ CHECK: vqadd.u32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xf3]
vqadd.u32 q8, q8, q9
-@ CHECK: vqadd.u64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf3]
vqadd.u64 q8, q8, q9
+@ CHECK: vqadd.s8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xf2]
+@ CHECK: vqadd.s16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xf2]
+@ CHECK: vqadd.s32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xf2]
+@ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf2]
+@ CHECK: vqadd.u8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xf3]
+@ CHECK: vqadd.u16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xf3]
+@ CHECK: vqadd.u32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xf3]
+@ CHECK: vqadd.u64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf3]
+
+@ Two-operand variants. These should expand to the corresponding
+@ three-operand forms, with the destination doubling as the first source.
+ vqadd.s8 d16, d17
+ vqadd.s16 d16, d17
+ vqadd.s32 d16, d17
+ vqadd.s64 d16, d17
+ vqadd.u8 d16, d17
+ vqadd.u16 d16, d17
+ vqadd.u32 d16, d17
+ vqadd.u64 d16, d17
+
+@ CHECK: vqadd.s8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xf2]
+@ CHECK: vqadd.s16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xf2]
+@ CHECK: vqadd.s32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xf2]
+@ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf2]
+@ CHECK: vqadd.u8 d16, d16, d17 @ encoding: [0xb1,0x00,0x40,0xf3]
+@ CHECK: vqadd.u16 d16, d16, d17 @ encoding: [0xb1,0x00,0x50,0xf3]
+@ CHECK: vqadd.u32 d16, d16, d17 @ encoding: [0xb1,0x00,0x60,0xf3]
+@ CHECK: vqadd.u64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf3]
+
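+@ Q-register two-operand variants.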
+ vqadd.s8 q8, q9
+ vqadd.s16 q8, q9
+ vqadd.s32 q8, q9
+ vqadd.s64 q8, q9
+ vqadd.u8 q8, q9
+ vqadd.u16 q8, q9
+ vqadd.u32 q8, q9
+ vqadd.u64 q8, q9
+
+@ CHECK: vqadd.s8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xf2]
+@ CHECK: vqadd.s16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xf2]
+@ CHECK: vqadd.s32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xf2]
+@ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf2]
+@ CHECK: vqadd.u8 q8, q8, q9 @ encoding: [0xf2,0x00,0x40,0xf3]
+@ CHECK: vqadd.u16 q8, q8, q9 @ encoding: [0xf2,0x00,0x50,0xf3]
+@ CHECK: vqadd.u32 q8, q8, q9 @ encoding: [0xf2,0x00,0x60,0xf3]
+@ CHECK: vqadd.u64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf3]
+
@ CHECK: vaddhn.i16 d16, q8, q9 @ encoding: [0xa2,0x04,0xc0,0xf2]
vaddhn.i16 d16, q8, q9
@ CHECK: vaddhn.i32 d16, q8, q9 @ encoding: [0xa2,0x04,0xd0,0xf2]