// 256-bit vmovdqu register-to-register move, load-direction opcode
// (0x6F, MRMSrcReg), VEX-encoded. Empty pattern list: plain moves are not
// matched from DAG patterns.
// NOTE(review): no explicit VEX_L modifier here even though the operands are
// VR256 — presumably the encoder/disassembler tables derive the L bit from
// the register class in this revision; confirm against sibling 256-bit defs.
def VMOVDQUYrr : VSSI<0x6F, MRMSrcReg, (outs VR256:$dst), (ins VR256:$src),
"movdqu\t{$src, $dst|$dst, $src}", []>, VEX;
+// For Disassembler
+// Register-to-register forms of vmovdqa/vmovdqu using the store-direction
+// opcode (0x7F, MRMDestReg). Codegen and the assembler only ever use the
+// 0x6F (MRMSrcReg) forms for reg-reg moves, but a 0x7F opcode with a
+// register ModRM is still a legal encoding, so the disassembler needs
+// table entries for it; isCodeGenOnly keeps these out of normal selection.
+let isCodeGenOnly = 1 in {
+def VMOVDQArr_REV : VPDI<0x7F, MRMDestReg, (outs VR128:$dst), (ins VR128:$src),
+ "movdqa\t{$src, $dst|$dst, $src}", []>, VEX;
+def VMOVDQAYrr_REV : VPDI<0x7F, MRMDestReg, (outs VR256:$dst), (ins VR256:$src),
+ "movdqa\t{$src, $dst|$dst, $src}", []>, VEX;
+def VMOVDQUrr_REV : VSSI<0x7F, MRMDestReg, (outs VR128:$dst), (ins VR128:$src),
+ "movdqu\t{$src, $dst|$dst, $src}", []>, VEX;
+def VMOVDQUYrr_REV : VSSI<0x7F, MRMDestReg, (outs VR256:$dst), (ins VR256:$src),
+ "movdqu\t{$src, $dst|$dst, $src}", []>, VEX;
+}
+
// VEX-encoded aligned-load form: vmovdqa xmm, m128 (opcode 0x6F, MRMSrcMem).
// canFoldAsLoad/mayLoad let the fold/scheduling machinery treat it as a load.
let canFoldAsLoad = 1, mayLoad = 1 in {
def VMOVDQArm : VPDI<0x6F, MRMSrcMem, (outs VR128:$dst), (ins i128mem:$src),
"movdqa\t{$src, $dst|$dst, $src}", []>, VEX;
// NOTE(review): this chunk appears truncated — the lines between the VMOVDQA
// load form above and the orphaned "movdqu" asm-string / XS, Requires
// continuation below (including the def header they belong to and the closing
// '}' of the let block) are missing from this view. Do not edit blindly.
"movdqu\t{$src, $dst|$dst, $src}",
[]>, XS, Requires<[HasSSE2]>;
+// For Disassembler
+// Legacy-SSE (non-VEX) counterparts of the _REV defs: reg-to-reg
+// movdqa/movdqu via the store-direction opcode (0x7F, MRMDestReg).
+// isCodeGenOnly because codegen always emits the 0x6F forms; these exist
+// solely so the disassembler can decode 0x7F with a register ModRM.
+let isCodeGenOnly = 1 in {
+def MOVDQArr_REV : PDI<0x7F, MRMDestReg, (outs VR128:$dst), (ins VR128:$src),
+ "movdqa\t{$src, $dst|$dst, $src}", []>;
+
+// movdqu needs the XS (F3) prefix and is gated on SSE2 like its 0x6F sibling.
+def MOVDQUrr_REV : I<0x7F, MRMDestReg, (outs VR128:$dst), (ins VR128:$src),
+ "movdqu\t{$src, $dst|$dst, $src}",
+ []>, XS, Requires<[HasSSE2]>;
+}
+
+
// Legacy-SSE aligned-load form: movdqa xmm, m128 (0x6F, MRMSrcMem).
let canFoldAsLoad = 1, mayLoad = 1 in {
def MOVDQArm : PDI<0x6F, MRMSrcMem, (outs VR128:$dst), (ins i128mem:$src),
"movdqa\t{$src, $dst|$dst, $src}",
// NOTE(review): definition truncated here — the pattern list/terminator and
// the closing '}' of the let block are not visible in this chunk; the file
// jumps straight into disassembler test data below.
0x66 0x0f 0x29 0xc1
# CHECK: vmovups %xmm1, %xmm0
-0xc5 0xf0 0x10 0xc1
+0xc5 0xf8 0x10 0xc1
# CHECK: vmovups %xmm0, %xmm1
-0xc5 0xf0 0x11 0xc1
+0xc5 0xf8 0x11 0xc1
# CHECK: vmovaps %xmm1, %xmm0
-0xc5 0xf0 0x28 0xc1
+0xc5 0xf8 0x28 0xc1
# CHECK: vmovaps %xmm0, %xmm1
-0xc5 0xf0 0x29 0xc1
+0xc5 0xf8 0x29 0xc1
# CHECK: vmovupd %xmm1, %xmm0
-0xc5 0xf1 0x10 0xc1
+0xc5 0xf9 0x10 0xc1
# CHECK: vmovupd %xmm0, %xmm1
-0xc5 0xf1 0x11 0xc1
+0xc5 0xf9 0x11 0xc1
# CHECK: vmovapd %xmm1, %xmm0
-0xc5 0xf1 0x28 0xc1
+0xc5 0xf9 0x28 0xc1
# CHECK: vmovapd %xmm0, %xmm1
-0xc5 0xf1 0x29 0xc1
+0xc5 0xf9 0x29 0xc1
# CHECK: vmovups %ymm1, %ymm0
-0xc5 0xf4 0x10 0xc1
+0xc5 0xfc 0x10 0xc1
# CHECK: vmovups %ymm0, %ymm1
-0xc5 0xf4 0x11 0xc1
+0xc5 0xfc 0x11 0xc1
# CHECK: vmovaps %ymm1, %ymm0
-0xc5 0xf4 0x28 0xc1
+0xc5 0xfc 0x28 0xc1
# CHECK: vmovaps %ymm0, %ymm1
-0xc5 0xf4 0x29 0xc1
+0xc5 0xfc 0x29 0xc1
+
+# CHECK: movdqa %xmm1, %xmm0
+0x66 0x0f 0x6f 0xc1
+
+# CHECK: movdqa %xmm0, %xmm1
+0x66 0x0f 0x7f 0xc1
+
+# CHECK: movdqu %xmm1, %xmm0
+0xf3 0x0f 0x6f 0xc1
+
+# CHECK: movdqu %xmm0, %xmm1
+0xf3 0x0f 0x7f 0xc1
+
+# CHECK: vmovdqa %xmm1, %xmm0
+0xc5 0xf9 0x6f 0xc1
+
+# CHECK: vmovdqa %xmm0, %xmm1
+0xc5 0xf9 0x7f 0xc1
+
+# CHECK: vmovdqa %ymm1, %ymm0
+0xc5 0xfd 0x6f 0xc1
+
+# CHECK: vmovdqa %ymm0, %ymm1
+0xc5 0xfd 0x7f 0xc1
+
+# CHECK: vmovdqu %xmm1, %xmm0
+0xc5 0xfa 0x6f 0xc1
+
+# CHECK: vmovdqu %xmm0, %xmm1
+0xc5 0xfa 0x7f 0xc1
+
+# CHECK: vmovdqu %ymm1, %ymm0
+0xc5 0xfe 0x6f 0xc1
+
+# CHECK: vmovdqu %ymm0, %ymm1
+0xc5 0xfe 0x7f 0xc1