Do the same as r138461. Mark VZEROUPPER as clobbering all YMM registers
author: Bruno Cardoso Lopes <bruno.cardoso@gmail.com>
Thu, 25 Aug 2011 22:23:58 +0000 (22:23 +0000)
committer: Bruno Cardoso Lopes <bruno.cardoso@gmail.com>
Thu, 25 Aug 2011 22:23:58 +0000 (22:23 +0000)
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@138592 91177308-0d34-0410-b5e6-96231b3b80d8

lib/Target/X86/X86InstrSSE.td

index 6bafc2d6c7a3e030172a16b326aecacaca03fbc3..321d4504a95eaa79d454ff5185ae01016ba21984 100644 (file)
@@ -6175,12 +6175,11 @@ let Defs = [YMM0, YMM1, YMM2, YMM3, YMM4, YMM5, YMM6, YMM7,
   def VZEROALL : I<0x77, RawFrm, (outs), (ins), "vzeroall",
                    [(int_x86_avx_vzeroall)]>, TB, VEX, VEX_L, Requires<[HasAVX]>;
 
+  // Zero Upper bits of YMM registers
+  def VZEROUPPER : I<0x77, RawFrm, (outs), (ins), "vzeroupper",
+                     [(int_x86_avx_vzeroupper)]>, TB, VEX, Requires<[HasAVX]>;
 }
 
-// Zero Upper bits of YMM registers
-def VZEROUPPER : I<0x77, RawFrm, (outs), (ins), "vzeroupper",
-                   [(int_x86_avx_vzeroupper)]>, TB, VEX, Requires<[HasAVX]>;
-
 //===----------------------------------------------------------------------===//
 // SSE Shuffle pattern fragments
 //===----------------------------------------------------------------------===//