diff --git a/test/CodeGen/X86/vector-shuffle-512-v8.ll b/test/CodeGen/X86/vector-shuffle-512-v8.ll
index 06097d970e6..f9e131b4039 100644
--- a/test/CodeGen/X86/vector-shuffle-512-v8.ll
+++ b/test/CodeGen/X86/vector-shuffle-512-v8.ll
@@ -1,23 +1,22 @@
-; RUN: llc < %s -mcpu=x86-64 -mattr=+avx512f -x86-experimental-vector-shuffle-lowering | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512F
-; RUN: llc < %s -mcpu=x86-64 -mattr=+avx512bw -x86-experimental-vector-shuffle-lowering | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512BW
+; RUN: llc < %s -mcpu=x86-64 -mattr=+avx512f -x86-experimental-vector-shuffle-legality | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512F
+; RUN: llc < %s -mcpu=x86-64 -mattr=+avx512bw -x86-experimental-vector-shuffle-legality | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512BW
 
 target triple = "x86_64-unknown-unknown"
 
 define <8 x double> @shuffle_v8f64_00000000(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00000000
+; ALL-LABEL: shuffle_v8f64_00000000:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,0]
-; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
+; ALL-NEXT: vbroadcastsd %xmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
   ret <8 x double> %shuffle
 }
 
 define <8 x double> @shuffle_v8f64_00000010(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00000010
+; ALL-LABEL: shuffle_v8f64_00000010:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,1,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm1
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,1,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0>
@@ -25,10 +24,10 @@ define <8 x double> @shuffle_v8f64_00000010(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00000200(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00000200
+; ALL-LABEL: shuffle_v8f64_00000200:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,2,0,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm1
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0>
@@ -36,10 +35,10 @@ define <8 x double> @shuffle_v8f64_00000200(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00003000(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00003000
+; ALL-LABEL: shuffle_v8f64_00003000:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[3,0,0,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm1
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[3,0,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 3, i32 0, i32 0, i32 0>
@@ -47,13 +46,12 @@ define <8 x double> @shuffle_v8f64_00003000(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00040000(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00040000
+; ALL-LABEL: shuffle_v8f64_00040000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,1,2,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm0[0,0,0,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm1 = ymm2[0,1,2],ymm1[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vbroadcastsd %xmm1, %ymm1
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm0
+; ALL-NEXT: vblendpd {{.*#+}} ymm1 = ymm0[0,1,2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 4, i32 0, i32 0, i32 0, i32 0>
@@ -61,13 +59,12 @@ define <8 x double> @shuffle_v8f64_00040000(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00500000(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00500000
+; ALL-LABEL: shuffle_v8f64_00500000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,1,1,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm0[0,0,2,0]
-; ALL-NEXT: vblendpd {{.*}} # ymm1 = ymm2[0,1],ymm1[2],ymm2[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vblendpd {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,0,1,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 5, i32 0, i32 0, i32 0, i32 0, i32 0>
@@ -75,13 +72,12 @@ define <8 x double> @shuffle_v8f64_00500000(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_06000000(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_06000000
+; ALL-LABEL: shuffle_v8f64_06000000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,2,2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm0[0,1,0,0]
-; ALL-NEXT: vblendpd {{.*}} # ymm1 = ymm2[0],ymm1[1],ymm2[2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vblendpd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,0,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 6, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
@@ -89,12 +85,12 @@ define <8 x double> @shuffle_v8f64_06000000(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_70000000(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_70000000
+; ALL-LABEL: shuffle_v8f64_70000000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[3,1,2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,0]
-; ALL-NEXT: vblendpd {{.*}} # ymm1 = ymm1[0],ymm0[1,2,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm1 = ymm0[0,1,2],ymm1[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[3,0,0,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
@@ -102,22 +98,22 @@ define <8 x double> @shuffle_v8f64_70000000(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_01014545(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_01014545
+; ALL-LABEL: shuffle_v8f64_01014545:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,1,0,1]
-; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,0,1]
-; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
+; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
+; ALL-NEXT: vinsertf128 $1, %xmm1, %ymm1, %ymm1
+; ALL-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
+; ALL-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 4, i32 5, i32 4, i32 5>
   ret <8 x double> %shuffle
 }
 
 define <8 x double> @shuffle_v8f64_00112233(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00112233
+; ALL-LABEL: shuffle_v8f64_00112233:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,1,1]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,2,3,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,0,1,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,2,3,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3>
@@ -125,10 +121,10 @@ define <8 x double> @shuffle_v8f64_00112233(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00001111(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00001111
+; ALL-LABEL: shuffle_v8f64_00001111:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[1,1,1,1]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm1
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[1,1,1,1]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1>
@@ -136,12 +132,12 @@ define <8 x double> @shuffle_v8f64_00001111(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_81a3c5e7(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_81a3c5e7
+; ALL-LABEL: shuffle_v8f64_81a3c5e7:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 8, i32 1, i32 10, i32 3, i32 12, i32 5, i32 14, i32 7>
@@ -149,11 +145,11 @@ define <8 x double> @shuffle_v8f64_81a3c5e7(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_08080808(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_08080808
+; ALL-LABEL: shuffle_v8f64_08080808:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,0,2,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,0,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
+; ALL-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
+; ALL-NEXT: vbroadcastsd %xmm1, %ymm1
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 8, i32 0, i32 8, i32 0, i32 8, i32 0, i32 8>
@@ -161,16 +157,16 @@ define <8 x double> @shuffle_v8f64_08080808(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_08084c4c(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_08084c4c
+; ALL-LABEL: shuffle_v8f64_08084c4c:
 ; ALL: # BB#0:
-; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm2[0,0,2,0]
-; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm3
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm3[0,1,0,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,0,2,0]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,0,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
+; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
+; ALL-NEXT: vinsertf128 $1, %xmm2, %ymm2, %ymm2
+; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm3
+; ALL-NEXT: vbroadcastsd %xmm3, %ymm3
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[3]
+; ALL-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
+; ALL-NEXT: vbroadcastsd %xmm1, %ymm1
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 8, i32 0, i32 8, i32 4, i32 12, i32 4, i32 12>
@@ -178,14 +174,14 @@ define <8 x double> @shuffle_v8f64_08084c4c(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_8823cc67(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_8823cc67
+; ALL-LABEL: shuffle_v8f64_8823cc67:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpermilpd {{.*}} # ymm3 = ymm3[0,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3]
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[0,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3]
+; ALL-NEXT: vbroadcastsd %xmm3, %ymm3
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3]
+; ALL-NEXT: vbroadcastsd %xmm1, %ymm1
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 8, i32 8, i32 2, i32 3, i32 12, i32 12, i32 6, i32 7>
@@ -193,50 +189,46 @@ define <8 x double> @shuffle_v8f64_8823cc67(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_9832dc76(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_9832dc76
+; ALL-LABEL: shuffle_v8f64_9832dc76:
 ; ALL: # BB#0:
-; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
-; ALL-NEXT: vpermilpd {{.*}} # ymm2 = ymm2[0,0,3,2]
-; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpermilpd {{.*}} # ymm3 = ymm3[1,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3]
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[0,0,3,2]
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[1,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3]
-; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm1[0,1],ymm0[2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm2 = ymm2[1,0,3,2]
+; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
+; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm1
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[1,0,3,2]
+; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 9, i32 8, i32 3, i32 2, i32 13, i32 12, i32 7, i32 6>
   ret <8 x double> %shuffle
 }
 
 define <8 x double> @shuffle_v8f64_9810dc54(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_9810dc54
+; ALL-LABEL: shuffle_v8f64_9810dc54:
 ; ALL: # BB#0:
-; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm2[0,1,1,0]
-; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpermilpd {{.*}} # ymm3 = ymm3[1,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,1,0]
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[1,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3]
-; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
+; ALL-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm2
+; ALL-NEXT: vpermilpd {{.*#+}} ymm2 = ymm2[1,0,3,2]
+; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm1
+; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
+; ALL-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[1,0,3,2]
+; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 9, i32 8, i32 1, i32 0, i32 13, i32 12, i32 5, i32 4>
   ret <8 x double> %shuffle
 }
 
 define <8 x double> @shuffle_v8f64_08194c5d(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_08194c5d
+; ALL-LABEL: shuffle_v8f64_08194c5d:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm2[0,0,2,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,0,2,1]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm3
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm3[0,1,1,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,0,2,1]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,1,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[0,1,1,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,0,2,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 8, i32 1, i32 9, i32 4, i32 12, i32 5, i32 13>
@@ -244,16 +236,16 @@ define <8 x double> @shuffle_v8f64_08194c5d(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_2a3b6e7f(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_2a3b6e7f
+; ALL-LABEL: shuffle_v8f64_2a3b6e7f:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm2[0,2,2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[0,2,2,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm3
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm3[2,1,3,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,2,2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,1,3,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[2,1,3,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,2,2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,3,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 2, i32 10, i32 3, i32 11, i32 6, i32 14, i32 7, i32 15>
@@ -261,14 +253,14 @@ define <8 x double> @shuffle_v8f64_2a3b6e7f(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_08192a3b(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_08192a3b
-; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm1[0,2,2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm0[2,1,3,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,0,2,1]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,1,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
+; ALL-LABEL: shuffle_v8f64_08192a3b:
+; ALL: # BB#0:
+; ALL-NEXT: vpermpd {{.*#+}} ymm2 = ymm1[0,2,2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm3 = ymm0[2,1,3,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[0,0,2,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 8, i32 1, i32 9, i32 2, i32 10, i32 3, i32 11>
@@ -276,27 +268,26 @@ define <8 x double> @shuffle_v8f64_08192a3b(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_08991abb(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_08991abb
+; ALL-LABEL: shuffle_v8f64_08991abb:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm2 = ymm0[1,0,2,2]
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm1[0,2,3,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm2[0],ymm3[1,2,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[0,0,1,1]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1,2,3]
-; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
+; ALL-NEXT: vpermpd {{.*#+}} ymm2 = ymm1[0,0,1,1]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm0[0],ymm2[1,2,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[1,2,3,3]
+; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 8, i32 9, i32 9, i32 1, i32 10, i32 11, i32 11>
   ret <8 x double> %shuffle
 }
 
 define <8 x double> @shuffle_v8f64_091b2d3f(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_091b2d3f
+; ALL-LABEL: shuffle_v8f64_091b2d3f:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm0[2,1,3,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,1,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm3 = ymm0[2,1,3,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 9, i32 1, i32 11, i32 2, i32 13, i32 3, i32 15>
@@ -304,12 +295,12 @@ define <8 x double> @shuffle_v8f64_091b2d3f(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_09ab1def(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_09ab1def
+; ALL-LABEL: shuffle_v8f64_09ab1def:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermilpd {{.*}} # ymm3 = ymm0[1,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0],ymm2[1,2,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm1[1,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm3 = ymm0[1,0,2,2]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm3[0],ymm2[1,2,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 9, i32 10, i32 11, i32 1, i32 13, i32 14, i32 15>
@@ -317,11 +308,11 @@ define <8 x double> @shuffle_v8f64_09ab1def(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00014445(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00014445
+; ALL-LABEL: shuffle_v8f64_00014445:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,0,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,0,0,1]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,0,1]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 1, i32 4, i32 4, i32 4, i32 5>
@@ -329,11 +320,11 @@ define <8 x double> @shuffle_v8f64_00014445(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00204464(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00204464
+; ALL-LABEL: shuffle_v8f64_00204464:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,2,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,0,2,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,2,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 0, i32 4, i32 4, i32 6, i32 4>
@@ -341,11 +332,11 @@ define <8 x double> @shuffle_v8f64_00204464(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_03004744(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_03004744
+; ALL-LABEL: shuffle_v8f64_03004744:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,3,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,3,0,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,3,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,3,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 3, i32 0, i32 0, i32 4, i32 7, i32 4, i32 4>
@@ -353,11 +344,11 @@ define <8 x double> @shuffle_v8f64_03004744(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_10005444(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_10005444
+; ALL-LABEL: shuffle_v8f64_10005444:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[1,0,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[1,0,0,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[1,0,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[1,0,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 0, i32 0, i32 5, i32 4, i32 4, i32 4>
@@ -365,11 +356,11 @@ define <8 x double> @shuffle_v8f64_10005444(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_22006644(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_22006644
+; ALL-LABEL: shuffle_v8f64_22006644:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[2,2,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[2,2,0,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,2,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,2,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 2, i32 2, i32 0, i32 0, i32 6, i32 6, i32 4, i32 4>
@@ -377,11 +368,11 @@ define <8 x double> @shuffle_v8f64_22006644(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_33307774(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_33307774
+; ALL-LABEL: shuffle_v8f64_33307774:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[3,3,3,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[3,3,3,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[3,3,3,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[3,3,3,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 3, i32 3, i32 3, i32 0, i32 7, i32 7, i32 7, i32 4>
@@ -389,11 +380,11 @@ define <8 x double> @shuffle_v8f64_33307774(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_32107654(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_32107654
+; ALL-LABEL: shuffle_v8f64_32107654:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[3,2,1,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[3,2,1,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[3,2,1,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[3,2,1,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 3, i32 2, i32 1, i32 0, i32 7, i32 6, i32 5, i32 4>
@@ -401,11 +392,11 @@ define <8 x double> @shuffle_v8f64_32107654(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00234467(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00234467
+; ALL-LABEL: shuffle_v8f64_00234467:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[0,0,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[0,0,2,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[0,0,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[0,0,2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 3, i32 4, i32 4, i32 6, i32 7>
@@ -413,11 +404,11 @@ define <8 x double> @shuffle_v8f64_00234467(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00224466(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00224466
+; ALL-LABEL: shuffle_v8f64_00224466:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[0,0,2,2]
+; ALL-NEXT: vmovddup {{.*#+}} ymm1 = ymm0[0,0,2,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[0,0,2,2]
+; ALL-NEXT: vmovddup {{.*#+}} ymm0 = ymm0[0,0,2,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 2, i32 4, i32 4, i32 6, i32 6>
@@ -425,11 +416,11 @@ define <8 x double> @shuffle_v8f64_00224466(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_10325476(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_10325476
+; ALL-LABEL: shuffle_v8f64_10325476:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,0,3,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,0,3,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[1,0,3,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[1,0,3,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 3, i32 2, i32 5, i32 4, i32 7, i32 6>
@@ -437,11 +428,11 @@ define <8 x double> @shuffle_v8f64_10325476(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_11335577(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_11335577
+; ALL-LABEL: shuffle_v8f64_11335577:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,1,3,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,1,3,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[1,1,3,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[1,1,3,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 1, i32 3, i32 3, i32 5, i32 5, i32 7, i32 7>
@@ -449,11 +440,11 @@ define <8 x double> @shuffle_v8f64_11335577(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_10235467(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_10235467
+; ALL-LABEL: shuffle_v8f64_10235467:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,0,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,0,2,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[1,0,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[1,0,2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 2, i32 3, i32 5, i32 4, i32 6, i32 7>
@@ -461,11 +452,11 @@ define <8 x double> @shuffle_v8f64_10235467(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_10225466(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_10225466
+; ALL-LABEL: shuffle_v8f64_10225466:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,0,2,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,0,2,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermilpd {{.*}} # ymm0 = ymm0[1,0,2,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm0 = ymm0[1,0,2,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 2, i32 2, i32 5, i32 4, i32 6, i32 6>
@@ -473,11 +464,11 @@ define <8 x double> @shuffle_v8f64_10225466(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00015444(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00015444
+; ALL-LABEL: shuffle_v8f64_00015444:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,0,1]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,0,0,1]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[1,0,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[1,0,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 1, i32 5, i32 4, i32 4, i32 4>
@@ -485,11 +476,11 @@ define <8 x double> @shuffle_v8f64_00015444(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00204644(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00204644
+; ALL-LABEL: shuffle_v8f64_00204644:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,0,2,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,0,2,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,2,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,2,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 0, i32 4, i32 6, i32 4, i32 4>
@@ -497,11 +488,11 @@ define <8 x double> @shuffle_v8f64_00204644(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_03004474(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_03004474
+; ALL-LABEL: shuffle_v8f64_03004474:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[0,3,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[0,3,0,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,3,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,0,3,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 3, i32 0, i32 0, i32 4, i32 4, i32 7, i32 4>
@@ -509,11 +500,11 @@ define <8 x double> @shuffle_v8f64_03004474(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_10004444(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_10004444
+; ALL-LABEL: shuffle_v8f64_10004444:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[1,0,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[1,0,0,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 0, i32 0, i32 4, i32 4, i32 4, i32 4>
@@ -521,11 +512,11 @@ define <8 x double> @shuffle_v8f64_10004444(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_22006446(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_22006446
+; ALL-LABEL: shuffle_v8f64_22006446:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[2,2,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[2,2,0,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,0,0,2]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,0,0,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 2, i32 2, i32 0, i32 0, i32 6, i32 4, i32 4, i32 6>
@@ -533,11 +524,11 @@ define <8 x double> @shuffle_v8f64_22006446(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_33307474(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_33307474
+; ALL-LABEL: shuffle_v8f64_33307474:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[3,3,3,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[3,3,3,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[3,0,3,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[3,0,3,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 3, i32 3, i32 3, i32 0, i32 7, i32 4, i32 7, i32 4>
@@ -545,9 +536,9 @@ define <8 x double> @shuffle_v8f64_33307474(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_32104567(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_32104567
+; ALL-LABEL: shuffle_v8f64_32104567:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm0[3,2,1,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm0[3,2,1,0]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 3, i32 2, i32 1, i32 0, i32 4, i32 5, i32 6, i32 7>
@@ -556,11 +547,11 @@ define <8 x double> @shuffle_v8f64_32104567(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00236744(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00236744
+; ALL-LABEL: shuffle_v8f64_00236744:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[0,0,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[0,0,2,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,3,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,3,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 3, i32 6, i32 7, i32 4, i32 4>
@@ -568,11 +559,11 @@ define <8 x double> @shuffle_v8f64_00236744(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00226644(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00226644
+; ALL-LABEL: shuffle_v8f64_00226644:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[0,0,2,2]
+; ALL-NEXT: vmovddup {{.*#+}} ymm1 = ymm0[0,0,2,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,2,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,2,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 2, i32 6, i32 6, i32 4, i32 4>
@@ -580,9 +571,9 @@ define <8 x double> @shuffle_v8f64_00226644(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_10324567(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_10324567
+; ALL-LABEL: shuffle_v8f64_10324567:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,0,3,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,0,3,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 3, i32 2, i32 4, i32 5, i32 6, i32 7>
@@ -591,9 +582,9 @@ define <8 x double> @shuffle_v8f64_10324567(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_11334567(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_11334567
+; ALL-LABEL: shuffle_v8f64_11334567:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,1,3,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,1,3,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 1, i32 3, i32 3, i32 4, i32 5, i32 6, i32 7>
@@ -602,10 +593,10 @@ define <8 x double> @shuffle_v8f64_11334567(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_01235467(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_01235467
+; ALL-LABEL: shuffle_v8f64_01235467:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[1,0,2,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm1[1,0,2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 5, i32 4, i32 6, i32 7>
@@ -613,10 +604,10 @@ define <8 x double> @shuffle_v8f64_01235467(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_01235466(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_01235466
+; ALL-LABEL: shuffle_v8f64_01235466:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[1,0,2,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm1[1,0,2,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 5, i32 4, i32 6, i32 6>
@@ -624,11 +615,11 @@ define <8 x double> @shuffle_v8f64_01235466(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_002u6u44(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_002u6u44
+; ALL-LABEL: shuffle_v8f64_002u6u44:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[0,0,2,2]
+; ALL-NEXT: vmovddup {{.*#+}} ymm1 = ymm0[0,0,2,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,1,0,0]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,1,0,0]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 undef, i32 6, i32 undef, i32 4, i32 4>
@@ -636,11 +627,11 @@ define <8 x double> @shuffle_v8f64_002u6u44(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_00uu66uu(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_00uu66uu
+; ALL-LABEL: shuffle_v8f64_00uu66uu:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[0,0,2,2]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm1
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[2,2,2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[2,2,2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 0, i32 undef, i32 undef, i32 6, i32 6, i32 undef, i32 undef>
@@ -648,9 +639,9 @@ define <8 x double> @shuffle_v8f64_00uu66uu(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_103245uu(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_103245uu
+; ALL-LABEL: shuffle_v8f64_103245uu:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,0,3,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,0,3,2]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 0, i32 3, i32 2, i32 4, i32 5, i32 undef, i32 undef>
@@ -659,9 +650,9 @@ define <8 x double> @shuffle_v8f64_103245uu(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_1133uu67(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_1133uu67
+; ALL-LABEL: shuffle_v8f64_1133uu67:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm0[1,1,3,3]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm0[1,1,3,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm0
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 1, i32 1, i32 3, i32 3, i32 undef, i32 undef, i32 6, i32 7>
@@ -670,10 +661,10 @@ define <8 x double> @shuffle_v8f64_1133uu67(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_0uu354uu(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_0uu354uu
+; ALL-LABEL: shuffle_v8f64_0uu354uu:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[1,0,2,2]
+; ALL-NEXT: vpermilpd {{.*#+}} ymm1 = ymm1[1,0,2,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 0, i32 undef, i32 undef, i32 3, i32 5, i32 4, i32 undef, i32 undef>
@@ -681,10 +672,10 @@ define <8 x double> @shuffle_v8f64_0uu354uu(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_uuu3uu66(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_uuu3uu66
+; ALL-LABEL: shuffle_v8f64_uuu3uu66:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermilpd {{.*}} # ymm1 = ymm1[0,0,2,2]
+; ALL-NEXT: vmovddup {{.*#+}} ymm1 = ymm1[0,0,2,2]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 undef, i32 undef, i32 undef, i32 3, i32 undef, i32 undef, i32 6, i32 6>
@@ -692,19 +683,17 @@ define <8 x double> @shuffle_v8f64_uuu3uu66(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_c348cda0(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_c348cda0
+; ALL-LABEL: shuffle_v8f64_c348cda0:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm2 = ymm2[0,1,0,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm0[0,3,2,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm3[0,1],ymm2[2],ymm3[3]
+; ALL-NEXT: vperm2f128 {{.*#+}} ymm2 = ymm0[0,1],ymm2[0,1]
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpermpd {{.*}} # ymm4 = ymm1[0,1,2,0]
-; ALL-NEXT: vblendpd {{.*}} # ymm4 = ymm3[0,1,2],ymm4[3]
-; ALL-NEXT: vblendpd {{.*}} # ymm2 = ymm4[0],ymm2[1,2],ymm4[3]
-; ALL-NEXT: vblendpd {{.*}} # ymm1 = ymm3[0,1],ymm1[2],ymm3[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,2,0]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm1[0,1,2],ymm0[3]
+; ALL-NEXT: vbroadcastsd %xmm1, %ymm4
+; ALL-NEXT: vblendpd {{.*#+}} ymm4 = ymm3[0,1,2],ymm4[3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm2 = ymm4[0],ymm2[1,2],ymm4[3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm1 = ymm3[0,1],ymm1[2],ymm3[3]
+; ALL-NEXT: vbroadcastsd %xmm0, %ymm0
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 12, i32 3, i32 4, i32 8, i32 12, i32 13, i32 10, i32 0>
@@ -712,19 +701,18 @@ define <8 x double> @shuffle_v8f64_c348cda0(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x double> @shuffle_v8f64_f511235a(<8 x double> %a, <8 x double> %b) {
-; ALL-LABEL: @shuffle_v8f64_f511235a
+; ALL-LABEL: shuffle_v8f64_f511235a:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextractf64x4 $1, %zmm0, %ymm2
-; ALL-NEXT: vpermpd {{.*}} # ymm3 = ymm2[0,1,1,3]
-; ALL-NEXT: vpermpd {{.*}} # ymm4 = ymm0[2,3,2,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm3 = ymm4[0,1],ymm3[2],ymm4[3]
-; ALL-NEXT: vpermilpd {{.*}} # ymm4 = ymm1[0,0,2,2]
-; ALL-NEXT: vblendpd {{.*}} # ymm3 = ymm3[0,1,2],ymm4[3]
-; ALL-NEXT: vpermpd {{.*}} # ymm0 = ymm0[0,1,1,1]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm0[0],ymm2[1],ymm0[2,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm3 = ymm0[0],ymm2[1],ymm0[2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm3 = ymm3[2,3,1,3]
+; ALL-NEXT: vmovddup {{.*#+}} ymm4 = ymm1[0,0,2,2]
+; ALL-NEXT: vblendpd {{.*#+}} ymm3 = ymm3[0,1,2],ymm4[3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm0 = ymm0[0,1,1,1]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3]
 ; ALL-NEXT: vextractf64x4 $1, %zmm1, %ymm1
-; ALL-NEXT: vpermpd {{.*}} # ymm1 = ymm1[3,1,2,3]
-; ALL-NEXT: vblendpd {{.*}} # ymm0 = ymm1[0],ymm0[1,2,3]
+; ALL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[3,1,2,3]
+; ALL-NEXT: vblendpd {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3]
 ; ALL-NEXT: vinsertf64x4 $1, %ymm3, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> <i32 15, i32 5, i32 1, i32 1, i32 2, i32 3, i32 5, i32 10>
@@ -732,20 +720,19 @@ define <8 x double> @shuffle_v8f64_f511235a(<8 x double> %a, <8 x double> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00000000(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00000000
+; ALL-LABEL: shuffle_v8i64_00000000:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,0]
-; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; ALL-NEXT: vpbroadcastq %xmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
   ret <8 x i64> %shuffle
 }
 
 define <8 x i64> @shuffle_v8i64_00000010(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00000010
+; ALL-LABEL: shuffle_v8i64_00000010:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,1,0]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm1
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,0]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 0>
@@ -753,10 +740,10 @@ define <8 x i64> @shuffle_v8i64_00000010(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00000200(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00000200
+; ALL-LABEL: shuffle_v8i64_00000200:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,2,0,0]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm1
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,0,0]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 2, i32 0, i32 0>
@@ -764,10 +751,10 @@ define <8 x i64> @shuffle_v8i64_00000200(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00003000(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00003000
+; ALL-LABEL: shuffle_v8i64_00003000:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[3,0,0,0]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm1
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,0,0,0]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 3, i32 0, i32 0, i32 0>
@@ -775,13 +762,12 @@ define <8 x i64> @shuffle_v8i64_00003000(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00040000(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00040000
+; ALL-LABEL: shuffle_v8i64_00040000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,1,2,0]
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm0[0,0,0,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vpbroadcastq %xmm1, %ymm1
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm0
+; ALL-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 4, i32 0, i32 0, i32 0, i32 0>
@@ -789,13 +775,12 @@ define <8 x i64> @shuffle_v8i64_00040000(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00500000(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00500000
+; ALL-LABEL: shuffle_v8i64_00500000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,1,1,3]
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm0[0,0,2,0]
-; ALL-NEXT: vpblendd {{.*}} # ymm1 = ymm2[0,1,2,3],ymm1[4,5],ymm2[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1],ymm1[2,3],ymm0[4,5,6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,0]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm0
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 5, i32 0, i32 0, i32 0, i32 0, i32 0>
@@ -803,13 +788,12 @@ define <8 x i64> @shuffle_v8i64_00500000(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_06000000(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_06000000
+; ALL-LABEL: shuffle_v8i64_06000000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,2,2,3]
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm0[0,1,0,0]
-; ALL-NEXT: vpblendd {{.*}} # ymm1 = ymm2[0,1],ymm1[2,3],ymm2[4,5,6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,0]
+; ALL-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,0,0]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm0
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 6, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
@@ -817,12 +801,12 @@ define <8 x i64> @shuffle_v8i64_06000000(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_70000000(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_70000000
+; ALL-LABEL: shuffle_v8i64_70000000:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[3,1,2,3]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,0]
-; ALL-NEXT: vpblendd {{.*}} # ymm1 = ymm1[0,1],ymm0[2,3,4,5,6,7]
+; ALL-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],ymm1[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,0,0,0]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm0
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 7, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0>
@@ -830,22 +814,22 @@ define <8 x i64> @shuffle_v8i64_70000000(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_01014545(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_01014545
+; ALL-LABEL: shuffle_v8i64_01014545:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,1,0,1]
-; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,0,1]
-; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1
+; ALL-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
+; ALL-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
+; ALL-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 4, i32 5, i32 4, i32 5>
   ret <8 x i64> %shuffle
 }
 
 define <8 x i64> @shuffle_v8i64_00112233(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00112233
+; ALL-LABEL: shuffle_v8i64_00112233:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,1,1]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,2,3,3]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,1,1]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,3,3]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3>
@@ -853,10 +837,10 @@ define <8 x i64> @shuffle_v8i64_00112233(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00001111(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00001111
+; ALL-LABEL: shuffle_v8i64_00001111:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,0,0]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[1,1,1,1]
+; ALL-NEXT: vpbroadcastq %xmm0, %ymm1
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,1,1,1]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1>
@@ -864,12 +848,12 @@ define <8 x i64> @shuffle_v8i64_00001111(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_81a3c5e7(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_81a3c5e7
+; ALL-LABEL: shuffle_v8i64_81a3c5e7:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm2
 ; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 8, i32 1, i32 10, i32 3, i32 12, i32 5, i32 14, i32 7>
@@ -877,11 +861,11 @@ define <8 x i64> @shuffle_v8i64_81a3c5e7(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_08080808(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_08080808
+; ALL-LABEL: shuffle_v8i64_08080808:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,0,2,0]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,0,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
+; ALL-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
+; ALL-NEXT: vpbroadcastq %xmm1, %ymm1
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 8, i32 0, i32 8, i32 0, i32 8, i32 0, i32 8>
@@ -889,16 +873,16 @@ define <8 x i64> @shuffle_v8i64_08080808(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_08084c4c(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_08084c4c
+; ALL-LABEL: shuffle_v8i64_08084c4c:
 ; ALL: # BB#0:
-; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm2[0,0,2,0]
-; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm3
-; ALL-NEXT: vpermq {{.*}} # ymm3 = ymm3[0,1,0,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,0,2,0]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,0,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
+; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm2
+; ALL-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
+; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm3
+; ALL-NEXT: vpbroadcastq %xmm3, %ymm3
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1],ymm3[2,3],ymm2[4,5],ymm3[6,7]
+; ALL-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
+; ALL-NEXT: vpbroadcastq %xmm1, %ymm1
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 8, i32 0, i32 8, i32 4, i32 12, i32 4, i32 12>
@@ -906,14 +890,14 @@ define <8 x i64> @shuffle_v8i64_08084c4c(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_8823cc67(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_8823cc67
+; ALL-LABEL: shuffle_v8i64_8823cc67:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm2
 ; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpshufd {{.*}} # ymm3 = ymm3[0,1,0,1,4,5,4,5]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
-; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm1[0,1,0,1,4,5,4,5]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
+; ALL-NEXT: vpbroadcastq %xmm3, %ymm3
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
+; ALL-NEXT: vpbroadcastq %xmm1, %ymm1
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 8, i32 8, i32 2, i32 3, i32 12, i32 12, i32 6, i32 7>
@@ -921,50 +905,46 @@ define <8 x i64> @shuffle_v8i64_8823cc67(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_9832dc76(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_9832dc76
+; ALL-LABEL: shuffle_v8i64_9832dc76:
 ; ALL: # BB#0:
-; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm2
-; ALL-NEXT: vpshufd {{.*}} # ymm2 = ymm2[2,3,0,1,6,7,4,5]
-; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpshufd {{.*}} # ymm3 = ymm3[2,3,0,1,6,7,4,5]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
-; ALL-NEXT: vpshufd {{.*}} # ymm0 = ymm0[2,3,0,1,6,7,4,5]
-; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm1[2,3,0,1,6,7,4,5]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
-; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm0[4,5,6,7]
+; ALL-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,3,0,1,6,7,4,5]
+; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0
+; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm1
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
+; ALL-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,3,0,1,6,7,4,5]
+; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 9, i32 8, i32 3, i32 2, i32 13, i32 12, i32 7, i32 6>
   ret <8 x i64> %shuffle
 }
 
 define <8 x i64> @shuffle_v8i64_9810dc54(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_9810dc54
+; ALL-LABEL: shuffle_v8i64_9810dc54:
 ; ALL: # BB#0:
-; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm2
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm2[0,1,1,0]
-; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm3
-; ALL-NEXT: vpshufd {{.*}} # ymm3 = ymm3[2,3,0,1,6,7,4,5]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,1,0]
-; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm1[2,3,0,1,6,7,4,5]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
-; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; ALL-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm2
+; ALL-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,3,0,1,6,7,4,5]
+; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm1
+; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0
+; ALL-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; ALL-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,3,0,1,6,7,4,5]
+; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 9, i32 8, i32 1, i32 0, i32 13, i32 12, i32 5, i32 4>
   ret <8 x i64> %shuffle
 }
 
 define <8 x i64> @shuffle_v8i64_08194c5d(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_08194c5d
+; ALL-LABEL: shuffle_v8i64_08194c5d:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm2[0,0,2,1]
+; ALL-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm3
-; ALL-NEXT: vpermq {{.*}} # ymm3 = ymm3[0,1,1,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,0,2,1]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,1,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 8, i32 1, i32 9, i32 4, i32 12, i32 5, i32 13>
@@ -972,16 +952,16 @@ define <8 x i64> @shuffle_v8i64_08194c5d(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_2a3b6e7f(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_2a3b6e7f
+; ALL-LABEL: shuffle_v8i64_2a3b6e7f:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm2[0,2,2,3]
+; ALL-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm3
-; ALL-NEXT: vpermq {{.*}} # ymm3 = ymm3[2,1,3,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,2,2,3]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,1,3,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 2, i32 10, i32 3, i32 11, i32 6, i32 14, i32 7, i32 15>
@@ -989,14 +969,14 @@ define <8 x i64> @shuffle_v8i64_2a3b6e7f(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_08192a3b(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_08192a3b
-; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm2 = ymm1[0,2,2,3]
-; ALL-NEXT: vpermq {{.*}} # ymm3 = ymm0[2,1,3,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,0,2,1]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,1,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
+; ALL-LABEL: shuffle_v8i64_08192a3b:
+; ALL: # BB#0:
+; ALL-NEXT: vpermq {{.*#+}} ymm2 = ymm1[0,2,2,3]
+; ALL-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,1,3,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 8, i32 1, i32 9, i32 2, i32 10, i32 3, i32 11>
@@ -1004,27 +984,26 @@ define <8 x i64> @shuffle_v8i64_08192a3b(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_08991abb(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_08991abb
+; ALL-LABEL: shuffle_v8i64_08991abb:
 ; ALL: # BB#0:
-; ALL-NEXT: vpshufd {{.*}} # ymm2 = ymm0[2,3,2,3,6,7,6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm3 = ymm1[0,2,3,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm2[0,1],ymm3[2,3,4,5,6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[0,0,1,1]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
-; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; ALL-NEXT: vpermq {{.*#+}} ymm2 = ymm1[0,0,1,1]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1],ymm2[2,3,4,5,6,7]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5,6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,2,3,3]
+; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 8, i32 9, i32 9, i32 1, i32 10, i32 11, i32 11>
   ret <8 x i64> %shuffle
 }
 
 define <8 x i64> @shuffle_v8i64_091b2d3f(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_091b2d3f
+; ALL-LABEL: shuffle_v8i64_091b2d3f:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpermq {{.*}} # ymm3 = ymm0[2,1,3,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,1,1,3]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,1,3,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3],ymm3[4,5],ymm2[6,7]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3],ymm0[4,5],ymm1[6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 9, i32 1, i32 11, i32 2, i32 13, i32 3, i32 15>
@@ -1032,12 +1011,12 @@ define <8 x i64> @shuffle_v8i64_091b2d3f(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_09ab1def(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_09ab1def
+; ALL-LABEL: shuffle_v8i64_09ab1def:
 ; ALL: # BB#0:
 ; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm2
-; ALL-NEXT: vpshufd {{.*}} # ymm3 = ymm0[2,3,2,3,6,7,6,7]
-; ALL-NEXT: vpblendd {{.*}} # ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
-; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
+; ALL-NEXT: vpshufd {{.*#+}} ymm3 = ymm0[2,3,2,3,6,7,6,7]
+; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1],ymm2[2,3,4,5,6,7]
+; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7]
 ; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 9, i32 10, i32 11, i32 1, i32 13, i32 14, i32 15>
@@ -1045,11 +1024,11 @@ define <8 x i64> @shuffle_v8i64_09ab1def(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00014445(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00014445
+; ALL-LABEL: shuffle_v8i64_00014445:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,0,1]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,0,1]
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,1]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 0, i32 1, i32 4, i32 4, i32 4, i32 5>
@@ -1057,11 +1036,11 @@ define <8 x i64> @shuffle_v8i64_00014445(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_00204464(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_00204464
+; ALL-LABEL: shuffle_v8i64_00204464:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,2,0]
+; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,2,0]
 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0
-; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,2,0]
+; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,0]
 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; ALL-NEXT: retq
   %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> <i32 0, i32 0, i32 2, i32 0, i32 4, i32 4, i32 6, i32 4>
@@ -1069,11 +1048,11 @@ define <8 x i64> @shuffle_v8i64_00204464(<8 x i64> %a, <8 x i64> %b) {
 }
 
 define <8 x i64> @shuffle_v8i64_03004744(<8 x i64> %a, <8 x i64> %b) {
-; ALL-LABEL: @shuffle_v8i64_03004744
+; ALL-LABEL: shuffle_v8i64_03004744:
 ; ALL: # BB#0:
-; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,3,0,0]
+; 
ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,3,0,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,3,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,3,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1081,11 +1060,11 @@ define <8 x i64> @shuffle_v8i64_03004744(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_10005444(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_10005444 +; ALL-LABEL: shuffle_v8i64_10005444: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[1,0,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[1,0,0,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[1,0,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,0,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1093,11 +1072,11 @@ define <8 x i64> @shuffle_v8i64_10005444(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_22006644(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_22006644 +; ALL-LABEL: shuffle_v8i64_22006644: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[2,2,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,2,0,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,2,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1105,11 +1084,11 @@ define <8 x i64> @shuffle_v8i64_22006644(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_33307774(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_33307774 +; ALL-LABEL: shuffle_v8i64_33307774: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[3,3,3,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[3,3,3,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[3,3,3,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,3,3,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1117,11 +1096,11 @@ define <8 x i64> @shuffle_v8i64_33307774(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_32107654(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_32107654 +; ALL-LABEL: shuffle_v8i64_32107654: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[3,2,1,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[3,2,1,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[3,2,1,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,2,1,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1129,11 +1108,11 @@ define <8 x i64> @shuffle_v8i64_32107654(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00234467(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00234467 +; ALL-LABEL: shuffle_v8i64_00234467: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,2,3] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,3] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1141,11 +1120,11 @@ define <8 x i64> @shuffle_v8i64_00234467(<8 x i64> %a, 
<8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00224466(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00224466 +; ALL-LABEL: shuffle_v8i64_00224466: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[0,1,0,1,4,5,4,5] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpshufd {{.*}} # ymm0 = ymm0[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,1,0,1,4,5,4,5] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1153,11 +1132,11 @@ define <8 x i64> @shuffle_v8i64_00224466(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_10325476(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_10325476 +; ALL-LABEL: shuffle_v8i64_10325476: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[2,3,0,1,6,7,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,0,1,6,7,4,5] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpshufd {{.*}} # ymm0 = ymm0[2,3,0,1,6,7,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,3,0,1,6,7,4,5] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1165,11 +1144,11 @@ define <8 x i64> @shuffle_v8i64_10325476(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_11335577(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_11335577 +; ALL-LABEL: shuffle_v8i64_11335577: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[2,3,2,3,6,7,6,7] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,2,3,6,7,6,7] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpshufd {{.*}} # ymm0 = ymm0[2,3,2,3,6,7,6,7] +; ALL-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,3,2,3,6,7,6,7] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1177,11 +1156,11 @@ define <8 x i64> @shuffle_v8i64_11335577(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_10235467(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_10235467 +; ALL-LABEL: shuffle_v8i64_10235467: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[1,0,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[1,0,2,3] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[1,0,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,0,2,3] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1189,11 +1168,11 @@ define <8 x i64> @shuffle_v8i64_10235467(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_10225466(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_10225466 +; ALL-LABEL: shuffle_v8i64_10225466: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[1,0,2,2] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[1,0,2,2] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[1,0,2,2] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,0,2,2] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1201,11 +1180,11 @@ define <8 x i64> @shuffle_v8i64_10225466(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00015444(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00015444 +; ALL-LABEL: shuffle_v8i64_00015444: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,0,1] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,0,1] ; ALL-NEXT: 
vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[1,0,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,0,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1213,11 +1192,11 @@ define <8 x i64> @shuffle_v8i64_00015444(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00204644(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00204644 +; ALL-LABEL: shuffle_v8i64_00204644: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,2,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,2,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,2,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1225,11 +1204,11 @@ define <8 x i64> @shuffle_v8i64_00204644(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_03004474(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_03004474 +; ALL-LABEL: shuffle_v8i64_03004474: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,3,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,3,0,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,3,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,3,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1237,11 +1216,11 @@ define <8 x i64> @shuffle_v8i64_03004474(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_10004444(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_10004444 +; ALL-LABEL: shuffle_v8i64_10004444: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[1,0,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[1,0,0,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[0,0,0,0] +; ALL-NEXT: vpbroadcastq %xmm0, %ymm0 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1249,11 +1228,11 @@ define <8 x i64> @shuffle_v8i64_10004444(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_22006446(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_22006446 +; ALL-LABEL: shuffle_v8i64_22006446: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[2,2,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,2,0,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,0,0,2] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,0,0,2] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1261,11 +1240,11 @@ define <8 x i64> @shuffle_v8i64_22006446(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_33307474(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_33307474 +; ALL-LABEL: shuffle_v8i64_33307474: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[3,3,3,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[3,3,3,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[3,0,3,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[3,0,3,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1273,9 +1252,9 @@ define <8 x i64> @shuffle_v8i64_33307474(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_32104567(<8 x i64> %a, 
<8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_32104567 +; ALL-LABEL: shuffle_v8i64_32104567: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[3,2,1,0] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[3,2,1,0] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq @@ -1284,11 +1263,11 @@ define <8 x i64> @shuffle_v8i64_32104567(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00236744(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00236744 +; ALL-LABEL: shuffle_v8i64_00236744: ; ALL: # BB#0: -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm0[0,0,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,2,3] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,3,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1296,11 +1275,11 @@ define <8 x i64> @shuffle_v8i64_00236744(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00226644(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00226644 +; ALL-LABEL: shuffle_v8i64_00226644: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[0,1,0,1,4,5,4,5] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,2,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1308,9 +1287,9 @@ define <8 x i64> @shuffle_v8i64_00226644(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_10324567(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_10324567 +; ALL-LABEL: shuffle_v8i64_10324567: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[2,3,0,1,6,7,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,0,1,6,7,4,5] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq @@ -1319,9 +1298,9 @@ define <8 x i64> @shuffle_v8i64_10324567(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_11334567(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_11334567 +; ALL-LABEL: shuffle_v8i64_11334567: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[2,3,2,3,6,7,6,7] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,2,3,6,7,6,7] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq @@ -1330,10 +1309,10 @@ define <8 x i64> @shuffle_v8i64_11334567(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_01235467(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_01235467 +; ALL-LABEL: shuffle_v8i64_01235467: ; ALL: # BB#0: ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1 -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[1,0,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,0,2,3] ; ALL-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1341,10 +1320,10 @@ define <8 x i64> @shuffle_v8i64_01235467(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_01235466(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_01235466 +; ALL-LABEL: shuffle_v8i64_01235466: ; ALL: # BB#0: ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1 -; ALL-NEXT: vpermq {{.*}} # ymm1 = ymm1[1,0,2,2] +; ALL-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,0,2,2] ; ALL-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 ; 
ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1352,11 +1331,11 @@ define <8 x i64> @shuffle_v8i64_01235466(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_002u6u44(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_002u6u44 +; ALL-LABEL: shuffle_v8i64_002u6u44: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[0,1,0,1,4,5,4,5] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,1,0,0] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,0,0] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1364,11 +1343,11 @@ define <8 x i64> @shuffle_v8i64_002u6u44(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_00uu66uu(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_00uu66uu +; ALL-LABEL: shuffle_v8i64_00uu66uu: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpbroadcastq %xmm0, %ymm1 ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,2,2,3] +; ALL-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3] ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1376,9 +1355,9 @@ define <8 x i64> @shuffle_v8i64_00uu66uu(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_103245uu(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_103245uu +; ALL-LABEL: shuffle_v8i64_103245uu: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[2,3,0,1,6,7,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,0,1,6,7,4,5] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq @@ -1387,9 +1366,9 @@ define <8 x i64> @shuffle_v8i64_103245uu(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_1133uu67(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_1133uu67 +; ALL-LABEL: shuffle_v8i64_1133uu67: ; ALL: # BB#0: -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm0[2,3,2,3,6,7,6,7] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[2,3,2,3,6,7,6,7] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 ; ALL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; ALL-NEXT: retq @@ -1398,10 +1377,10 @@ define <8 x i64> @shuffle_v8i64_1133uu67(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_0uu354uu(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_0uu354uu +; ALL-LABEL: shuffle_v8i64_0uu354uu: ; ALL: # BB#0: ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1 -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm1[2,3,0,1,6,7,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,3,0,1,6,7,4,5] ; ALL-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1409,10 +1388,10 @@ define <8 x i64> @shuffle_v8i64_0uu354uu(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> @shuffle_v8i64_uuu3uu66(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_uuu3uu66 +; ALL-LABEL: shuffle_v8i64_uuu3uu66: ; ALL: # BB#0: ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm1 -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm1[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,1,0,1,4,5,4,5] ; ALL-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> @@ -1420,20 +1399,55 @@ define <8 x i64> @shuffle_v8i64_uuu3uu66(<8 x i64> %a, <8 x i64> %b) { } define <8 x i64> 
@shuffle_v8i64_6caa87e5(<8 x i64> %a, <8 x i64> %b) { -; ALL-LABEL: @shuffle_v8i64_6caa87e5 +; ALL-LABEL: shuffle_v8i64_6caa87e5: ; ALL: # BB#0: -; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm2 -; ALL-NEXT: vpblendd {{.*}} # ymm3 = ymm1[0,1,2,3],ymm2[4,5],ymm1[6,7] ; ALL-NEXT: vextracti64x4 $1, %zmm0, %ymm0 -; ALL-NEXT: vpermq {{.*}} # ymm4 = ymm0[0,3,2,1] -; ALL-NEXT: vpblendd {{.*}} # ymm3 = ymm3[0,1],ymm4[2,3],ymm3[4,5],ymm4[6,7] -; ALL-NEXT: vpshufd {{.*}} # ymm2 = ymm2[0,1,0,1,4,5,4,5] -; ALL-NEXT: vpshufd {{.*}} # ymm1 = ymm1[0,1,0,1,4,5,4,5] -; ALL-NEXT: vpblendd {{.*}} # ymm1 = ymm1[0,1],ymm2[2,3],ymm1[4,5,6,7] -; ALL-NEXT: vpermq {{.*}} # ymm0 = ymm0[2,1,2,3] -; ALL-NEXT: vpblendd {{.*}} # ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7] -; ALL-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0 +; ALL-NEXT: vperm2i128 {{.*#+}} ymm2 = ymm0[0,1,0,1] +; ALL-NEXT: vextracti64x4 $1, %zmm1, %ymm3 +; ALL-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0,1,2,3],ymm3[4,5],ymm1[6,7] +; ALL-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7] +; ALL-NEXT: vperm2i128 {{.*#+}} ymm0 = ymm0[2,3,0,1] +; ALL-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7] +; ALL-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,1,0,1,4,5,4,5] +; ALL-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2,3,4,5,6,7] +; ALL-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 ; ALL-NEXT: retq %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> ret <8 x i64> %shuffle } + +define <8 x double> @shuffle_v8f64_082a4c6e(<8 x double> %a, <8 x double> %b) { +; ALL-LABEL: shuffle_v8f64_082a4c6e: +; ALL: # BB#0: +; ALL-NEXT: vunpcklpd {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6] +; ALL-NEXT: retq + %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> + ret <8 x double> %shuffle +} + +define <8 x i64> @shuffle_v8i64_082a4c6e(<8 x i64> %a, <8 x i64> %b) { +; ALL-LABEL: shuffle_v8i64_082a4c6e: +; ALL: # BB#0: +; ALL-NEXT: vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[2],zmm1[2],zmm0[4],zmm1[4],zmm0[6],zmm1[6] +; ALL-NEXT: retq + %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> + ret <8 x i64> %shuffle +} + +define <8 x double> @shuffle_v8f64_193b5d7f(<8 x double> %a, <8 x double> %b) { +; ALL-LABEL: shuffle_v8f64_193b5d7f: +; ALL: # BB#0: +; ALL-NEXT: vunpckhpd {{.*#+}} zmm0 = zmm0[1],zmm1[1],zmm0[3],zmm1[3],zmm0[5],zmm1[5],zmm0[7],zmm1[7] +; ALL-NEXT: retq + %shuffle = shufflevector <8 x double> %a, <8 x double> %b, <8 x i32> + ret <8 x double> %shuffle +} + +define <8 x i64> @shuffle_v8i64_193b5d7f(<8 x i64> %a, <8 x i64> %b) { +; ALL-LABEL: shuffle_v8i64_193b5d7f: +; ALL: # BB#0: +; ALL-NEXT: vpunpckhqdq {{.*#+}} zmm0 = zmm0[1],zmm1[1],zmm0[3],zmm1[3],zmm0[5],zmm1[5],zmm0[7],zmm1[7] +; ALL-NEXT: retq + %shuffle = shufflevector <8 x i64> %a, <8 x i64> %b, <8 x i32> + ret <8 x i64> %shuffle +}