+; AVX512-LABEL: test_load_32f64:
+; AVX512: ## BB#0:
+; AVX512-NEXT: vextractf128 $1, %ymm0, %xmm5
+; AVX512-NEXT: vpmovsxbd %xmm5, %zmm5
+; AVX512-NEXT: vpslld $31, %zmm5, %zmm5
+; AVX512-NEXT: vptestmd %zmm5, %zmm5, %k1
+; AVX512-NEXT: vmovupd 128(%rdi), %zmm3 {%k1}
+; AVX512-NEXT: vpmovsxbd %xmm0, %zmm0
+; AVX512-NEXT: vpslld $31, %zmm0, %zmm0
+; AVX512-NEXT: vptestmd %zmm0, %zmm0, %k2
+; AVX512-NEXT: vmovupd (%rdi), %zmm1 {%k2}
+; AVX512-NEXT: kshiftrw $8, %k1, %k1
+; AVX512-NEXT: vmovupd 192(%rdi), %zmm4 {%k1}
+; AVX512-NEXT: kshiftrw $8, %k2, %k1
+; AVX512-NEXT: vmovupd 64(%rdi), %zmm2 {%k1}
+; AVX512-NEXT: vmovaps %zmm1, %zmm0
+; AVX512-NEXT: vmovaps %zmm2, %zmm1
+; AVX512-NEXT: vmovaps %zmm3, %zmm2
+; AVX512-NEXT: vmovaps %zmm4, %zmm3
+; AVX512-NEXT: retq
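+; Note on the AVX512 sequence above: each 16 x i1 half of the 32 x i1 mask
+; argument is sign-extended to 16 x i32 (vpmovsxbd), the i1 bit is moved into
+; the sign position (vpslld $31) and tested into a mask register (vptestmd).
+; kshiftrw $8 then peels off the upper 8 bits of each half, so every
+; vmovupd {%k} loads 8 doubles directly under the mask, merging into the
+; pass-through operands in %zmm1-%zmm4 before they are moved to the return
+; registers %zmm0-%zmm3.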
+;
+; AVX2-LABEL: test_load_32f64:
+; AVX2: ## BB#0:
+; AVX2-NEXT: pushq %rbp
+; AVX2-NEXT: Ltmp0:
+; AVX2-NEXT: .cfi_def_cfa_offset 16
+; AVX2-NEXT: Ltmp1:
+; AVX2-NEXT: .cfi_offset %rbp, -16
+; AVX2-NEXT: movq %rsp, %rbp
+; AVX2-NEXT: Ltmp2:
+; AVX2-NEXT: .cfi_def_cfa_register %rbp
+; AVX2-NEXT: andq $-32, %rsp
+; AVX2-NEXT: subq $32, %rsp
+; AVX2-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[1,1,2,3]
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero,xmm8[2],zero,zero,zero,xmm8[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm8, %xmm8
+; AVX2-NEXT: vpsrad $31, %xmm8, %xmm8
+; AVX2-NEXT: vpmovsxdq %xmm8, %ymm8
+; AVX2-NEXT: vmaskmovpd 32(%rsi), %ymm8, %ymm9
+; AVX2-NEXT: vpshufd {{.*#+}} xmm10 = xmm0[2,3,0,1]
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero,xmm10[2],zero,zero,zero,xmm10[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm10, %xmm10
+; AVX2-NEXT: vpsrad $31, %xmm10, %xmm10
+; AVX2-NEXT: vpmovsxdq %xmm10, %ymm10
+; AVX2-NEXT: vmaskmovpd 64(%rsi), %ymm10, %ymm11
+; AVX2-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[3,1,2,3]
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm12 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero,xmm12[2],zero,zero,zero,xmm12[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm12, %xmm12
+; AVX2-NEXT: vpsrad $31, %xmm12, %xmm12
+; AVX2-NEXT: vpmovsxdq %xmm12, %ymm12
+; AVX2-NEXT: vmaskmovpd 96(%rsi), %ymm12, %ymm13
+; AVX2-NEXT: vblendvpd %ymm8, %ymm9, %ymm2, %ymm8
+; AVX2-NEXT: vblendvpd %ymm10, %ymm11, %ymm3, %ymm9
+; AVX2-NEXT: vblendvpd %ymm12, %ymm13, %ymm4, %ymm11
+; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm2
+; AVX2-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[1,1,2,3]
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero,xmm3[2],zero,zero,zero,xmm3[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm3, %xmm3
+; AVX2-NEXT: vpsrad $31, %xmm3, %xmm3
+; AVX2-NEXT: vpmovsxdq %xmm3, %ymm3
+; AVX2-NEXT: vmaskmovpd 160(%rsi), %ymm3, %ymm10
+; AVX2-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[2,3,0,1]
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero,xmm4[2],zero,zero,zero,xmm4[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm4, %xmm4
+; AVX2-NEXT: vpsrad $31, %xmm4, %xmm4
+; AVX2-NEXT: vpmovsxdq %xmm4, %ymm4
+; AVX2-NEXT: vmaskmovpd 192(%rsi), %ymm4, %ymm12
+; AVX2-NEXT: vblendvpd %ymm3, %ymm10, %ymm6, %ymm3
+; AVX2-NEXT: vmovapd 16(%rbp), %ymm6
+; AVX2-NEXT: vblendvpd %ymm4, %ymm12, %ymm7, %ymm4
+; AVX2-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[3,1,2,3]
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero,xmm7[2],zero,zero,zero,xmm7[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm7, %xmm7
+; AVX2-NEXT: vpsrad $31, %xmm7, %xmm7
+; AVX2-NEXT: vpmovsxdq %xmm7, %ymm7
+; AVX2-NEXT: vmaskmovpd 224(%rsi), %ymm7, %ymm10
+; AVX2-NEXT: vblendvpd %ymm7, %ymm10, %ymm6, %ymm6
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm0, %xmm0
+; AVX2-NEXT: vpsrad $31, %xmm0, %xmm0
+; AVX2-NEXT: vpmovsxdq %xmm0, %ymm0
+; AVX2-NEXT: vmaskmovpd (%rsi), %ymm0, %ymm7
+; AVX2-NEXT: vblendvpd %ymm0, %ymm7, %ymm1, %ymm0
+; AVX2-NEXT: vpmovzxbd {{.*#+}} xmm1 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero
+; AVX2-NEXT: vpslld $31, %xmm1, %xmm1
+; AVX2-NEXT: vpsrad $31, %xmm1, %xmm1
+; AVX2-NEXT: vpmovsxdq %xmm1, %ymm1
+; AVX2-NEXT: vmaskmovpd 128(%rsi), %ymm1, %ymm2
+; AVX2-NEXT: vblendvpd %ymm1, %ymm2, %ymm5, %ymm1
+; AVX2-NEXT: vmovapd %ymm1, 128(%rdi)
+; AVX2-NEXT: vmovapd %ymm0, (%rdi)
+; AVX2-NEXT: vmovapd %ymm6, 224(%rdi)
+; AVX2-NEXT: vmovapd %ymm4, 192(%rdi)
+; AVX2-NEXT: vmovapd %ymm3, 160(%rdi)
+; AVX2-NEXT: vmovapd %ymm11, 96(%rdi)
+; AVX2-NEXT: vmovapd %ymm9, 64(%rdi)
+; AVX2-NEXT: vmovapd %ymm8, 32(%rdi)
+; AVX2-NEXT: movq %rdi, %rax
+; AVX2-NEXT: movq %rbp, %rsp
+; AVX2-NEXT: popq %rbp
+; AVX2-NEXT: vzeroupper
+; AVX2-NEXT: retq
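+; Note on the AVX2 sequence above: without AVX512 mask registers, each
+; 4 x i1 slice of the mask is expanded to a 4 x i64 vector mask in a ymm
+; register: vpshufd positions the relevant bytes, vpmovzxbd widens them to
+; dwords, vpslld $31 followed by vpsrad $31 replicates bit 0 across each
+; dword, and vpmovsxdq widens to all-ones/all-zeros qword lanes for
+; vmaskmovpd. The pass-through value is then applied with a separate
+; vblendvpd. The prologue realigns the stack (andq $-32) because the eighth
+; pass-through ymm operand is passed on the stack at 16(%rbp), and the
+; 256-byte result is returned indirectly through %rdi (movq %rdi, %rax).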
+;