Diffstat (limited to 'llvm/test/CodeGen/X86/vector-reduce-smax-widen.ll')
| -rw-r--r-- | llvm/test/CodeGen/X86/vector-reduce-smax-widen.ll | 64 |
1 file changed, 27 insertions, 37 deletions
diff --git a/llvm/test/CodeGen/X86/vector-reduce-smax-widen.ll b/llvm/test/CodeGen/X86/vector-reduce-smax-widen.ll
index 1c18643b125..f1eff5e7469 100644
--- a/llvm/test/CodeGen/X86/vector-reduce-smax-widen.ll
+++ b/llvm/test/CodeGen/X86/vector-reduce-smax-widen.ll
@@ -149,13 +149,11 @@ define i64 @test_v4i64(<4 x i64> %a0) {
 ; AVX1-LABEL: test_v4i64:
 ; AVX1:       # %bb.0:
 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
-; AVX1-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm2
-; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm3
-; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm3, %ymm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
+; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
+; AVX1-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
 ; AVX1-NEXT:    vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
 ; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
+; AVX1-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
 ; AVX1-NEXT:    vmovq %xmm0, %rax
 ; AVX1-NEXT:    vzeroupper
 ; AVX1-NEXT:    retq
@@ -320,20 +318,17 @@ define i64 @test_v8i64(<8 x i64> %a0) {
 ;
 ; AVX1-LABEL: test_v8i64:
 ; AVX1:       # %bb.0:
-; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
-; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
-; AVX1-NEXT:    vpcmpgtq %xmm2, %xmm3, %xmm2
-; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm3
-; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm3, %ymm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
-; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
 ; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
-; AVX1-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm3
-; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm2, %ymm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
+; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
+; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm4
+; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm4, %xmm5
+; AVX1-NEXT:    vblendvpd %xmm5, %xmm4, %xmm3, %xmm3
+; AVX1-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
+; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm0, %xmm1
+; AVX1-NEXT:    vblendvpd %xmm1, %xmm0, %xmm3, %xmm0
 ; AVX1-NEXT:    vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
 ; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
+; AVX1-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
 ; AVX1-NEXT:    vmovq %xmm0, %rax
 ; AVX1-NEXT:    vzeroupper
 ; AVX1-NEXT:    retq
@@ -611,32 +606,27 @@ define i64 @test_v16i64(<16 x i64> %a0) {
 ;
 ; AVX1-LABEL: test_v16i64:
 ; AVX1:       # %bb.0:
-; AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm4
+; AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm11
 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm5
-; AVX1-NEXT:    vpcmpgtq %xmm4, %xmm5, %xmm4
-; AVX1-NEXT:    vpcmpgtq %xmm2, %xmm0, %xmm5
-; AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm5, %ymm4
-; AVX1-NEXT:    vblendvpd %ymm4, %ymm0, %ymm2, %ymm0
-; AVX1-NEXT:    vextractf128 $1, %ymm3, %xmm2
-; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm4
-; AVX1-NEXT:    vpcmpgtq %xmm2, %xmm4, %xmm2
+; AVX1-NEXT:    vpcmpgtq %xmm11, %xmm5, %xmm8
+; AVX1-NEXT:    vextractf128 $1, %ymm3, %xmm7
+; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm6
+; AVX1-NEXT:    vpcmpgtq %xmm7, %xmm6, %xmm9
+; AVX1-NEXT:    vpcmpgtq %xmm2, %xmm0, %xmm10
 ; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm1, %xmm4
-; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm4, %ymm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm1, %ymm3, %ymm1
-; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm2
-; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
-; AVX1-NEXT:    vpcmpgtq %xmm2, %xmm3, %xmm2
-; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm3
-; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm3, %ymm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
-; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm1
+; AVX1-NEXT:    vblendvpd %xmm4, %xmm1, %xmm3, %xmm1
+; AVX1-NEXT:    vblendvpd %xmm10, %xmm0, %xmm2, %xmm0
 ; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
-; AVX1-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm3
-; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm2, %ymm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
+; AVX1-NEXT:    vblendvpd %xmm9, %xmm6, %xmm7, %xmm3
+; AVX1-NEXT:    vblendvpd %xmm8, %xmm5, %xmm11, %xmm4
+; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm4, %xmm5
+; AVX1-NEXT:    vblendvpd %xmm5, %xmm4, %xmm3, %xmm3
+; AVX1-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
+; AVX1-NEXT:    vpcmpgtq %xmm3, %xmm0, %xmm1
+; AVX1-NEXT:    vblendvpd %xmm1, %xmm0, %xmm3, %xmm0
 ; AVX1-NEXT:    vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
 ; AVX1-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
-; AVX1-NEXT:    vblendvpd %ymm2, %ymm0, %ymm1, %ymm0
+; AVX1-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
 ; AVX1-NEXT:    vmovq %xmm0, %rax
 ; AVX1-NEXT:    vzeroupper
 ; AVX1-NEXT:    retq
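For context, the CHECK lines above are generated from signed-max reduction functions like the sketch of test_v4i64 below: reduce a <4 x i64> vector to its largest element. The exact intrinsic name and mangling in the checked-in test may differ by LLVM revision (the experimental reduction intrinsics were renamed over time), so treat this as an illustrative assumption rather than the verbatim test body.

; Hand-written sketch, not the verbatim test: a signed-max reduction of
; <4 x i64> to i64. The updated AVX1 asm above lowers this with 128-bit
; vpcmpgtq + vblendvpd steps instead of building 256-bit ymm masks with
; vinsertf128, since AVX1 has no 256-bit integer compare.
declare i64 @llvm.experimental.vector.reduce.smax.v4i64(<4 x i64>)

define i64 @test_v4i64(<4 x i64> %a0) {
  %1 = call i64 @llvm.experimental.vector.reduce.smax.v4i64(<4 x i64> %a0)
  ret i64 %1
}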

