summary refs log tree commit diff stats
path: root/llvm/test/CodeGen/X86/known-signbits-vector.ll
diff options
context:
space:
mode:
Diffstat (limited to 'llvm/test/CodeGen/X86/known-signbits-vector.ll')
-rw-r--r-- llvm/test/CodeGen/X86/known-signbits-vector.ll 40
1 file changed, 19 insertions(+), 21 deletions(-)
diff --git a/llvm/test/CodeGen/X86/known-signbits-vector.ll b/llvm/test/CodeGen/X86/known-signbits-vector.ll
index 57572caf0a2..eacdd24404f 100644
--- a/llvm/test/CodeGen/X86/known-signbits-vector.ll
+++ b/llvm/test/CodeGen/X86/known-signbits-vector.ll
@@ -310,24 +310,23 @@ define <4 x float> @signbits_ashr_sext_select_shuffle_sitofp(<4 x i64> %a0, <4 x
; X32-NEXT: movl %esp, %ebp
; X32-NEXT: andl $-16, %esp
; X32-NEXT: subl $16, %esp
-; X32-NEXT: vpmovsxdq 16(%ebp), %xmm3
-; X32-NEXT: vpmovsxdq 8(%ebp), %xmm4
-; X32-NEXT: vextractf128 $1, %ymm2, %xmm5
-; X32-NEXT: vpsrlq $33, %xmm5, %xmm5
+; X32-NEXT: vpmovsxdq 8(%ebp), %xmm3
+; X32-NEXT: vpmovsxdq 16(%ebp), %xmm4
+; X32-NEXT: vpsrlq $33, %xmm2, %xmm5
; X32-NEXT: vmovdqa {{.*#+}} xmm6 = [1073741824,0,1,0]
; X32-NEXT: vpxor %xmm6, %xmm5, %xmm5
; X32-NEXT: vpsubq %xmm6, %xmm5, %xmm5
+; X32-NEXT: vextractf128 $1, %ymm2, %xmm2
; X32-NEXT: vpsrlq $33, %xmm2, %xmm2
; X32-NEXT: vpxor %xmm6, %xmm2, %xmm2
; X32-NEXT: vpsubq %xmm6, %xmm2, %xmm2
-; X32-NEXT: vinsertf128 $1, %xmm5, %ymm2, %ymm2
-; X32-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
-; X32-NEXT: vextractf128 $1, %ymm1, %xmm4
-; X32-NEXT: vextractf128 $1, %ymm0, %xmm5
-; X32-NEXT: vpcmpeqq %xmm4, %xmm5, %xmm4
+; X32-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm6
+; X32-NEXT: vblendvpd %xmm6, %xmm5, %xmm3, %xmm3
+; X32-NEXT: vextractf128 $1, %ymm1, %xmm1
+; X32-NEXT: vextractf128 $1, %ymm0, %xmm0
; X32-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
-; X32-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm0
-; X32-NEXT: vblendvpd %ymm0, %ymm2, %ymm3, %ymm0
+; X32-NEXT: vblendvpd %xmm0, %xmm2, %xmm4, %xmm0
+; X32-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
; X32-NEXT: vmovddup {{.*#+}} ymm0 = ymm0[0,0,2,2]
; X32-NEXT: vextractf128 $1, %ymm0, %xmm1
; X32-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
@@ -339,25 +338,24 @@ define <4 x float> @signbits_ashr_sext_select_shuffle_sitofp(<4 x i64> %a0, <4 x
;
; X64-LABEL: signbits_ashr_sext_select_shuffle_sitofp:
; X64: # %bb.0:
-; X64-NEXT: vextractf128 $1, %ymm2, %xmm4
-; X64-NEXT: vpsrlq $33, %xmm4, %xmm4
+; X64-NEXT: vpsrlq $33, %xmm2, %xmm4
; X64-NEXT: vmovdqa {{.*#+}} xmm5 = [1073741824,1]
; X64-NEXT: vpxor %xmm5, %xmm4, %xmm4
; X64-NEXT: vpsubq %xmm5, %xmm4, %xmm4
+; X64-NEXT: vextractf128 $1, %ymm2, %xmm2
; X64-NEXT: vpsrlq $33, %xmm2, %xmm2
; X64-NEXT: vpxor %xmm5, %xmm2, %xmm2
; X64-NEXT: vpsubq %xmm5, %xmm2, %xmm2
-; X64-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
-; X64-NEXT: vpmovsxdq %xmm3, %xmm4
+; X64-NEXT: vpmovsxdq %xmm3, %xmm5
; X64-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,3,0,1]
; X64-NEXT: vpmovsxdq %xmm3, %xmm3
-; X64-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
-; X64-NEXT: vextractf128 $1, %ymm1, %xmm4
-; X64-NEXT: vextractf128 $1, %ymm0, %xmm5
-; X64-NEXT: vpcmpeqq %xmm4, %xmm5, %xmm4
+; X64-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm6
+; X64-NEXT: vblendvpd %xmm6, %xmm4, %xmm5, %xmm4
+; X64-NEXT: vextractf128 $1, %ymm1, %xmm1
+; X64-NEXT: vextractf128 $1, %ymm0, %xmm0
; X64-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
-; X64-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm0
-; X64-NEXT: vblendvpd %ymm0, %ymm2, %ymm3, %ymm0
+; X64-NEXT: vblendvpd %xmm0, %xmm2, %xmm3, %xmm0
+; X64-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
; X64-NEXT: vmovddup {{.*#+}} ymm0 = ymm0[0,0,2,2]
; X64-NEXT: vextractf128 $1, %ymm0, %xmm1
; X64-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
OpenPOWER on IntegriCloud