Diffstat (limited to 'llvm/test/CodeGen/X86/known-signbits-vector.ll')
-rw-r--r--    llvm/test/CodeGen/X86/known-signbits-vector.ll    34
1 file changed, 11 insertions(+), 23 deletions(-)
diff --git a/llvm/test/CodeGen/X86/known-signbits-vector.ll b/llvm/test/CodeGen/X86/known-signbits-vector.ll
index a003a5520d0..2b9aed15001 100644
--- a/llvm/test/CodeGen/X86/known-signbits-vector.ll
+++ b/llvm/test/CodeGen/X86/known-signbits-vector.ll
@@ -28,19 +28,9 @@ define <4 x float> @signbits_sext_v4i64_sitofp_v4f32(i8 signext %a0, i16 signext
 ; X32-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
 ; X32-NEXT:    movswl {{[0-9]+}}(%esp), %ecx
 ; X32-NEXT:    vmovd %eax, %xmm0
-; X32-NEXT:    sarl $31, %eax
-; X32-NEXT:    vpinsrd $1, %eax, %xmm0, %xmm0
 ; X32-NEXT:    vpinsrd $2, %ecx, %xmm0, %xmm0
-; X32-NEXT:    sarl $31, %ecx
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
-; X32-NEXT:    vmovd %eax, %xmm1
-; X32-NEXT:    sarl $31, %eax
-; X32-NEXT:    vpinsrd $1, %eax, %xmm1, %xmm1
-; X32-NEXT:    vpinsrd $2, %edx, %xmm1, %xmm1
-; X32-NEXT:    sarl $31, %edx
-; X32-NEXT:    vpinsrd $3, %edx, %xmm1, %xmm1
-; X32-NEXT:    vpinsrd $3, %ecx, %xmm0, %xmm0
+; X32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; X32-NEXT:    vpinsrd $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
 ; X32-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
 ; X32-NEXT:    vcvtdq2ps %xmm0, %xmm0
 ; X32-NEXT:    retl
@@ -391,19 +381,17 @@ define <4 x float> @signbits_ashr_sext_select_shuffle_sitofp(<4 x i64> %a0, <4 x
 ; X32-NEXT:    movl %esp, %ebp
 ; X32-NEXT:    andl $-16, %esp
 ; X32-NEXT:    subl $16, %esp
-; X32-NEXT:    vmovdqa {{.*#+}} ymm3 = [33,0,63,0,33,0,63,0]
-; X32-NEXT:    vextractf128 $1, %ymm3, %xmm4
-; X32-NEXT:    vmovdqa {{.*#+}} xmm5 = [0,2147483648,0,2147483648]
-; X32-NEXT:    vpsrlq %xmm4, %xmm5, %xmm6
-; X32-NEXT:    vextractf128 $1, %ymm2, %xmm7
-; X32-NEXT:    vpsrlq %xmm4, %xmm7, %xmm4
-; X32-NEXT:    vpxor %xmm6, %xmm4, %xmm4
-; X32-NEXT:    vpsubq %xmm6, %xmm4, %xmm4
+; X32-NEXT:    vmovdqa {{.*#+}} xmm3 = [33,0,63,0]
+; X32-NEXT:    vmovdqa {{.*#+}} xmm4 = [0,2147483648,0,2147483648]
+; X32-NEXT:    vpsrlq %xmm3, %xmm4, %xmm4
+; X32-NEXT:    vextractf128 $1, %ymm2, %xmm5
 ; X32-NEXT:    vpsrlq %xmm3, %xmm5, %xmm5
+; X32-NEXT:    vpxor %xmm4, %xmm5, %xmm5
+; X32-NEXT:    vpsubq %xmm4, %xmm5, %xmm5
 ; X32-NEXT:    vpsrlq %xmm3, %xmm2, %xmm2
-; X32-NEXT:    vpxor %xmm5, %xmm2, %xmm2
-; X32-NEXT:    vpsubq %xmm5, %xmm2, %xmm2
-; X32-NEXT:    vinsertf128 $1, %xmm4, %ymm2, %ymm2
+; X32-NEXT:    vpxor %xmm4, %xmm2, %xmm2
+; X32-NEXT:    vpsubq %xmm4, %xmm2, %xmm2
+; X32-NEXT:    vinsertf128 $1, %xmm5, %ymm2, %ymm2
 ; X32-NEXT:    vpmovsxdq 8(%ebp), %xmm3
 ; X32-NEXT:    vpmovsxdq 16(%ebp), %xmm4
 ; X32-NEXT:    vinsertf128 $1, %xmm4, %ymm3, %ymm3
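For reference, the first hunk exercises IR of roughly the following shape (a minimal sketch under assumed names and argument types; the actual test body is not shown in this diff). Because every i64 lane is a sign-extension from 32 bits or fewer, the backend's sign-bit analysis (presumably ComputeNumSignBits) can prove each lane survives truncation to i32, so the lowering keeps only the low dword of each lane (the unchanged vshufps/vcvtdq2ps context lines) and the sarl $31 / vpinsrd construction of the high halves becomes dead:

; Hypothetical sketch of the tested pattern; names and the exact
; arguments are assumptions, not the verbatim contents of the test file.
define <4 x float> @sketch_sext_v4i64_sitofp(i8 signext %a0, i16 signext %a1, i32 %a2, i32 %a3) nounwind {
  ; Each lane carries at least 33 sign bits after the sext to i64.
  %e0 = sext i8 %a0 to i64
  %e1 = sext i16 %a1 to i64
  %e2 = sext i32 %a2 to i64
  %e3 = sext i32 %a3 to i64
  %v0 = insertelement <4 x i64> undef, i64 %e0, i32 0
  %v1 = insertelement <4 x i64> %v0, i64 %e1, i32 1
  %v2 = insertelement <4 x i64> %v1, i64 %e2, i32 2
  %v3 = insertelement <4 x i64> %v2, i64 %e3, i32 3
  ; The conversion only demands the low 32 bits of each lane.
  %f = sitofp <4 x i64> %v3 to <4 x float>
  ret <4 x float> %f
}

The second hunk reflects the same kind of simplification in the variable ashr expansion: both 128-bit halves of the <4 x i64> shift use identical amounts, so the rewritten sequence materializes a single xmm shift-amount constant ([33,0,63,0]) and a single shifted sign mask for both halves, instead of extracting per-half amounts from a 256-bit ymm constant.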

