Diffstat (limited to 'llvm/test')
-rw-r--r--  llvm/test/CodeGen/X86/packss.ll  62
1 file changed, 25 insertions, 37 deletions
diff --git a/llvm/test/CodeGen/X86/packss.ll b/llvm/test/CodeGen/X86/packss.ll
index 72842c6976c..44ecb40a7a4 100644
--- a/llvm/test/CodeGen/X86/packss.ll
+++ b/llvm/test/CodeGen/X86/packss.ll
@@ -7,39 +7,39 @@
 define <4 x i32> @trunc_ashr_v4i64(<4 x i64> %a) nounwind {
 ; X32-SSE-LABEL: trunc_ashr_v4i64:
 ; X32-SSE: # BB#0:
-; X32-SSE-NEXT: psrad $31, %xmm0
 ; X32-SSE-NEXT: psrad $31, %xmm1
-; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
-; X32-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
-; X32-SSE-NEXT: punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
+; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
+; X32-SSE-NEXT: psrad $31, %xmm0
+; X32-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
+; X32-SSE-NEXT: packsswb %xmm1, %xmm0
 ; X32-SSE-NEXT: retl
 ;
 ; X64-SSE-LABEL: trunc_ashr_v4i64:
 ; X64-SSE: # BB#0:
-; X64-SSE-NEXT: psrad $31, %xmm0
 ; X64-SSE-NEXT: psrad $31, %xmm1
-; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
-; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
-; X64-SSE-NEXT: punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
+; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
+; X64-SSE-NEXT: psrad $31, %xmm0
+; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
+; X64-SSE-NEXT: packsswb %xmm1, %xmm0
 ; X64-SSE-NEXT: retq
 ;
 ; X64-AVX1-LABEL: trunc_ashr_v4i64:
 ; X64-AVX1: # BB#0:
-; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
-; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm0
+; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
+; X64-AVX1-NEXT: vpsrad $31, %xmm1, %xmm1
+; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
 ; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm0
-; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
-; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
-; X64-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3],xmm0[4,5,6,7]
+; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
+; X64-AVX1-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT: vzeroupper
 ; X64-AVX1-NEXT: retq
 ;
 ; X64-AVX2-LABEL: trunc_ashr_v4i64:
 ; X64-AVX2: # BB#0:
 ; X64-AVX2-NEXT: vpsrad $31, %ymm0, %ymm0
-; X64-AVX2-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[1,3,2,3,5,7,6,7]
-; X64-AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
-; X64-AVX2-NEXT: # kill: %XMM0<def> %XMM0<kill> %YMM0<kill>
+; X64-AVX2-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[1,1,3,3,5,5,7,7]
+; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
+; X64-AVX2-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT: vzeroupper
 ; X64-AVX2-NEXT: retq
   %1 = ashr <4 x i64> %a, <i64 63, i64 63, i64 63, i64 63>
@@ -50,44 +50,32 @@ define <4 x i32> @trunc_ashr_v4i64(<4 x i64> %a) nounwind {
 define <8 x i16> @trunc_ashr_v8i32(<8 x i32> %a) nounwind {
 ; X32-SSE-LABEL: trunc_ashr_v8i32:
 ; X32-SSE: # BB#0:
-; X32-SSE-NEXT: psrad $31, %xmm0
 ; X32-SSE-NEXT: psrad $31, %xmm1
-; X32-SSE-NEXT: pslld $16, %xmm1
-; X32-SSE-NEXT: psrad $16, %xmm1
-; X32-SSE-NEXT: pslld $16, %xmm0
-; X32-SSE-NEXT: psrad $16, %xmm0
-; X32-SSE-NEXT: packssdw %xmm1, %xmm0
+; X32-SSE-NEXT: psrad $31, %xmm0
+; X32-SSE-NEXT: packsswb %xmm1, %xmm0
 ; X32-SSE-NEXT: retl
 ;
 ; X64-SSE-LABEL: trunc_ashr_v8i32:
 ; X64-SSE: # BB#0:
-; X64-SSE-NEXT: psrad $31, %xmm0
 ; X64-SSE-NEXT: psrad $31, %xmm1
-; X64-SSE-NEXT: pslld $16, %xmm1
-; X64-SSE-NEXT: psrad $16, %xmm1
-; X64-SSE-NEXT: pslld $16, %xmm0
-; X64-SSE-NEXT: psrad $16, %xmm0
-; X64-SSE-NEXT: packssdw %xmm1, %xmm0
+; X64-SSE-NEXT: psrad $31, %xmm0
+; X64-SSE-NEXT: packsswb %xmm1, %xmm0
 ; X64-SSE-NEXT: retq
 ;
 ; X64-AVX1-LABEL: trunc_ashr_v8i32:
 ; X64-AVX1: # BB#0:
-; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
-; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm0
+; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
+; X64-AVX1-NEXT: vpsrad $31, %xmm1, %xmm1
 ; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm0
-; X64-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
-; X64-AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
-; X64-AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
-; X64-AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
+; X64-AVX1-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT: vzeroupper
 ; X64-AVX1-NEXT: retq
 ;
 ; X64-AVX2-LABEL: trunc_ashr_v8i32:
 ; X64-AVX2: # BB#0:
 ; X64-AVX2-NEXT: vpsrad $31, %ymm0, %ymm0
-; X64-AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm0[16,17,20,21,24,25,28,29],zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
-; X64-AVX2-NEXT: # kill: %XMM0<def> %XMM0<kill> %YMM0<kill>
+; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
+; X64-AVX2-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
 ; X64-AVX2-NEXT: vzeroupper
 ; X64-AVX2-NEXT: retq
   %1 = ashr <8 x i32> %a, <i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
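The new check lines work because, after an arithmetic shift by 31, every dword is already saturated to all-zeros or all-ones, so the word-to-byte signed-saturating pack (packsswb) reproduces the same 0/-1 pattern one element size down, replacing the old shuffle/blend sequences with a single pack. Below is a minimal C sketch of that reasoning using SSE2 intrinsics; the helper names are hypothetical illustrations of the pattern these tests pin down, not code from this commit.

#include <emmintrin.h>

/* trunc(ashr <8 x i32> a, 31) -> <8 x i16>, mirroring the new
   @trunc_ashr_v8i32 SSE output: psrad + psrad + packsswb. */
static __m128i sign_mask_trunc_v8i32(__m128i lo, __m128i hi) {
  lo = _mm_srai_epi32(lo, 31);     /* psrad $31: each dword -> 0 or -1 */
  hi = _mm_srai_epi32(hi, 31);
  return _mm_packs_epi16(lo, hi);  /* packsswb: 0/-1 words saturate to
                                      0/-1 bytes, so the result reads
                                      back as the correct 0/-1 words */
}

/* trunc(ashr <4 x i64> a, 63) -> <4 x i32>, mirroring @trunc_ashr_v4i64:
   psrad only sign-fills each dword, so pshufd [1,1,3,3] first broadcasts
   each qword's true sign dword (lanes 1 and 3) before the pack. */
static __m128i sign_mask_trunc_v4i64(__m128i lo, __m128i hi) {
  lo = _mm_srai_epi32(lo, 31);
  lo = _mm_shuffle_epi32(lo, _MM_SHUFFLE(3, 3, 1, 1)); /* pshufd [1,1,3,3] */
  hi = _mm_srai_epi32(hi, 31);
  hi = _mm_shuffle_epi32(hi, _MM_SHUFFLE(3, 3, 1, 1));
  return _mm_packs_epi16(lo, hi);  /* packsswb %xmm1, %xmm0 */
}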

