Diffstat (limited to 'llvm/test/CodeGen/X86/vector-compare-any_of.ll')
-rw-r--r--  llvm/test/CodeGen/X86/vector-compare-any_of.ll  | 175
1 file changed, 104 insertions(+), 71 deletions(-)
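
The hunks below update the expected x86 codegen for the sign-extended "any of" reductions in this test: the old idiom of `negl %eax` (set CF when the movemask is nonzero) followed by `sbbl %eax, %eax` is replaced by an `xorl`/`cmpl`/`sbbl` sequence that compares zero against the movemask and lets `sbb` materialize all-ones when any lane is set. For orientation only, here is a rough sketch of IR in the shape these CHECK lines cover; it is not copied from the test file, the function name is made up, and the reduction intrinsic is the current LLVM spelling (the test itself may use shuffle/or chains instead):

; Sketch (assumed): return -1 if any lane of %a0 compares greater than %a1, else 0.
define i32 @sketch_any_of_v4f32(<4 x float> %a0, <4 x float> %a1) {
  %c = fcmp ogt <4 x float> %a0, %a1                         ; per-lane compare
  %s = sext <4 x i1> %c to <4 x i32>                         ; each lane becomes 0 or -1
  %r = call i32 @llvm.vector.reduce.or.v4i32(<4 x i32> %s)   ; OR-reduce: -1 if any lane set
  ret i32 %r
}
declare i32 @llvm.vector.reduce.or.v4i32(<4 x i32>)
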
diff --git a/llvm/test/CodeGen/X86/vector-compare-any_of.ll b/llvm/test/CodeGen/X86/vector-compare-any_of.ll
index b7fa5cb64df..4b2f98b3698 100644
--- a/llvm/test/CodeGen/X86/vector-compare-any_of.ll
+++ b/llvm/test/CodeGen/X86/vector-compare-any_of.ll
@@ -50,8 +50,9 @@ define i64 @test_v4f64_sext(<4 x double> %a0, <4 x double> %a1) {
; AVX-LABEL: test_v4f64_sext:
; AVX: # %bb.0:
; AVX-NEXT: vcmpltpd %ymm0, %ymm1, %ymm0
-; AVX-NEXT: vmovmskpd %ymm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vmovmskpd %ymm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbq %rax, %rax
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
@@ -83,9 +84,10 @@ define i64 @test_v4f64_legal_sext(<4 x double> %a0, <4 x double> %a1) {
; SSE-NEXT: cmpltpd %xmm0, %xmm2
; SSE-NEXT: packssdw %xmm3, %xmm2
; SSE-NEXT: movmskps %xmm2, %eax
-; SSE-NEXT: negl %eax
-; SSE-NEXT: sbbl %eax, %eax
-; SSE-NEXT: cltq
+; SSE-NEXT: xorl %ecx, %ecx
+; SSE-NEXT: cmpl %eax, %ecx
+; SSE-NEXT: sbbl %ecx, %ecx
+; SSE-NEXT: movslq %ecx, %rax
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4f64_legal_sext:
@@ -94,9 +96,10 @@ define i64 @test_v4f64_legal_sext(<4 x double> %a0, <4 x double> %a1) {
; AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
; AVX-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
; AVX-NEXT: vmovmskps %xmm0, %eax
-; AVX-NEXT: negl %eax
-; AVX-NEXT: sbbl %eax, %eax
-; AVX-NEXT: cltq
+; AVX-NEXT: xorl %ecx, %ecx
+; AVX-NEXT: cmpl %eax, %ecx
+; AVX-NEXT: sbbl %ecx, %ecx
+; AVX-NEXT: movslq %ecx, %rax
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
@@ -128,16 +131,18 @@ define i32 @test_v4f32_sext(<4 x float> %a0, <4 x float> %a1) {
; SSE-LABEL: test_v4f32_sext:
; SSE: # %bb.0:
; SSE-NEXT: cmpltps %xmm0, %xmm1
-; SSE-NEXT: movmskps %xmm1, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: movmskps %xmm1, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4f32_sext:
; AVX: # %bb.0:
; AVX-NEXT: vcmpltps %xmm0, %xmm1, %xmm0
-; AVX-NEXT: vmovmskps %xmm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vmovmskps %xmm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbl %eax, %eax
; AVX-NEXT: retq
;
@@ -166,16 +171,18 @@ define i32 @test_v8f32_sext(<8 x float> %a0, <8 x float> %a1) {
; SSE-NEXT: cmpltps %xmm1, %xmm3
; SSE-NEXT: cmpltps %xmm0, %xmm2
; SSE-NEXT: orps %xmm3, %xmm2
-; SSE-NEXT: movmskps %xmm2, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: movmskps %xmm2, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: retq
;
; AVX-LABEL: test_v8f32_sext:
; AVX: # %bb.0:
; AVX-NEXT: vcmpltps %ymm0, %ymm1, %ymm0
-; AVX-NEXT: vmovmskps %ymm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vmovmskps %ymm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbl %eax, %eax
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
@@ -210,8 +217,9 @@ define i32 @test_v8f32_legal_sext(<8 x float> %a0, <8 x float> %a1) {
; SSE-NEXT: cmpltps %xmm1, %xmm3
; SSE-NEXT: cmpltps %xmm0, %xmm2
; SSE-NEXT: packssdw %xmm3, %xmm2
-; SSE-NEXT: pmovmskb %xmm2, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm2, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: retq
;
@@ -220,8 +228,9 @@ define i32 @test_v8f32_legal_sext(<8 x float> %a0, <8 x float> %a1) {
; AVX-NEXT: vcmpltps %ymm0, %ymm1, %ymm0
; AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
; AVX-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
-; AVX-NEXT: vpmovmskb %xmm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vpmovmskb %xmm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbl %eax, %eax
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
@@ -303,8 +312,9 @@ define i64 @test_v4i64_sext(<4 x i64> %a0, <4 x i64> %a1) {
; AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
-; AVX1-NEXT: vmovmskpd %ymm0, %eax
-; AVX1-NEXT: negl %eax
+; AVX1-NEXT: vmovmskpd %ymm0, %ecx
+; AVX1-NEXT: xorl %eax, %eax
+; AVX1-NEXT: cmpl %ecx, %eax
; AVX1-NEXT: sbbq %rax, %rax
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
@@ -312,8 +322,9 @@ define i64 @test_v4i64_sext(<4 x i64> %a0, <4 x i64> %a1) {
; AVX2-LABEL: test_v4i64_sext:
; AVX2: # %bb.0:
; AVX2-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
-; AVX2-NEXT: vmovmskpd %ymm0, %eax
-; AVX2-NEXT: negl %eax
+; AVX2-NEXT: vmovmskpd %ymm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl %ecx, %eax
; AVX2-NEXT: sbbq %rax, %rax
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
@@ -345,9 +356,10 @@ define i64 @test_v4i64_legal_sext(<4 x i64> %a0, <4 x i64> %a1) {
; SSE-NEXT: pcmpgtq %xmm2, %xmm0
; SSE-NEXT: packssdw %xmm1, %xmm0
; SSE-NEXT: movmskps %xmm0, %eax
-; SSE-NEXT: negl %eax
-; SSE-NEXT: sbbl %eax, %eax
-; SSE-NEXT: cltq
+; SSE-NEXT: xorl %ecx, %ecx
+; SSE-NEXT: cmpl %eax, %ecx
+; SSE-NEXT: sbbl %ecx, %ecx
+; SSE-NEXT: movslq %ecx, %rax
; SSE-NEXT: retq
;
; AVX1-LABEL: test_v4i64_legal_sext:
@@ -358,9 +370,10 @@ define i64 @test_v4i64_legal_sext(<4 x i64> %a0, <4 x i64> %a1) {
; AVX1-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackssdw %xmm2, %xmm0, %xmm0
; AVX1-NEXT: vmovmskps %xmm0, %eax
-; AVX1-NEXT: negl %eax
-; AVX1-NEXT: sbbl %eax, %eax
-; AVX1-NEXT: cltq
+; AVX1-NEXT: xorl %ecx, %ecx
+; AVX1-NEXT: cmpl %eax, %ecx
+; AVX1-NEXT: sbbl %ecx, %ecx
+; AVX1-NEXT: movslq %ecx, %rax
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
;
@@ -370,9 +383,10 @@ define i64 @test_v4i64_legal_sext(<4 x i64> %a0, <4 x i64> %a1) {
; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vmovmskps %xmm0, %eax
-; AVX2-NEXT: negl %eax
-; AVX2-NEXT: sbbl %eax, %eax
-; AVX2-NEXT: cltq
+; AVX2-NEXT: xorl %ecx, %ecx
+; AVX2-NEXT: cmpl %eax, %ecx
+; AVX2-NEXT: sbbl %ecx, %ecx
+; AVX2-NEXT: movslq %ecx, %rax
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
@@ -404,16 +418,18 @@ define i32 @test_v4i32_sext(<4 x i32> %a0, <4 x i32> %a1) {
; SSE-LABEL: test_v4i32_sext:
; SSE: # %bb.0:
; SSE-NEXT: pcmpgtd %xmm1, %xmm0
-; SSE-NEXT: movmskps %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: movmskps %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: retq
;
; AVX-LABEL: test_v4i32_sext:
; AVX: # %bb.0:
; AVX-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
-; AVX-NEXT: vmovmskps %xmm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vmovmskps %xmm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbl %eax, %eax
; AVX-NEXT: retq
;
@@ -442,8 +458,9 @@ define i32 @test_v8i32_sext(<8 x i32> %a0, <8 x i32> %a1) {
; SSE-NEXT: pcmpgtd %xmm3, %xmm1
; SSE-NEXT: pcmpgtd %xmm2, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
-; SSE-NEXT: movmskps %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: movmskps %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: retq
;
@@ -454,8 +471,9 @@ define i32 @test_v8i32_sext(<8 x i32> %a0, <8 x i32> %a1) {
; AVX1-NEXT: vpcmpgtd %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
-; AVX1-NEXT: vmovmskps %ymm0, %eax
-; AVX1-NEXT: negl %eax
+; AVX1-NEXT: vmovmskps %ymm0, %ecx
+; AVX1-NEXT: xorl %eax, %eax
+; AVX1-NEXT: cmpl %ecx, %eax
; AVX1-NEXT: sbbl %eax, %eax
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
@@ -463,8 +481,9 @@ define i32 @test_v8i32_sext(<8 x i32> %a0, <8 x i32> %a1) {
; AVX2-LABEL: test_v8i32_sext:
; AVX2: # %bb.0:
; AVX2-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
-; AVX2-NEXT: vmovmskps %ymm0, %eax
-; AVX2-NEXT: negl %eax
+; AVX2-NEXT: vmovmskps %ymm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl %ecx, %eax
; AVX2-NEXT: sbbl %eax, %eax
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
@@ -499,8 +518,9 @@ define i32 @test_v8i32_legal_sext(<8 x i32> %a0, <8 x i32> %a1) {
; SSE-NEXT: pcmpgtd %xmm3, %xmm1
; SSE-NEXT: pcmpgtd %xmm2, %xmm0
; SSE-NEXT: packssdw %xmm1, %xmm0
-; SSE-NEXT: pmovmskb %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: retq
;
@@ -511,8 +531,9 @@ define i32 @test_v8i32_legal_sext(<8 x i32> %a0, <8 x i32> %a1) {
; AVX1-NEXT: vpcmpgtd %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpackssdw %xmm2, %xmm0, %xmm0
-; AVX1-NEXT: vpmovmskb %xmm0, %eax
-; AVX1-NEXT: negl %eax
+; AVX1-NEXT: vpmovmskb %xmm0, %ecx
+; AVX1-NEXT: xorl %eax, %eax
+; AVX1-NEXT: cmpl %ecx, %eax
; AVX1-NEXT: sbbl %eax, %eax
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
@@ -522,8 +543,9 @@ define i32 @test_v8i32_legal_sext(<8 x i32> %a0, <8 x i32> %a1) {
; AVX2-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT: vpackssdw %xmm1, %xmm0, %xmm0
-; AVX2-NEXT: vpmovmskb %xmm0, %eax
-; AVX2-NEXT: negl %eax
+; AVX2-NEXT: vpmovmskb %xmm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl %ecx, %eax
; AVX2-NEXT: sbbl %eax, %eax
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
@@ -559,8 +581,9 @@ define i16 @test_v8i16_sext(<8 x i16> %a0, <8 x i16> %a1) {
; SSE-LABEL: test_v8i16_sext:
; SSE: # %bb.0:
; SSE-NEXT: pcmpgtw %xmm1, %xmm0
-; SSE-NEXT: pmovmskb %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: # kill: def $ax killed $ax killed $eax
; SSE-NEXT: retq
@@ -568,8 +591,9 @@ define i16 @test_v8i16_sext(<8 x i16> %a0, <8 x i16> %a1) {
; AVX-LABEL: test_v8i16_sext:
; AVX: # %bb.0:
; AVX-NEXT: vpcmpgtw %xmm1, %xmm0, %xmm0
-; AVX-NEXT: vpmovmskb %xmm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vpmovmskb %xmm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbl %eax, %eax
; AVX-NEXT: # kill: def $ax killed $ax killed $eax
; AVX-NEXT: retq
@@ -604,8 +628,9 @@ define i16 @test_v16i16_sext(<16 x i16> %a0, <16 x i16> %a1) {
; SSE-NEXT: pcmpgtw %xmm3, %xmm1
; SSE-NEXT: pcmpgtw %xmm2, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
-; SSE-NEXT: pmovmskb %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: # kill: def $ax killed $ax killed $eax
; SSE-NEXT: retq
@@ -632,8 +657,9 @@ define i16 @test_v16i16_sext(<16 x i16> %a0, <16 x i16> %a1) {
; AVX2-LABEL: test_v16i16_sext:
; AVX2: # %bb.0:
; AVX2-NEXT: vpcmpgtw %ymm1, %ymm0, %ymm0
-; AVX2-NEXT: vpmovmskb %ymm0, %eax
-; AVX2-NEXT: negl %eax
+; AVX2-NEXT: vpmovmskb %ymm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl %ecx, %eax
; AVX2-NEXT: sbbl %eax, %eax
; AVX2-NEXT: # kill: def $ax killed $ax killed $eax
; AVX2-NEXT: vzeroupper
@@ -674,8 +700,9 @@ define i16 @test_v16i16_legal_sext(<16 x i16> %a0, <16 x i16> %a1) {
; SSE-NEXT: pcmpgtw %xmm3, %xmm1
; SSE-NEXT: pcmpgtw %xmm2, %xmm0
; SSE-NEXT: packsswb %xmm1, %xmm0
-; SSE-NEXT: pmovmskb %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: # kill: def $ax killed $ax killed $eax
; SSE-NEXT: retq
@@ -687,8 +714,9 @@ define i16 @test_v16i16_legal_sext(<16 x i16> %a0, <16 x i16> %a1) {
; AVX1-NEXT: vpcmpgtw %xmm2, %xmm3, %xmm2
; AVX1-NEXT: vpcmpgtw %xmm1, %xmm0, %xmm0
; AVX1-NEXT: vpacksswb %xmm2, %xmm0, %xmm0
-; AVX1-NEXT: vpmovmskb %xmm0, %eax
-; AVX1-NEXT: negl %eax
+; AVX1-NEXT: vpmovmskb %xmm0, %ecx
+; AVX1-NEXT: xorl %eax, %eax
+; AVX1-NEXT: cmpl %ecx, %eax
; AVX1-NEXT: sbbl %eax, %eax
; AVX1-NEXT: # kill: def $ax killed $ax killed $eax
; AVX1-NEXT: vzeroupper
@@ -699,8 +727,9 @@ define i16 @test_v16i16_legal_sext(<16 x i16> %a0, <16 x i16> %a1) {
; AVX2-NEXT: vpcmpgtw %ymm1, %ymm0, %ymm0
; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT: vpacksswb %xmm1, %xmm0, %xmm0
-; AVX2-NEXT: vpmovmskb %xmm0, %eax
-; AVX2-NEXT: negl %eax
+; AVX2-NEXT: vpmovmskb %xmm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl %ecx, %eax
; AVX2-NEXT: sbbl %eax, %eax
; AVX2-NEXT: # kill: def $ax killed $ax killed $eax
; AVX2-NEXT: vzeroupper
@@ -742,8 +771,9 @@ define i8 @test_v16i8_sext(<16 x i8> %a0, <16 x i8> %a1) {
; SSE-LABEL: test_v16i8_sext:
; SSE: # %bb.0:
; SSE-NEXT: pcmpgtb %xmm1, %xmm0
-; SSE-NEXT: pmovmskb %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: retq
@@ -751,8 +781,9 @@ define i8 @test_v16i8_sext(<16 x i8> %a0, <16 x i8> %a1) {
; AVX-LABEL: test_v16i8_sext:
; AVX: # %bb.0:
; AVX-NEXT: vpcmpgtb %xmm1, %xmm0, %xmm0
-; AVX-NEXT: vpmovmskb %xmm0, %eax
-; AVX-NEXT: negl %eax
+; AVX-NEXT: vpmovmskb %xmm0, %ecx
+; AVX-NEXT: xorl %eax, %eax
+; AVX-NEXT: cmpl %ecx, %eax
; AVX-NEXT: sbbl %eax, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: retq
@@ -791,8 +822,9 @@ define i8 @test_v32i8_sext(<32 x i8> %a0, <32 x i8> %a1) {
; SSE-NEXT: pcmpgtb %xmm3, %xmm1
; SSE-NEXT: pcmpgtb %xmm2, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
-; SSE-NEXT: pmovmskb %xmm0, %eax
-; SSE-NEXT: negl %eax
+; SSE-NEXT: pmovmskb %xmm0, %ecx
+; SSE-NEXT: xorl %eax, %eax
+; SSE-NEXT: cmpl %ecx, %eax
; SSE-NEXT: sbbl %eax, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: retq
@@ -821,8 +853,9 @@ define i8 @test_v32i8_sext(<32 x i8> %a0, <32 x i8> %a1) {
; AVX2-LABEL: test_v32i8_sext:
; AVX2: # %bb.0:
; AVX2-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0
-; AVX2-NEXT: vpmovmskb %ymm0, %eax
-; AVX2-NEXT: negl %eax
+; AVX2-NEXT: vpmovmskb %ymm0, %ecx
+; AVX2-NEXT: xorl %eax, %eax
+; AVX2-NEXT: cmpl %ecx, %eax
; AVX2-NEXT: sbbl %eax, %eax
; AVX2-NEXT: # kill: def $al killed $al killed $eax
; AVX2-NEXT: vzeroupper