diff options
Diffstat (limited to 'llvm/test/CodeGen/X86/clz.ll')
-rw-r--r-- | llvm/test/CodeGen/X86/clz.ll | 93 |
1 file changed, 93 insertions, 0 deletions
diff --git a/llvm/test/CodeGen/X86/clz.ll b/llvm/test/CodeGen/X86/clz.ll
index cffc6732728..e9f59944adc 100644
--- a/llvm/test/CodeGen/X86/clz.ll
+++ b/llvm/test/CodeGen/X86/clz.ll
@@ -778,3 +778,96 @@ define i32 @ctlz_bsr_zero_test(i32 %n) {
 %bsr = xor i32 %ctlz, 31
 ret i32 %bsr
 }
+
+define i8 @cttz_i8_knownbits(i8 %x) {
+; X32-LABEL: cttz_i8_knownbits:
+; X32: # BB#0:
+; X32-NEXT: movb {{[0-9]+}}(%esp), %al
+; X32-NEXT: orb $2, %al
+; X32-NEXT: movzbl %al, %eax
+; X32-NEXT: bsfl %eax, %eax
+; X32-NEXT: andb $1, %al
+; X32-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X32-NEXT: retl
+;
+; X64-LABEL: cttz_i8_knownbits:
+; X64: # BB#0:
+; X64-NEXT: orb $2, %dil
+; X64-NEXT: movzbl %dil, %eax
+; X64-NEXT: bsfl %eax, %eax
+; X64-NEXT: andb $1, %al
+; X64-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X64-NEXT: retq
+;
+; X32-CLZ-LABEL: cttz_i8_knownbits:
+; X32-CLZ: # BB#0:
+; X32-CLZ-NEXT: movb {{[0-9]+}}(%esp), %al
+; X32-CLZ-NEXT: orb $2, %al
+; X32-CLZ-NEXT: movzbl %al, %eax
+; X32-CLZ-NEXT: tzcntl %eax, %eax
+; X32-CLZ-NEXT: andb $1, %al
+; X32-CLZ-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X32-CLZ-NEXT: retl
+;
+; X64-CLZ-LABEL: cttz_i8_knownbits:
+; X64-CLZ: # BB#0:
+; X64-CLZ-NEXT: orb $2, %dil
+; X64-CLZ-NEXT: movzbl %dil, %eax
+; X64-CLZ-NEXT: tzcntl %eax, %eax
+; X64-CLZ-NEXT: andb $1, %al
+; X64-CLZ-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X64-CLZ-NEXT: retq
+ %x2 = or i8 %x, 2
+ %tmp = call i8 @llvm.cttz.i8(i8 %x2, i1 true )
+ %tmp2 = and i8 %tmp, 1
+ ret i8 %tmp2
+}
+
+define i8 @ctlz_i8_knownbits(i8 %x) {
+; X32-LABEL: ctlz_i8_knownbits:
+; X32: # BB#0:
+; X32-NEXT: movb {{[0-9]+}}(%esp), %al
+; X32-NEXT: orb $64, %al
+; X32-NEXT: movzbl %al, %eax
+; X32-NEXT: bsrl %eax, %eax
+; X32-NEXT: notl %eax
+; X32-NEXT: andb $1, %al
+; X32-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X32-NEXT: retl
+;
+; X64-LABEL: ctlz_i8_knownbits:
+; X64: # BB#0:
+; X64-NEXT: orb $64, %dil
+; X64-NEXT: movzbl %dil, %eax
+; X64-NEXT: bsrl %eax, %eax
+; X64-NEXT: notl %eax
+; X64-NEXT: andb $1, %al
+; X64-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X64-NEXT: retq
+;
+; X32-CLZ-LABEL: ctlz_i8_knownbits:
+; X32-CLZ: # BB#0:
+; X32-CLZ-NEXT: movb {{[0-9]+}}(%esp), %al
+; X32-CLZ-NEXT: orb $64, %al
+; X32-CLZ-NEXT: movzbl %al, %eax
+; X32-CLZ-NEXT: lzcntl %eax, %eax
+; X32-CLZ-NEXT: addl $-24, %eax
+; X32-CLZ-NEXT: andb $1, %al
+; X32-CLZ-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X32-CLZ-NEXT: retl
+;
+; X64-CLZ-LABEL: ctlz_i8_knownbits:
+; X64-CLZ: # BB#0:
+; X64-CLZ-NEXT: orb $64, %dil
+; X64-CLZ-NEXT: movzbl %dil, %eax
+; X64-CLZ-NEXT: lzcntl %eax, %eax
+; X64-CLZ-NEXT: addl $-24, %eax
+; X64-CLZ-NEXT: andb $1, %al
+; X64-CLZ-NEXT: # kill: %AL<def> %AL<kill> %EAX<kill>
+; X64-CLZ-NEXT: retq
+
+ %x2 = or i8 %x, 64
+ %tmp = call i8 @llvm.ctlz.i8(i8 %x2, i1 true )
+ %tmp2 = and i8 %tmp, 1
+ ret i8 %tmp2
+}