Diffstat (limited to 'llvm/test')
-rw-r--r-- | llvm/test/CodeGen/X86/ipra-reg-alias.ll        |  12
-rw-r--r-- | llvm/test/CodeGen/X86/load-scalar-as-vector.ll |  36
-rw-r--r-- | llvm/test/CodeGen/X86/mul-constant-i8.ll       | 196
-rw-r--r-- | llvm/test/CodeGen/X86/urem-i8-constant.ll      |  13
4 files changed, 128 insertions, 129 deletions
diff --git a/llvm/test/CodeGen/X86/ipra-reg-alias.ll b/llvm/test/CodeGen/X86/ipra-reg-alias.ll
index 76e604c9f62..29b2111bf34 100644
--- a/llvm/test/CodeGen/X86/ipra-reg-alias.ll
+++ b/llvm/test/CodeGen/X86/ipra-reg-alias.ll
@@ -1,19 +1,13 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc -mtriple=x86_64-- -enable-ipra -print-regusage -o - 2>&1 < %s | FileCheck %s --check-prefix=DEBUG
 ; RUN: llc -mtriple=x86_64-- -enable-ipra -o - < %s | FileCheck %s
 
-; Here only CL is clobbered so CH should not be clobbred, but CX, ECX and RCX
-; should be clobbered.
-; DEBUG: main Clobbered Registers: $ah $al $ax $cl $cx $eax $ecx $eflags $hax $rax $rcx
-
 define i8 @main(i8 %X) {
 ; CHECK-LABEL: main:
 ; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: movb $5, %cl
-; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: mulb %cl
+; CHECK-NEXT: # kill: def $edi killed $edi def $rdi
+; CHECK-NEXT: leal (%rdi,%rdi,4), %eax
 ; CHECK-NEXT: addb $5, %al
+; CHECK-NEXT: # kill: def $al killed $al killed $eax
 ; CHECK-NEXT: retq
 %inc = add i8 %X, 1
 %inc2 = mul i8 %inc, 5
diff --git a/llvm/test/CodeGen/X86/load-scalar-as-vector.ll b/llvm/test/CodeGen/X86/load-scalar-as-vector.ll
index 6a7bcf658e4..7bc9ee0fd6b 100644
--- a/llvm/test/CodeGen/X86/load-scalar-as-vector.ll
+++ b/llvm/test/CodeGen/X86/load-scalar-as-vector.ll
@@ -518,33 +518,29 @@ define <2 x i64> @urem_op0_constant(i64* %p) nounwind {
 define <16 x i8> @urem_op1_constant(i8* %p) nounwind {
 ; SSE-LABEL: urem_op1_constant:
 ; SSE: # %bb.0:
-; SSE-NEXT: movb (%rdi), %cl
-; SSE-NEXT: movl %ecx, %eax
-; SSE-NEXT: shrb %al
+; SSE-NEXT: movb (%rdi), %al
+; SSE-NEXT: movl %eax, %ecx
+; SSE-NEXT: shrb %cl
+; SSE-NEXT: movzbl %cl, %ecx
+; SSE-NEXT: imull $49, %ecx, %ecx
+; SSE-NEXT: shrl $10, %ecx
+; SSE-NEXT: imull $42, %ecx, %ecx
+; SSE-NEXT: subb %cl, %al
 ; SSE-NEXT: movzbl %al, %eax
-; SSE-NEXT: imull $49, %eax, %eax
-; SSE-NEXT: shrl $10, %eax
-; SSE-NEXT: movb $42, %dl
-; SSE-NEXT: # kill: def $al killed $al killed $eax
-; SSE-NEXT: mulb %dl
-; SSE-NEXT: subb %al, %cl
-; SSE-NEXT: movzbl %cl, %eax
 ; SSE-NEXT: movd %eax, %xmm0
 ; SSE-NEXT: retq
 ;
 ; AVX-LABEL: urem_op1_constant:
 ; AVX: # %bb.0:
-; AVX-NEXT: movb (%rdi), %cl
-; AVX-NEXT: movl %ecx, %eax
-; AVX-NEXT: shrb %al
+; AVX-NEXT: movb (%rdi), %al
+; AVX-NEXT: movl %eax, %ecx
+; AVX-NEXT: shrb %cl
+; AVX-NEXT: movzbl %cl, %ecx
+; AVX-NEXT: imull $49, %ecx, %ecx
+; AVX-NEXT: shrl $10, %ecx
+; AVX-NEXT: imull $42, %ecx, %ecx
+; AVX-NEXT: subb %cl, %al
 ; AVX-NEXT: movzbl %al, %eax
-; AVX-NEXT: imull $49, %eax, %eax
-; AVX-NEXT: shrl $10, %eax
-; AVX-NEXT: movb $42, %dl
-; AVX-NEXT: # kill: def $al killed $al killed $eax
-; AVX-NEXT: mulb %dl
-; AVX-NEXT: subb %al, %cl
-; AVX-NEXT: movzbl %cl, %eax
 ; AVX-NEXT: vmovd %eax, %xmm0
 ; AVX-NEXT: retq
 %x = load i8, i8* %p
diff --git a/llvm/test/CodeGen/X86/mul-constant-i8.ll b/llvm/test/CodeGen/X86/mul-constant-i8.ll
index 3c636b114f8..c3dcc12f5e9 100644
--- a/llvm/test/CodeGen/X86/mul-constant-i8.ll
+++ b/llvm/test/CodeGen/X86/mul-constant-i8.ll
@@ -25,10 +25,9 @@ define i8 @test_mul_by_2(i8 %x) {
 define i8 @test_mul_by_3(i8 %x) {
 ; X64-LABEL: test_mul_by_3:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $3, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 3
 ret i8 %m
@@ -48,10 +47,9 @@ define i8 @test_mul_by_4(i8 %x) {
 define i8 @test_mul_by_5(i8 %x) {
 ; X64-LABEL: test_mul_by_5:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $5, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 5
 ret i8 %m
@@ -60,10 +58,10 @@ define i8 @test_mul_by_5(i8 %x) {
 define i8 @test_mul_by_6(i8 %x) {
 ; X64-LABEL: test_mul_by_6:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $6, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 6
 ret i8 %m
@@ -72,10 +70,10 @@ define i8 @test_mul_by_6(i8 %x) {
 define i8 @test_mul_by_7(i8 %x) {
 ; X64-LABEL: test_mul_by_7:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $7, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (,%rdi,8), %eax
+; X64-NEXT: subl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 7
 ret i8 %m
@@ -95,10 +93,9 @@ define i8 @test_mul_by_8(i8 %x) {
 define i8 @test_mul_by_9(i8 %x) {
 ; X64-LABEL: test_mul_by_9:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $9, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 9
 ret i8 %m
@@ -107,10 +104,10 @@ define i8 @test_mul_by_9(i8 %x) {
 define i8 @test_mul_by_10(i8 %x) {
 ; X64-LABEL: test_mul_by_10:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $10, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 10
 ret i8 %m
@@ -119,10 +116,10 @@ define i8 @test_mul_by_10(i8 %x) {
 define i8 @test_mul_by_11(i8 %x) {
 ; X64-LABEL: test_mul_by_11:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $11, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rdi,%rax,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 11
 ret i8 %m
@@ -131,10 +128,10 @@ define i8 @test_mul_by_11(i8 %x) {
 define i8 @test_mul_by_12(i8 %x) {
 ; X64-LABEL: test_mul_by_12:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $12, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 12
 ret i8 %m
@@ -143,10 +140,10 @@ define i8 @test_mul_by_12(i8 %x) {
 define i8 @test_mul_by_13(i8 %x) {
 ; X64-LABEL: test_mul_by_13:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $13, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: leal (%rdi,%rax,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 13
 ret i8 %m
@@ -156,9 +153,10 @@ define i8 @test_mul_by_14(i8 %x) {
 ; X64-LABEL: test_mul_by_14:
 ; X64: # %bb.0:
 ; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $14, %cl
+; X64-NEXT: shll $4, %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: subl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 14
 ret i8 %m
@@ -167,10 +165,10 @@ define i8 @test_mul_by_14(i8 %x) {
 define i8 @test_mul_by_15(i8 %x) {
 ; X64-LABEL: test_mul_by_15:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $15, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 15
 ret i8 %m
@@ -190,10 +188,11 @@ define i8 @test_mul_by_16(i8 %x) {
 define i8 @test_mul_by_17(i8 %x) {
 ; X64-LABEL: test_mul_by_17:
 ; X64: # %bb.0:
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
 ; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $17, %cl
+; X64-NEXT: shll $4, %eax
+; X64-NEXT: leal (%rax,%rdi), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 17
 ret i8 %m
@@ -202,10 +201,10 @@ define i8 @test_mul_by_17(i8 %x) {
 define i8 @test_mul_by_18(i8 %x) {
 ; X64-LABEL: test_mul_by_18:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $18, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 18
 ret i8 %m
@@ -214,10 +213,10 @@ define i8 @test_mul_by_18(i8 %x) {
 define i8 @test_mul_by_19(i8 %x) {
 ; X64-LABEL: test_mul_by_19:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $19, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rdi,%rax,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 19
 ret i8 %m
@@ -226,10 +225,10 @@ define i8 @test_mul_by_19(i8 %x) {
 define i8 @test_mul_by_20(i8 %x) {
 ; X64-LABEL: test_mul_by_20:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $20, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 20
 ret i8 %m
@@ -238,10 +237,10 @@ define i8 @test_mul_by_20(i8 %x) {
 define i8 @test_mul_by_21(i8 %x) {
 ; X64-LABEL: test_mul_by_21:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $21, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rdi,%rax,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 21
 ret i8 %m
@@ -250,10 +249,11 @@ define i8 @test_mul_by_21(i8 %x) {
 define i8 @test_mul_by_22(i8 %x) {
 ; X64-LABEL: test_mul_by_22:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $22, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rdi,%rax,4), %eax
+; X64-NEXT: addl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 22
 ret i8 %m
@@ -262,10 +262,11 @@ define i8 @test_mul_by_22(i8 %x) {
 define i8 @test_mul_by_23(i8 %x) {
 ; X64-LABEL: test_mul_by_23:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $23, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
+; X64-NEXT: shll $3, %eax
+; X64-NEXT: subl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 23
 ret i8 %m
@@ -274,10 +275,10 @@ define i8 @test_mul_by_23(i8 %x) {
 define i8 @test_mul_by_24(i8 %x) {
 ; X64-LABEL: test_mul_by_24:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $24, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: shll $3, %edi
+; X64-NEXT: leal (%rdi,%rdi,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 24
 ret i8 %m
@@ -286,10 +287,10 @@ define i8 @test_mul_by_24(i8 %x) {
 define i8 @test_mul_by_25(i8 %x) {
 ; X64-LABEL: test_mul_by_25:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $25, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 25
 ret i8 %m
@@ -298,10 +299,11 @@ define i8 @test_mul_by_25(i8 %x) {
 define i8 @test_mul_by_26(i8 %x) {
 ; X64-LABEL: test_mul_by_26:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $26, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rax,%rax,4), %eax
+; X64-NEXT: addl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 26
 ret i8 %m
@@ -310,10 +312,10 @@ define i8 @test_mul_by_26(i8 %x) {
 define i8 @test_mul_by_27(i8 %x) {
 ; X64-LABEL: test_mul_by_27:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $27, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 27
 ret i8 %m
@@ -322,10 +324,11 @@ define i8 @test_mul_by_27(i8 %x) {
 define i8 @test_mul_by_28(i8 %x) {
 ; X64-LABEL: test_mul_by_28:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $28, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
+; X64-NEXT: addl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 28
 ret i8 %m
@@ -334,10 +337,12 @@ define i8 @test_mul_by_28(i8 %x) {
 define i8 @test_mul_by_29(i8 %x) {
 ; X64-LABEL: test_mul_by_29:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $29, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rax,%rax,2), %eax
+; X64-NEXT: addl %edi, %eax
+; X64-NEXT: addl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 29
 ret i8 %m
@@ -347,9 +352,10 @@ define i8 @test_mul_by_30(i8 %x) {
 ; X64-LABEL: test_mul_by_30:
 ; X64: # %bb.0:
 ; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $30, %cl
+; X64-NEXT: shll $5, %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: subl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 30
 ret i8 %m
@@ -359,9 +365,9 @@ define i8 @test_mul_by_31(i8 %x) {
 ; X64-LABEL: test_mul_by_31:
 ; X64: # %bb.0:
 ; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $31, %cl
+; X64-NEXT: shll $5, %eax
+; X64-NEXT: subl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 31
 ret i8 %m
@@ -381,10 +387,10 @@ define i8 @test_mul_by_32(i8 %x) {
 define i8 @test_mul_by_37(i8 %x) {
 ; X64-LABEL: test_mul_by_37:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $37, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rdi,%rax,4), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 37
 ret i8 %m
@@ -393,10 +399,10 @@ define i8 @test_mul_by_37(i8 %x) {
 define i8 @test_mul_by_41(i8 %x) {
 ; X64-LABEL: test_mul_by_41:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $41, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: leal (%rdi,%rax,8), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 41
 ret i8 %m
@@ -406,9 +412,10 @@ define i8 @test_mul_by_62(i8 %x) {
 ; X64-LABEL: test_mul_by_62:
 ; X64: # %bb.0:
 ; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $62, %cl
+; X64-NEXT: shll $6, %eax
+; X64-NEXT: subl %edi, %eax
+; X64-NEXT: subl %edi, %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 62
 ret i8 %m
@@ -417,10 +424,11 @@ define i8 @test_mul_by_62(i8 %x) {
 define i8 @test_mul_by_66(i8 %x) {
 ; X64-LABEL: test_mul_by_66:
 ; X64: # %bb.0:
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
 ; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $66, %cl
+; X64-NEXT: shll $6, %eax
+; X64-NEXT: leal (%rax,%rdi,2), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 66
 ret i8 %m
@@ -429,10 +437,10 @@ define i8 @test_mul_by_66(i8 %x) {
 define i8 @test_mul_by_73(i8 %x) {
 ; X64-LABEL: test_mul_by_73:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $73, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: leal (%rdi,%rax,8), %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, 73
 ret i8 %m
@@ -452,10 +460,11 @@ define i8 @test_mul_by_520(i8 %x) {
 define i8 @test_mul_by_neg10(i8 %x) {
 ; X64-LABEL: test_mul_by_neg10:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $-10, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: addl %edi, %edi
+; X64-NEXT: leal (%rdi,%rdi,4), %eax
+; X64-NEXT: negl %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, -10
 ret i8 %m
@@ -464,10 +473,11 @@ define i8 @test_mul_by_neg10(i8 %x) {
 define i8 @test_mul_by_neg36(i8 %x) {
 ; X64-LABEL: test_mul_by_neg36:
 ; X64: # %bb.0:
-; X64-NEXT: movl %edi, %eax
-; X64-NEXT: movb $-36, %cl
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: shll $2, %edi
+; X64-NEXT: leal (%rdi,%rdi,8), %eax
+; X64-NEXT: negl %eax
 ; X64-NEXT: # kill: def $al killed $al killed $eax
-; X64-NEXT: mulb %cl
 ; X64-NEXT: retq
 %m = mul i8 %x, -36
 ret i8 %m
diff --git a/llvm/test/CodeGen/X86/urem-i8-constant.ll b/llvm/test/CodeGen/X86/urem-i8-constant.ll
index d4fd92c0464..ae218405c0e 100644
--- a/llvm/test/CodeGen/X86/urem-i8-constant.ll
+++ b/llvm/test/CodeGen/X86/urem-i8-constant.ll
@@ -6,14 +6,13 @@
 define i8 @foo(i8 %tmp325) {
 ; CHECK-LABEL: foo:
 ; CHECK: # %bb.0:
-; CHECK-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
-; CHECK-NEXT: imull $111, %ecx, %eax
-; CHECK-NEXT: shrl $12, %eax
-; CHECK-NEXT: movb $37, %dl
+; CHECK-NEXT: movzbl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: imull $111, %eax, %ecx
+; CHECK-NEXT: shrl $12, %ecx
+; CHECK-NEXT: leal (%ecx,%ecx,8), %edx
+; CHECK-NEXT: leal (%ecx,%edx,4), %ecx
+; CHECK-NEXT: subb %cl, %al
 ; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: mulb %dl
-; CHECK-NEXT: subb %al, %cl
-; CHECK-NEXT: movl %ecx, %eax
 ; CHECK-NEXT: retl
 %t546 = urem i8 %tmp325, 37
 ret i8 %t546
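
For context, the updated FileCheck lines throughout these tests reflect llc lowering small constant i8 multiplies into LEA/shift/add/sub sequences instead of materializing the constant and issuing mulb. A minimal sketch of a standalone reproducer for the multiply-by-11 case, assuming an x86-64 llc with default options (the file and function names below are illustrative, not taken from the tests above; the expected instructions are copied from the test_mul_by_11 checks in mul-constant-i8.ll):

; demo-mul11.ll -- run with: llc -mtriple=x86_64-- demo-mul11.ll -o -
; Expected lowering, per the test_mul_by_11 checks above:
;   leal (%rdi,%rdi,4), %eax   ; eax = x * 5
;   leal (%rdi,%rax,2), %eax   ; eax = x + 2*(x*5) = x * 11
define i8 @mul_by_11(i8 %x) {
  %m = mul i8 %x, 11
  ret i8 %m
}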