Diffstat (limited to 'llvm/test/CodeGen/X86/rotate4.ll')
-rw-r--r-- | llvm/test/CodeGen/X86/rotate4.ll | 36
1 file changed, 18 insertions, 18 deletions
diff --git a/llvm/test/CodeGen/X86/rotate4.ll b/llvm/test/CodeGen/X86/rotate4.ll
index 5347fecfee1..92a98c98e0b 100644
--- a/llvm/test/CodeGen/X86/rotate4.ll
+++ b/llvm/test/CodeGen/X86/rotate4.ll
@@ -65,9 +65,9 @@ define i64 @rotate_left_64(i64 %a, i64 %b) {
 ; X86-NEXT: .cfi_offset %esi, -16
 ; X86-NEXT: .cfi_offset %edi, -12
 ; X86-NEXT: .cfi_offset %ebx, -8
+; X86-NEXT: movb {{[0-9]+}}(%esp), %cl
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
-; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
 ; X86-NEXT: movl %esi, %eax
 ; X86-NEXT: shll %cl, %eax
 ; X86-NEXT: movl %edi, %edx
@@ -78,7 +78,7 @@ define i64 @rotate_left_64(i64 %a, i64 %b) {
 ; X86-NEXT: movl %eax, %edx
 ; X86-NEXT: xorl %eax, %eax
 ; X86-NEXT: .LBB2_2:
-; X86-NEXT: negl %ecx
+; X86-NEXT: negb %cl
 ; X86-NEXT: movl %edi, %ebx
 ; X86-NEXT: shrl %cl, %ebx
 ; X86-NEXT: shrdl %cl, %edi, %esi
@@ -126,9 +126,9 @@ define i64 @rotate_right_64(i64 %a, i64 %b) {
 ; X86-NEXT: .cfi_offset %esi, -16
 ; X86-NEXT: .cfi_offset %edi, -12
 ; X86-NEXT: .cfi_offset %ebx, -8
+; X86-NEXT: movb {{[0-9]+}}(%esp), %cl
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
-; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
 ; X86-NEXT: movl %esi, %edx
 ; X86-NEXT: shrl %cl, %edx
 ; X86-NEXT: movl %edi, %eax
@@ -139,7 +139,7 @@ define i64 @rotate_right_64(i64 %a, i64 %b) {
 ; X86-NEXT: movl %edx, %eax
 ; X86-NEXT: xorl %edx, %edx
 ; X86-NEXT: .LBB3_2:
-; X86-NEXT: negl %ecx
+; X86-NEXT: negb %cl
 ; X86-NEXT: movl %edi, %ebx
 ; X86-NEXT: shll %cl, %ebx
 ; X86-NEXT: shldl %cl, %edi, %esi
@@ -242,7 +242,7 @@ define void @rotate_left_m64(i64 *%pa, i64 %b) {
 ; X86-NEXT: .cfi_offset %edi, -16
 ; X86-NEXT: .cfi_offset %ebx, -12
 ; X86-NEXT: .cfi_offset %ebp, -8
-; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movb {{[0-9]+}}(%esp), %cl
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT: movl (%eax), %edx
 ; X86-NEXT: movl 4(%eax), %ebx
@@ -256,7 +256,7 @@ define void @rotate_left_m64(i64 *%pa, i64 %b) {
 ; X86-NEXT: movl %esi, %edi
 ; X86-NEXT: xorl %esi, %esi
 ; X86-NEXT: .LBB6_2:
-; X86-NEXT: negl %ecx
+; X86-NEXT: negb %cl
 ; X86-NEXT: movl %ebx, %ebp
 ; X86-NEXT: shrl %cl, %ebp
 ; X86-NEXT: shrdl %cl, %ebx, %edx
@@ -312,33 +312,33 @@ define void @rotate_right_m64(i64 *%pa, i64 %b) {
 ; X86-NEXT: .cfi_offset %edi, -16
 ; X86-NEXT: .cfi_offset %ebx, -12
 ; X86-NEXT: .cfi_offset %ebp, -8
-; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movb {{[0-9]+}}(%esp), %cl
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT: movl (%eax), %ebx
-; X86-NEXT: movl 4(%eax), %edx
-; X86-NEXT: movl %edx, %esi
-; X86-NEXT: shrl %cl, %esi
+; X86-NEXT: movl 4(%eax), %esi
+; X86-NEXT: movl %esi, %edx
+; X86-NEXT: shrl %cl, %edx
 ; X86-NEXT: movl %ebx, %edi
-; X86-NEXT: shrdl %cl, %edx, %edi
+; X86-NEXT: shrdl %cl, %esi, %edi
 ; X86-NEXT: testb $32, %cl
 ; X86-NEXT: je .LBB7_2
 ; X86-NEXT: # %bb.1:
-; X86-NEXT: movl %esi, %edi
-; X86-NEXT: xorl %esi, %esi
+; X86-NEXT: movl %edx, %edi
+; X86-NEXT: xorl %edx, %edx
 ; X86-NEXT: .LBB7_2:
-; X86-NEXT: negl %ecx
+; X86-NEXT: negb %cl
 ; X86-NEXT: movl %ebx, %ebp
 ; X86-NEXT: shll %cl, %ebp
-; X86-NEXT: shldl %cl, %ebx, %edx
+; X86-NEXT: shldl %cl, %ebx, %esi
 ; X86-NEXT: testb $32, %cl
 ; X86-NEXT: je .LBB7_4
 ; X86-NEXT: # %bb.3:
-; X86-NEXT: movl %ebp, %edx
+; X86-NEXT: movl %ebp, %esi
 ; X86-NEXT: xorl %ebp, %ebp
 ; X86-NEXT: .LBB7_4:
-; X86-NEXT: orl %edx, %esi
+; X86-NEXT: orl %esi, %edx
 ; X86-NEXT: orl %ebp, %edi
-; X86-NEXT: movl %esi, 4(%eax)
+; X86-NEXT: movl %edx, 4(%eax)
 ; X86-NEXT: movl %edi, (%eax)
 ; X86-NEXT: popl %esi
 ; X86-NEXT: .cfi_def_cfa_offset 16
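
For context, the X86 check lines above are generated from plain rotate-by-variable IR. A minimal sketch of that idiom, assuming the usual masked shl/lshr/or pattern (hypothetical function name, not copied verbatim from rotate4.ll):

  define i64 @rotl64_sketch(i64 %a, i64 %b) {
    ; mask the rotate amount so both shifts stay in range
    %amt = and i64 %b, 63
    %shl = shl i64 %a, %amt
    ; complementary amount: (-b) & 63
    %neg = sub i64 0, %b
    %amt2 = and i64 %neg, 63
    %shr = lshr i64 %a, %amt2
    ; recombine the two halves of the rotate
    %rot = or i64 %shl, %shr
    ret i64 %rot
  }

On 32-bit x86, as the checks show, the i64 rotate is lowered to shll/shrl plus shldl/shrdl sequences with the rotate amount kept in %cl.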