Diffstat (limited to 'llvm/test/CodeGen/X86/memcmp.ll')
-rw-r--r-- | llvm/test/CodeGen/X86/memcmp.ll | 216
1 file changed, 108 insertions, 108 deletions
diff --git a/llvm/test/CodeGen/X86/memcmp.ll b/llvm/test/CodeGen/X86/memcmp.ll index 84fd45b0a08..ed7f496ee34 100644 --- a/llvm/test/CodeGen/X86/memcmp.ll +++ b/llvm/test/CodeGen/X86/memcmp.ll @@ -15,12 +15,12 @@ declare i32 @memcmp(i8*, i8*, i64) define i32 @length0(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length0: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: xorl %eax, %eax ; X86-NEXT: retl ; ; X64-LABEL: length0: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: xorl %eax, %eax ; X64-NEXT: retq %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 0) nounwind @@ -29,12 +29,12 @@ define i32 @length0(i8* %X, i8* %Y) nounwind { define i1 @length0_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length0_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movb $1, %al ; X86-NEXT: retl ; ; X64-LABEL: length0_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movb $1, %al ; X64-NEXT: retq %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 0) nounwind @@ -44,7 +44,7 @@ define i1 @length0_eq(i8* %X, i8* %Y) nounwind { define i32 @length2(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length2: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movzwl (%ecx), %ecx @@ -57,7 +57,7 @@ define i32 @length2(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length2: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movzwl (%rdi), %eax ; X64-NEXT: movzwl (%rsi), %ecx ; X64-NEXT: rolw $8, %ax @@ -72,7 +72,7 @@ define i32 @length2(i8* %X, i8* %Y) nounwind { define i1 @length2_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length2_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movzwl (%ecx), %ecx @@ -81,7 +81,7 @@ define i1 @length2_eq(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length2_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movzwl (%rdi), %eax ; X64-NEXT: cmpw (%rsi), %ax ; X64-NEXT: sete %al @@ -93,7 +93,7 @@ define i1 @length2_eq(i8* %X, i8* %Y) nounwind { define i1 @length2_eq_const(i8* %X) nounwind { ; X86-LABEL: length2_eq_const: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movzwl (%eax), %eax ; X86-NEXT: cmpl $12849, %eax # imm = 0x3231 @@ -101,7 +101,7 @@ define i1 @length2_eq_const(i8* %X) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length2_eq_const: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movzwl (%rdi), %eax ; X64-NEXT: cmpl $12849, %eax # imm = 0x3231 ; X64-NEXT: setne %al @@ -113,7 +113,7 @@ define i1 @length2_eq_const(i8* %X) nounwind { define i1 @length2_eq_nobuiltin_attr(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length2_eq_nobuiltin_attr: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $2 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -125,7 +125,7 @@ define i1 @length2_eq_nobuiltin_attr(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length2_eq_nobuiltin_attr: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: pushq %rax ; X64-NEXT: movl $2, %edx ; X64-NEXT: callq memcmp @@ -140,7 +140,7 @@ define i1 @length2_eq_nobuiltin_attr(i8* %X, i8* %Y) nounwind { define i32 @length3(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length3: -; X86: # BB#0: # %loadbb +; X86: # %bb.0: # %loadbb ; X86-NEXT: pushl %esi ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax @@ -150,7 +150,7 @@ define i32 @length3(i8* %X, i8* %Y) nounwind { ; X86-NEXT: rolw $8, %si ; X86-NEXT: cmpw %si, %dx ; X86-NEXT: jne .LBB6_1 -; X86-NEXT: # BB#2: # %loadbb1 +; X86-NEXT: # %bb.2: # %loadbb1 ; X86-NEXT: 
movzbl 2(%eax), %eax ; X86-NEXT: movzbl 2(%ecx), %ecx ; X86-NEXT: subl %ecx, %eax @@ -164,14 +164,14 @@ define i32 @length3(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length3: -; X64: # BB#0: # %loadbb +; X64: # %bb.0: # %loadbb ; X64-NEXT: movzwl (%rdi), %eax ; X64-NEXT: movzwl (%rsi), %ecx ; X64-NEXT: rolw $8, %ax ; X64-NEXT: rolw $8, %cx ; X64-NEXT: cmpw %cx, %ax ; X64-NEXT: jne .LBB6_1 -; X64-NEXT: # BB#2: # %loadbb1 +; X64-NEXT: # %bb.2: # %loadbb1 ; X64-NEXT: movzbl 2(%rdi), %eax ; X64-NEXT: movzbl 2(%rsi), %ecx ; X64-NEXT: subl %ecx, %eax @@ -187,13 +187,13 @@ define i32 @length3(i8* %X, i8* %Y) nounwind { define i1 @length3_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length3_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movzwl (%ecx), %edx ; X86-NEXT: cmpw (%eax), %dx ; X86-NEXT: jne .LBB7_2 -; X86-NEXT: # BB#1: # %loadbb1 +; X86-NEXT: # %bb.1: # %loadbb1 ; X86-NEXT: movb 2(%ecx), %dl ; X86-NEXT: xorl %ecx, %ecx ; X86-NEXT: cmpb 2(%eax), %dl @@ -206,11 +206,11 @@ define i1 @length3_eq(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length3_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movzwl (%rdi), %eax ; X64-NEXT: cmpw (%rsi), %ax ; X64-NEXT: jne .LBB7_2 -; X64-NEXT: # BB#1: # %loadbb1 +; X64-NEXT: # %bb.1: # %loadbb1 ; X64-NEXT: movb 2(%rdi), %cl ; X64-NEXT: xorl %eax, %eax ; X64-NEXT: cmpb 2(%rsi), %cl @@ -228,7 +228,7 @@ define i1 @length3_eq(i8* %X, i8* %Y) nounwind { define i32 @length4(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length4: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl (%ecx), %ecx @@ -242,7 +242,7 @@ define i32 @length4(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length4: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movl (%rdi), %ecx ; X64-NEXT: movl (%rsi), %edx ; X64-NEXT: bswapl %ecx @@ -258,7 +258,7 @@ define i32 @length4(i8* %X, i8* %Y) nounwind { define i1 @length4_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length4_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl (%ecx), %ecx @@ -267,7 +267,7 @@ define i1 @length4_eq(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length4_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movl (%rdi), %eax ; X64-NEXT: cmpl (%rsi), %eax ; X64-NEXT: setne %al @@ -279,14 +279,14 @@ define i1 @length4_eq(i8* %X, i8* %Y) nounwind { define i1 @length4_eq_const(i8* %X) nounwind { ; X86-LABEL: length4_eq_const: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: cmpl $875770417, (%eax) # imm = 0x34333231 ; X86-NEXT: sete %al ; X86-NEXT: retl ; ; X64-LABEL: length4_eq_const: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: cmpl $875770417, (%rdi) # imm = 0x34333231 ; X64-NEXT: sete %al ; X64-NEXT: retq @@ -297,7 +297,7 @@ define i1 @length4_eq_const(i8* %X) nounwind { define i32 @length5(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length5: -; X86: # BB#0: # %loadbb +; X86: # %bb.0: # %loadbb ; X86-NEXT: pushl %esi ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax @@ -307,7 +307,7 @@ define i32 @length5(i8* %X, i8* %Y) nounwind { ; X86-NEXT: bswapl %esi ; X86-NEXT: cmpl %esi, %edx ; X86-NEXT: jne .LBB11_1 -; X86-NEXT: # BB#2: # %loadbb1 +; X86-NEXT: # %bb.2: # %loadbb1 ; X86-NEXT: movzbl 4(%eax), %eax ; X86-NEXT: movzbl 4(%ecx), %ecx ; X86-NEXT: subl %ecx, %eax @@ -321,14 +321,14 
@@ define i32 @length5(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length5: -; X64: # BB#0: # %loadbb +; X64: # %bb.0: # %loadbb ; X64-NEXT: movl (%rdi), %eax ; X64-NEXT: movl (%rsi), %ecx ; X64-NEXT: bswapl %eax ; X64-NEXT: bswapl %ecx ; X64-NEXT: cmpl %ecx, %eax ; X64-NEXT: jne .LBB11_1 -; X64-NEXT: # BB#2: # %loadbb1 +; X64-NEXT: # %bb.2: # %loadbb1 ; X64-NEXT: movzbl 4(%rdi), %eax ; X64-NEXT: movzbl 4(%rsi), %ecx ; X64-NEXT: subl %ecx, %eax @@ -344,13 +344,13 @@ define i32 @length5(i8* %X, i8* %Y) nounwind { define i1 @length5_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length5_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl (%ecx), %edx ; X86-NEXT: cmpl (%eax), %edx ; X86-NEXT: jne .LBB12_2 -; X86-NEXT: # BB#1: # %loadbb1 +; X86-NEXT: # %bb.1: # %loadbb1 ; X86-NEXT: movb 4(%ecx), %dl ; X86-NEXT: xorl %ecx, %ecx ; X86-NEXT: cmpb 4(%eax), %dl @@ -363,11 +363,11 @@ define i1 @length5_eq(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length5_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movl (%rdi), %eax ; X64-NEXT: cmpl (%rsi), %eax ; X64-NEXT: jne .LBB12_2 -; X64-NEXT: # BB#1: # %loadbb1 +; X64-NEXT: # %bb.1: # %loadbb1 ; X64-NEXT: movb 4(%rdi), %cl ; X64-NEXT: xorl %eax, %eax ; X64-NEXT: cmpb 4(%rsi), %cl @@ -385,7 +385,7 @@ define i1 @length5_eq(i8* %X, i8* %Y) nounwind { define i32 @length8(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length8: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl %esi ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi @@ -395,7 +395,7 @@ define i32 @length8(i8* %X, i8* %Y) nounwind { ; X86-NEXT: bswapl %edx ; X86-NEXT: cmpl %edx, %ecx ; X86-NEXT: jne .LBB13_2 -; X86-NEXT: # BB#1: # %loadbb1 +; X86-NEXT: # %bb.1: # %loadbb1 ; X86-NEXT: movl 4(%esi), %ecx ; X86-NEXT: movl 4(%eax), %edx ; X86-NEXT: bswapl %ecx @@ -413,7 +413,7 @@ define i32 @length8(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length8: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rcx ; X64-NEXT: movq (%rsi), %rdx ; X64-NEXT: bswapq %rcx @@ -429,13 +429,13 @@ define i32 @length8(i8* %X, i8* %Y) nounwind { define i1 @length8_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length8_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl (%ecx), %edx ; X86-NEXT: cmpl (%eax), %edx ; X86-NEXT: jne .LBB14_2 -; X86-NEXT: # BB#1: # %loadbb1 +; X86-NEXT: # %bb.1: # %loadbb1 ; X86-NEXT: movl 4(%ecx), %edx ; X86-NEXT: xorl %ecx, %ecx ; X86-NEXT: cmpl 4(%eax), %edx @@ -448,7 +448,7 @@ define i1 @length8_eq(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length8_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rax ; X64-NEXT: cmpq (%rsi), %rax ; X64-NEXT: sete %al @@ -460,11 +460,11 @@ define i1 @length8_eq(i8* %X, i8* %Y) nounwind { define i1 @length8_eq_const(i8* %X) nounwind { ; X86-LABEL: length8_eq_const: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: cmpl $858927408, (%ecx) # imm = 0x33323130 ; X86-NEXT: jne .LBB15_2 -; X86-NEXT: # BB#1: # %loadbb1 +; X86-NEXT: # %bb.1: # %loadbb1 ; X86-NEXT: xorl %eax, %eax ; X86-NEXT: cmpl $926299444, 4(%ecx) # imm = 0x37363534 ; X86-NEXT: je .LBB15_3 @@ -476,7 +476,7 @@ define i1 @length8_eq_const(i8* %X) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length8_eq_const: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movabsq $3978425819141910832, %rax # imm = 
0x3736353433323130 ; X64-NEXT: cmpq %rax, (%rdi) ; X64-NEXT: setne %al @@ -488,7 +488,7 @@ define i1 @length8_eq_const(i8* %X) nounwind { define i1 @length12_eq(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length12_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $12 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -500,11 +500,11 @@ define i1 @length12_eq(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length12_eq: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rax ; X64-NEXT: cmpq (%rsi), %rax ; X64-NEXT: jne .LBB16_2 -; X64-NEXT: # BB#1: # %loadbb1 +; X64-NEXT: # %bb.1: # %loadbb1 ; X64-NEXT: movl 8(%rdi), %ecx ; X64-NEXT: xorl %eax, %eax ; X64-NEXT: cmpl 8(%rsi), %ecx @@ -522,7 +522,7 @@ define i1 @length12_eq(i8* %X, i8* %Y) nounwind { define i32 @length12(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length12: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $12 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -532,14 +532,14 @@ define i32 @length12(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length12: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rcx ; X64-NEXT: movq (%rsi), %rdx ; X64-NEXT: bswapq %rcx ; X64-NEXT: bswapq %rdx ; X64-NEXT: cmpq %rdx, %rcx ; X64-NEXT: jne .LBB17_2 -; X64-NEXT: # BB#1: # %loadbb1 +; X64-NEXT: # %bb.1: # %loadbb1 ; X64-NEXT: movl 8(%rdi), %ecx ; X64-NEXT: movl 8(%rsi), %edx ; X64-NEXT: bswapl %ecx @@ -562,7 +562,7 @@ define i32 @length12(i8* %X, i8* %Y) nounwind { define i32 @length16(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length16: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $16 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -572,14 +572,14 @@ define i32 @length16(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length16: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rcx ; X64-NEXT: movq (%rsi), %rdx ; X64-NEXT: bswapq %rcx ; X64-NEXT: bswapq %rdx ; X64-NEXT: cmpq %rdx, %rcx ; X64-NEXT: jne .LBB18_2 -; X64-NEXT: # BB#1: # %loadbb1 +; X64-NEXT: # %bb.1: # %loadbb1 ; X64-NEXT: movq 8(%rdi), %rcx ; X64-NEXT: movq 8(%rsi), %rdx ; X64-NEXT: bswapq %rcx @@ -600,7 +600,7 @@ define i32 @length16(i8* %X, i8* %Y) nounwind { define i1 @length16_eq(i8* %x, i8* %y) nounwind { ; X86-NOSSE-LABEL: length16_eq: -; X86-NOSSE: # BB#0: +; X86-NOSSE: # %bb.0: ; X86-NOSSE-NEXT: pushl $0 ; X86-NOSSE-NEXT: pushl $16 ; X86-NOSSE-NEXT: pushl {{[0-9]+}}(%esp) @@ -612,7 +612,7 @@ define i1 @length16_eq(i8* %x, i8* %y) nounwind { ; X86-NOSSE-NEXT: retl ; ; X86-SSE1-LABEL: length16_eq: -; X86-SSE1: # BB#0: +; X86-SSE1: # %bb.0: ; X86-SSE1-NEXT: pushl $0 ; X86-SSE1-NEXT: pushl $16 ; X86-SSE1-NEXT: pushl {{[0-9]+}}(%esp) @@ -624,7 +624,7 @@ define i1 @length16_eq(i8* %x, i8* %y) nounwind { ; X86-SSE1-NEXT: retl ; ; X86-SSE2-LABEL: length16_eq: -; X86-SSE2: # BB#0: +; X86-SSE2: # %bb.0: ; X86-SSE2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-SSE2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-SSE2-NEXT: movdqu (%ecx), %xmm0 @@ -636,7 +636,7 @@ define i1 @length16_eq(i8* %x, i8* %y) nounwind { ; X86-SSE2-NEXT: retl ; ; X64-SSE2-LABEL: length16_eq: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: movdqu (%rdi), %xmm0 ; X64-SSE2-NEXT: movdqu (%rsi), %xmm1 ; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm1 @@ -646,7 +646,7 @@ define i1 @length16_eq(i8* %x, i8* %y) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX-LABEL: length16_eq: -; X64-AVX: # BB#0: +; X64-AVX: # %bb.0: ; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0 ; X64-AVX-NEXT: vpcmpeqb (%rsi), %xmm0, %xmm0 ; X64-AVX-NEXT: vpmovmskb %xmm0, %eax @@ 
-660,7 +660,7 @@ define i1 @length16_eq(i8* %x, i8* %y) nounwind { define i1 @length16_eq_const(i8* %X) nounwind { ; X86-NOSSE-LABEL: length16_eq_const: -; X86-NOSSE: # BB#0: +; X86-NOSSE: # %bb.0: ; X86-NOSSE-NEXT: pushl $0 ; X86-NOSSE-NEXT: pushl $16 ; X86-NOSSE-NEXT: pushl $.L.str @@ -672,7 +672,7 @@ define i1 @length16_eq_const(i8* %X) nounwind { ; X86-NOSSE-NEXT: retl ; ; X86-SSE1-LABEL: length16_eq_const: -; X86-SSE1: # BB#0: +; X86-SSE1: # %bb.0: ; X86-SSE1-NEXT: pushl $0 ; X86-SSE1-NEXT: pushl $16 ; X86-SSE1-NEXT: pushl $.L.str @@ -684,7 +684,7 @@ define i1 @length16_eq_const(i8* %X) nounwind { ; X86-SSE1-NEXT: retl ; ; X86-SSE2-LABEL: length16_eq_const: -; X86-SSE2: # BB#0: +; X86-SSE2: # %bb.0: ; X86-SSE2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-SSE2-NEXT: movdqu (%eax), %xmm0 ; X86-SSE2-NEXT: pcmpeqb {{\.LCPI.*}}, %xmm0 @@ -694,7 +694,7 @@ define i1 @length16_eq_const(i8* %X) nounwind { ; X86-SSE2-NEXT: retl ; ; X64-SSE2-LABEL: length16_eq_const: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: movdqu (%rdi), %xmm0 ; X64-SSE2-NEXT: pcmpeqb {{.*}}(%rip), %xmm0 ; X64-SSE2-NEXT: pmovmskb %xmm0, %eax @@ -703,7 +703,7 @@ define i1 @length16_eq_const(i8* %X) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX-LABEL: length16_eq_const: -; X64-AVX: # BB#0: +; X64-AVX: # %bb.0: ; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0 ; X64-AVX-NEXT: vpcmpeqb {{.*}}(%rip), %xmm0, %xmm0 ; X64-AVX-NEXT: vpmovmskb %xmm0, %eax @@ -719,7 +719,7 @@ define i1 @length16_eq_const(i8* %X) nounwind { define i32 @length24(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length24: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $24 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -729,7 +729,7 @@ define i32 @length24(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length24: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movl $24, %edx ; X64-NEXT: jmp memcmp # TAILCALL %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 24) nounwind @@ -738,7 +738,7 @@ define i32 @length24(i8* %X, i8* %Y) nounwind { define i1 @length24_eq(i8* %x, i8* %y) nounwind { ; X86-LABEL: length24_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $24 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -750,14 +750,14 @@ define i1 @length24_eq(i8* %x, i8* %y) nounwind { ; X86-NEXT: retl ; ; X64-SSE2-LABEL: length24_eq: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: movdqu (%rdi), %xmm0 ; X64-SSE2-NEXT: movdqu (%rsi), %xmm1 ; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm1 ; X64-SSE2-NEXT: pmovmskb %xmm1, %eax ; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-SSE2-NEXT: jne .LBB22_2 -; X64-SSE2-NEXT: # BB#1: # %loadbb1 +; X64-SSE2-NEXT: # %bb.1: # %loadbb1 ; X64-SSE2-NEXT: movq 16(%rdi), %rcx ; X64-SSE2-NEXT: xorl %eax, %eax ; X64-SSE2-NEXT: cmpq 16(%rsi), %rcx @@ -770,13 +770,13 @@ define i1 @length24_eq(i8* %x, i8* %y) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX-LABEL: length24_eq: -; X64-AVX: # BB#0: +; X64-AVX: # %bb.0: ; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0 ; X64-AVX-NEXT: vpcmpeqb (%rsi), %xmm0, %xmm0 ; X64-AVX-NEXT: vpmovmskb %xmm0, %eax ; X64-AVX-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-AVX-NEXT: jne .LBB22_2 -; X64-AVX-NEXT: # BB#1: # %loadbb1 +; X64-AVX-NEXT: # %bb.1: # %loadbb1 ; X64-AVX-NEXT: movq 16(%rdi), %rcx ; X64-AVX-NEXT: xorl %eax, %eax ; X64-AVX-NEXT: cmpq 16(%rsi), %rcx @@ -794,7 +794,7 @@ define i1 @length24_eq(i8* %x, i8* %y) nounwind { define i1 @length24_eq_const(i8* %X) nounwind { ; X86-LABEL: length24_eq_const: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $24 ; 
X86-NEXT: pushl $.L.str @@ -806,13 +806,13 @@ define i1 @length24_eq_const(i8* %X) nounwind { ; X86-NEXT: retl ; ; X64-SSE2-LABEL: length24_eq_const: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: movdqu (%rdi), %xmm0 ; X64-SSE2-NEXT: pcmpeqb {{.*}}(%rip), %xmm0 ; X64-SSE2-NEXT: pmovmskb %xmm0, %eax ; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-SSE2-NEXT: jne .LBB23_2 -; X64-SSE2-NEXT: # BB#1: # %loadbb1 +; X64-SSE2-NEXT: # %bb.1: # %loadbb1 ; X64-SSE2-NEXT: xorl %eax, %eax ; X64-SSE2-NEXT: movabsq $3689065127958034230, %rcx # imm = 0x3332313039383736 ; X64-SSE2-NEXT: cmpq %rcx, 16(%rdi) @@ -825,13 +825,13 @@ define i1 @length24_eq_const(i8* %X) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX-LABEL: length24_eq_const: -; X64-AVX: # BB#0: +; X64-AVX: # %bb.0: ; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0 ; X64-AVX-NEXT: vpcmpeqb {{.*}}(%rip), %xmm0, %xmm0 ; X64-AVX-NEXT: vpmovmskb %xmm0, %eax ; X64-AVX-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-AVX-NEXT: jne .LBB23_2 -; X64-AVX-NEXT: # BB#1: # %loadbb1 +; X64-AVX-NEXT: # %bb.1: # %loadbb1 ; X64-AVX-NEXT: xorl %eax, %eax ; X64-AVX-NEXT: movabsq $3689065127958034230, %rcx # imm = 0x3332313039383736 ; X64-AVX-NEXT: cmpq %rcx, 16(%rdi) @@ -849,7 +849,7 @@ define i1 @length24_eq_const(i8* %X) nounwind { define i32 @length32(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length32: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $32 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -859,7 +859,7 @@ define i32 @length32(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length32: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movl $32, %edx ; X64-NEXT: jmp memcmp # TAILCALL %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 32) nounwind @@ -870,7 +870,7 @@ define i32 @length32(i8* %X, i8* %Y) nounwind { define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X86-NOSSE-LABEL: length32_eq: -; X86-NOSSE: # BB#0: +; X86-NOSSE: # %bb.0: ; X86-NOSSE-NEXT: pushl $0 ; X86-NOSSE-NEXT: pushl $32 ; X86-NOSSE-NEXT: pushl {{[0-9]+}}(%esp) @@ -882,7 +882,7 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X86-NOSSE-NEXT: retl ; ; X86-SSE1-LABEL: length32_eq: -; X86-SSE1: # BB#0: +; X86-SSE1: # %bb.0: ; X86-SSE1-NEXT: pushl $0 ; X86-SSE1-NEXT: pushl $32 ; X86-SSE1-NEXT: pushl {{[0-9]+}}(%esp) @@ -894,7 +894,7 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X86-SSE1-NEXT: retl ; ; X86-SSE2-LABEL: length32_eq: -; X86-SSE2: # BB#0: +; X86-SSE2: # %bb.0: ; X86-SSE2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-SSE2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-SSE2-NEXT: movdqu (%ecx), %xmm0 @@ -903,7 +903,7 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X86-SSE2-NEXT: pmovmskb %xmm1, %edx ; X86-SSE2-NEXT: cmpl $65535, %edx # imm = 0xFFFF ; X86-SSE2-NEXT: jne .LBB25_2 -; X86-SSE2-NEXT: # BB#1: # %loadbb1 +; X86-SSE2-NEXT: # %bb.1: # %loadbb1 ; X86-SSE2-NEXT: movdqu 16(%ecx), %xmm0 ; X86-SSE2-NEXT: movdqu 16(%eax), %xmm1 ; X86-SSE2-NEXT: pcmpeqb %xmm0, %xmm1 @@ -919,14 +919,14 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X86-SSE2-NEXT: retl ; ; X64-SSE2-LABEL: length32_eq: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: movdqu (%rdi), %xmm0 ; X64-SSE2-NEXT: movdqu (%rsi), %xmm1 ; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm1 ; X64-SSE2-NEXT: pmovmskb %xmm1, %eax ; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-SSE2-NEXT: jne .LBB25_2 -; X64-SSE2-NEXT: # BB#1: # %loadbb1 +; X64-SSE2-NEXT: # %bb.1: # %loadbb1 ; X64-SSE2-NEXT: movdqu 16(%rdi), %xmm0 ; X64-SSE2-NEXT: movdqu 16(%rsi), %xmm1 ; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm1 
@@ -942,13 +942,13 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX1-LABEL: length32_eq: -; X64-AVX1: # BB#0: +; X64-AVX1: # %bb.0: ; X64-AVX1-NEXT: vmovdqu (%rdi), %xmm0 ; X64-AVX1-NEXT: vpcmpeqb (%rsi), %xmm0, %xmm0 ; X64-AVX1-NEXT: vpmovmskb %xmm0, %eax ; X64-AVX1-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-AVX1-NEXT: jne .LBB25_2 -; X64-AVX1-NEXT: # BB#1: # %loadbb1 +; X64-AVX1-NEXT: # %bb.1: # %loadbb1 ; X64-AVX1-NEXT: vmovdqu 16(%rdi), %xmm0 ; X64-AVX1-NEXT: vpcmpeqb 16(%rsi), %xmm0, %xmm0 ; X64-AVX1-NEXT: vpmovmskb %xmm0, %ecx @@ -963,7 +963,7 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { ; X64-AVX1-NEXT: retq ; ; X64-AVX2-LABEL: length32_eq: -; X64-AVX2: # BB#0: +; X64-AVX2: # %bb.0: ; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0 ; X64-AVX2-NEXT: vpcmpeqb (%rsi), %ymm0, %ymm0 ; X64-AVX2-NEXT: vpmovmskb %ymm0, %eax @@ -978,7 +978,7 @@ define i1 @length32_eq(i8* %x, i8* %y) nounwind { define i1 @length32_eq_const(i8* %X) nounwind { ; X86-NOSSE-LABEL: length32_eq_const: -; X86-NOSSE: # BB#0: +; X86-NOSSE: # %bb.0: ; X86-NOSSE-NEXT: pushl $0 ; X86-NOSSE-NEXT: pushl $32 ; X86-NOSSE-NEXT: pushl $.L.str @@ -990,7 +990,7 @@ define i1 @length32_eq_const(i8* %X) nounwind { ; X86-NOSSE-NEXT: retl ; ; X86-SSE1-LABEL: length32_eq_const: -; X86-SSE1: # BB#0: +; X86-SSE1: # %bb.0: ; X86-SSE1-NEXT: pushl $0 ; X86-SSE1-NEXT: pushl $32 ; X86-SSE1-NEXT: pushl $.L.str @@ -1002,14 +1002,14 @@ define i1 @length32_eq_const(i8* %X) nounwind { ; X86-SSE1-NEXT: retl ; ; X86-SSE2-LABEL: length32_eq_const: -; X86-SSE2: # BB#0: +; X86-SSE2: # %bb.0: ; X86-SSE2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-SSE2-NEXT: movdqu (%eax), %xmm0 ; X86-SSE2-NEXT: pcmpeqb {{\.LCPI.*}}, %xmm0 ; X86-SSE2-NEXT: pmovmskb %xmm0, %ecx ; X86-SSE2-NEXT: cmpl $65535, %ecx # imm = 0xFFFF ; X86-SSE2-NEXT: jne .LBB26_2 -; X86-SSE2-NEXT: # BB#1: # %loadbb1 +; X86-SSE2-NEXT: # %bb.1: # %loadbb1 ; X86-SSE2-NEXT: movdqu 16(%eax), %xmm0 ; X86-SSE2-NEXT: pcmpeqb {{\.LCPI.*}}, %xmm0 ; X86-SSE2-NEXT: pmovmskb %xmm0, %ecx @@ -1024,13 +1024,13 @@ define i1 @length32_eq_const(i8* %X) nounwind { ; X86-SSE2-NEXT: retl ; ; X64-SSE2-LABEL: length32_eq_const: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: movdqu (%rdi), %xmm0 ; X64-SSE2-NEXT: pcmpeqb {{.*}}(%rip), %xmm0 ; X64-SSE2-NEXT: pmovmskb %xmm0, %eax ; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-SSE2-NEXT: jne .LBB26_2 -; X64-SSE2-NEXT: # BB#1: # %loadbb1 +; X64-SSE2-NEXT: # %bb.1: # %loadbb1 ; X64-SSE2-NEXT: movdqu 16(%rdi), %xmm0 ; X64-SSE2-NEXT: pcmpeqb {{.*}}(%rip), %xmm0 ; X64-SSE2-NEXT: pmovmskb %xmm0, %ecx @@ -1045,13 +1045,13 @@ define i1 @length32_eq_const(i8* %X) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX1-LABEL: length32_eq_const: -; X64-AVX1: # BB#0: +; X64-AVX1: # %bb.0: ; X64-AVX1-NEXT: vmovdqu (%rdi), %xmm0 ; X64-AVX1-NEXT: vpcmpeqb {{.*}}(%rip), %xmm0, %xmm0 ; X64-AVX1-NEXT: vpmovmskb %xmm0, %eax ; X64-AVX1-NEXT: cmpl $65535, %eax # imm = 0xFFFF ; X64-AVX1-NEXT: jne .LBB26_2 -; X64-AVX1-NEXT: # BB#1: # %loadbb1 +; X64-AVX1-NEXT: # %bb.1: # %loadbb1 ; X64-AVX1-NEXT: vmovdqu 16(%rdi), %xmm0 ; X64-AVX1-NEXT: vpcmpeqb {{.*}}(%rip), %xmm0, %xmm0 ; X64-AVX1-NEXT: vpmovmskb %xmm0, %ecx @@ -1066,7 +1066,7 @@ define i1 @length32_eq_const(i8* %X) nounwind { ; X64-AVX1-NEXT: retq ; ; X64-AVX2-LABEL: length32_eq_const: -; X64-AVX2: # BB#0: +; X64-AVX2: # %bb.0: ; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0 ; X64-AVX2-NEXT: vpcmpeqb {{.*}}(%rip), %ymm0, %ymm0 ; X64-AVX2-NEXT: vpmovmskb %ymm0, %eax @@ -1081,7 +1081,7 @@ define i1 
@length32_eq_const(i8* %X) nounwind { define i32 @length64(i8* %X, i8* %Y) nounwind { ; X86-LABEL: length64: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $64 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -1091,7 +1091,7 @@ define i32 @length64(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: length64: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movl $64, %edx ; X64-NEXT: jmp memcmp # TAILCALL %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 64) nounwind @@ -1100,7 +1100,7 @@ define i32 @length64(i8* %X, i8* %Y) nounwind { define i1 @length64_eq(i8* %x, i8* %y) nounwind { ; X86-LABEL: length64_eq: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $64 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -1112,7 +1112,7 @@ define i1 @length64_eq(i8* %x, i8* %y) nounwind { ; X86-NEXT: retl ; ; X64-SSE2-LABEL: length64_eq: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: pushq %rax ; X64-SSE2-NEXT: movl $64, %edx ; X64-SSE2-NEXT: callq memcmp @@ -1122,7 +1122,7 @@ define i1 @length64_eq(i8* %x, i8* %y) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX1-LABEL: length64_eq: -; X64-AVX1: # BB#0: +; X64-AVX1: # %bb.0: ; X64-AVX1-NEXT: pushq %rax ; X64-AVX1-NEXT: movl $64, %edx ; X64-AVX1-NEXT: callq memcmp @@ -1132,13 +1132,13 @@ define i1 @length64_eq(i8* %x, i8* %y) nounwind { ; X64-AVX1-NEXT: retq ; ; X64-AVX2-LABEL: length64_eq: -; X64-AVX2: # BB#0: +; X64-AVX2: # %bb.0: ; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0 ; X64-AVX2-NEXT: vpcmpeqb (%rsi), %ymm0, %ymm0 ; X64-AVX2-NEXT: vpmovmskb %ymm0, %eax ; X64-AVX2-NEXT: cmpl $-1, %eax ; X64-AVX2-NEXT: jne .LBB28_2 -; X64-AVX2-NEXT: # BB#1: # %loadbb1 +; X64-AVX2-NEXT: # %bb.1: # %loadbb1 ; X64-AVX2-NEXT: vmovdqu 32(%rdi), %ymm0 ; X64-AVX2-NEXT: vpcmpeqb 32(%rsi), %ymm0, %ymm0 ; X64-AVX2-NEXT: vpmovmskb %ymm0, %ecx @@ -1159,7 +1159,7 @@ define i1 @length64_eq(i8* %x, i8* %y) nounwind { define i1 @length64_eq_const(i8* %X) nounwind { ; X86-LABEL: length64_eq_const: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $0 ; X86-NEXT: pushl $64 ; X86-NEXT: pushl $.L.str @@ -1171,7 +1171,7 @@ define i1 @length64_eq_const(i8* %X) nounwind { ; X86-NEXT: retl ; ; X64-SSE2-LABEL: length64_eq_const: -; X64-SSE2: # BB#0: +; X64-SSE2: # %bb.0: ; X64-SSE2-NEXT: pushq %rax ; X64-SSE2-NEXT: movl $.L.str, %esi ; X64-SSE2-NEXT: movl $64, %edx @@ -1182,7 +1182,7 @@ define i1 @length64_eq_const(i8* %X) nounwind { ; X64-SSE2-NEXT: retq ; ; X64-AVX1-LABEL: length64_eq_const: -; X64-AVX1: # BB#0: +; X64-AVX1: # %bb.0: ; X64-AVX1-NEXT: pushq %rax ; X64-AVX1-NEXT: movl $.L.str, %esi ; X64-AVX1-NEXT: movl $64, %edx @@ -1193,13 +1193,13 @@ define i1 @length64_eq_const(i8* %X) nounwind { ; X64-AVX1-NEXT: retq ; ; X64-AVX2-LABEL: length64_eq_const: -; X64-AVX2: # BB#0: +; X64-AVX2: # %bb.0: ; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0 ; X64-AVX2-NEXT: vpcmpeqb {{.*}}(%rip), %ymm0, %ymm0 ; X64-AVX2-NEXT: vpmovmskb %ymm0, %eax ; X64-AVX2-NEXT: cmpl $-1, %eax ; X64-AVX2-NEXT: jne .LBB29_2 -; X64-AVX2-NEXT: # BB#1: # %loadbb1 +; X64-AVX2-NEXT: # %bb.1: # %loadbb1 ; X64-AVX2-NEXT: vmovdqu 32(%rdi), %ymm0 ; X64-AVX2-NEXT: vpcmpeqb {{.*}}(%rip), %ymm0, %ymm0 ; X64-AVX2-NEXT: vpmovmskb %ymm0, %ecx @@ -1221,7 +1221,7 @@ define i1 @length64_eq_const(i8* %X) nounwind { ; This checks that we do not do stupid things with huge sizes. 
define i32 @huge_length(i8* %X, i8* %Y) nounwind { ; X86-LABEL: huge_length: -; X86: # BB#0: +; X86: # %bb.0: ; X86-NEXT: pushl $2147483647 # imm = 0x7FFFFFFF ; X86-NEXT: pushl $-1 ; X86-NEXT: pushl {{[0-9]+}}(%esp) @@ -1231,7 +1231,7 @@ define i32 @huge_length(i8* %X, i8* %Y) nounwind { ; X86-NEXT: retl ; ; X64-LABEL: huge_length: -; X64: # BB#0: +; X64: # %bb.0: ; X64-NEXT: movabsq $9223372036854775807, %rdx # imm = 0x7FFFFFFFFFFFFFFF ; X64-NEXT: jmp memcmp # TAILCALL %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 9223372036854775807) nounwind |
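Every hunk in the diff above makes the same mechanical substitution: the autogenerated FileCheck lines now spell the basic-block comment as "# %bb.0:" (and "# %bb.1:", "# %bb.2:" for the later blocks) instead of the old "# BB#0:" form; only the block-label comments change, not the expected instructions. As a minimal illustration, here is the length0 test reassembled from its hunk as it reads after the change, showing only the X64 check prefix and with check-line whitespace approximated:

define i32 @length0(i8* %X, i8* %Y) nounwind {
; X64-LABEL: length0:
; X64:       # %bb.0:
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    retq
  %m = tail call i32 @memcmp(i8* %X, i8* %Y, i64 0) nounwind
  ret i32 %m
}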