author      Reid Kleckner <rnk@google.com>    2017-11-08 21:31:14 +0000
committer   Reid Kleckner <rnk@google.com>    2017-11-08 21:31:14 +0000
commit      7adb2fdbbadd0c588a2137c7362a9099574d79ae (patch)
tree        51b8a7eec92f22bbf2869969130a0fce1886de6b /llvm/test
parent      a702fa17f31abd5efce302ef3d4cf2fbaf167368 (diff)
download    bcm5719-llvm-7adb2fdbbadd0c588a2137c7362a9099574d79ae.tar.gz
            bcm5719-llvm-7adb2fdbbadd0c588a2137c7362a9099574d79ae.zip
Revert "Correct dwarf unwind information in function epilogue for X86"
This reverts r317579, originally committed as r317100.
There is a design issue with marking CFI instructions duplicatable. Not
all targets support the CFIInstrInserter pass, and targets like Darwin
can't cope with duplicated prologue setup CFI instructions: their
compact unwind info emission fails.
When the following code is compiled for arm64 on Mac at -O3, the CFI
instructions end up getting tail duplicated, which causes compact unwind
info emission to fail:
int a, c, d, e, f, g, h, i, j, k, l, m;
void n(int o, int *b) {
  if (g)
    f = 0;
  for (; f < o; f++) {
    m = a;
    if (l > j * k > i)
      j = i = k = d;
    h = b[c] - e;
  }
}
We get assembly that looks like this:
; BB#1:                                 ; %if.then
Lloh3:
        adrp    x9, _f@GOTPAGE
Lloh4:
        ldr     x9, [x9, _f@GOTPAGEOFF]
        mov     w8, wzr
Lloh5:
        str     wzr, [x9]
        stp     x20, x19, [sp, #-16]!   ; 8-byte Folded Spill
        .cfi_def_cfa_offset 16
        .cfi_offset w19, -8
        .cfi_offset w20, -16
        cmp     w8, w0
        b.lt    LBB0_3
        b       LBB0_7
LBB0_2:                                 ; %entry.if.end_crit_edge
Lloh6:
        adrp    x8, _f@GOTPAGE
Lloh7:
        ldr     x8, [x8, _f@GOTPAGEOFF]
Lloh8:
        ldr     w8, [x8]
        stp     x20, x19, [sp, #-16]!   ; 8-byte Folded Spill
        .cfi_def_cfa_offset 16
        .cfi_offset w19, -8
        .cfi_offset w20, -16
        cmp     w8, w0
        b.ge    LBB0_7
LBB0_3:                                 ; %for.body.lr.ph
Note the multiple .cfi_def* directives. Compact unwind info emission
can't handle that.
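For contrast, a minimal hand-written sketch (illustrative only, not taken from the
actual pre-revert output) of the shape compact unwind emission can handle: the
callee-save spill and its CFI directives appear exactly once, in a block that
dominates both paths, so the function has a single prologue description rather
than per-predecessor copies.
        stp     x20, x19, [sp, #-16]!   ; single folded spill in the entry block
        .cfi_def_cfa_offset 16
        .cfi_offset w19, -8
        .cfi_offset w20, -16
        ; ... branchy body follows, with no further .cfi_def_* or
        ; .cfi_offset directives until the epilogue ...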
llvm-svn: 317726
Diffstat (limited to 'llvm/test')
87 files changed, 17 insertions, 1215 deletions
diff --git a/llvm/test/CodeGen/X86/2009-03-16-PHIElimInLPad.ll b/llvm/test/CodeGen/X86/2009-03-16-PHIElimInLPad.ll index 109962c2859..6814ed1d894 100644 --- a/llvm/test/CodeGen/X86/2009-03-16-PHIElimInLPad.ll +++ b/llvm/test/CodeGen/X86/2009-03-16-PHIElimInLPad.ll @@ -23,7 +23,6 @@ lpad: ; preds = %cont, %entry } ; CHECK: lpad -; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: Ltmp declare i32 @__gxx_personality_v0(...) diff --git a/llvm/test/CodeGen/X86/2011-10-19-widen_vselect.ll b/llvm/test/CodeGen/X86/2011-10-19-widen_vselect.ll index dd059100503..416761ffef4 100644 --- a/llvm/test/CodeGen/X86/2011-10-19-widen_vselect.ll +++ b/llvm/test/CodeGen/X86/2011-10-19-widen_vselect.ll @@ -88,7 +88,6 @@ define void @full_test() { ; X32-NEXT: movss %xmm4, {{[0-9]+}}(%esp) ; X32-NEXT: movss %xmm0, {{[0-9]+}}(%esp) ; X32-NEXT: addl $60, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: full_test: diff --git a/llvm/test/CodeGen/X86/GlobalISel/add-scalar.ll b/llvm/test/CodeGen/X86/GlobalISel/add-scalar.ll index 9d28f441fb7..64a6313023b 100644 --- a/llvm/test/CodeGen/X86/GlobalISel/add-scalar.ll +++ b/llvm/test/CodeGen/X86/GlobalISel/add-scalar.ll @@ -20,7 +20,6 @@ define i64 @test_add_i64(i64 %arg1, i64 %arg2) { ; X32-NEXT: addl 8(%ebp), %eax ; X32-NEXT: adcl 12(%ebp), %edx ; X32-NEXT: popl %ebp -; X32-NEXT: .cfi_def_cfa %esp, 4 ; X32-NEXT: retl %ret = add i64 %arg1, %arg2 ret i64 %ret diff --git a/llvm/test/CodeGen/X86/GlobalISel/brcond.ll b/llvm/test/CodeGen/X86/GlobalISel/brcond.ll index 2467344776e..917ee6f5bd8 100644 --- a/llvm/test/CodeGen/X86/GlobalISel/brcond.ll +++ b/llvm/test/CodeGen/X86/GlobalISel/brcond.ll @@ -36,7 +36,6 @@ define i32 @test_1(i32 %a, i32 %b, i32 %tValue, i32 %fValue) { ; X32-NEXT: movl %eax, (%esp) ; X32-NEXT: movl (%esp), %eax ; X32-NEXT: popl %ecx -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl entry: %retval = alloca i32, align 4 diff --git a/llvm/test/CodeGen/X86/GlobalISel/callingconv.ll b/llvm/test/CodeGen/X86/GlobalISel/callingconv.ll index 23987a3c365..4100a7217ac 100644 --- a/llvm/test/CodeGen/X86/GlobalISel/callingconv.ll +++ b/llvm/test/CodeGen/X86/GlobalISel/callingconv.ll @@ -117,7 +117,6 @@ define <8 x i32> @test_v8i32_args(<8 x i32> %arg1, <8 x i32> %arg2) { ; X32-NEXT: movups 16(%esp), %xmm1 ; X32-NEXT: movaps %xmm2, %xmm0 ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_v8i32_args: @@ -136,7 +135,6 @@ define void @test_trivial_call() { ; X32-NEXT: .cfi_def_cfa_offset 16 ; X32-NEXT: calll trivial_callee ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_trivial_call: @@ -145,7 +143,6 @@ define void @test_trivial_call() { ; X64-NEXT: .cfi_def_cfa_offset 16 ; X64-NEXT: callq trivial_callee ; X64-NEXT: popq %rax -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq call void @trivial_callee() ret void @@ -163,7 +160,6 @@ define void @test_simple_arg_call(i32 %in0, i32 %in1) { ; X32-NEXT: movl %eax, 4(%esp) ; X32-NEXT: calll simple_arg_callee ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_simple_arg_call: @@ -175,7 +171,6 @@ define void @test_simple_arg_call(i32 %in0, i32 %in1) { ; X64-NEXT: movl %eax, %esi ; X64-NEXT: callq simple_arg_callee ; X64-NEXT: popq %rax -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq call void @simple_arg_callee(i32 %in1, i32 %in0) ret void @@ -198,7 +193,6 @@ define void @test_simple_arg8_call(i32 %in0) { ; X32-NEXT: movl %eax, 28(%esp) ; X32-NEXT: calll 
simple_arg8_callee ; X32-NEXT: addl $44, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_simple_arg8_call: @@ -214,7 +208,6 @@ define void @test_simple_arg8_call(i32 %in0) { ; X64-NEXT: movl %edi, %r9d ; X64-NEXT: callq simple_arg8_callee ; X64-NEXT: addq $24, %rsp -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq call void @simple_arg8_callee(i32 %in0, i32 %in0, i32 %in0, i32 %in0,i32 %in0, i32 %in0, i32 %in0, i32 %in0) ret void @@ -231,7 +224,6 @@ define i32 @test_simple_return_callee() { ; X32-NEXT: calll simple_return_callee ; X32-NEXT: addl %eax, %eax ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_simple_return_callee: @@ -242,7 +234,6 @@ define i32 @test_simple_return_callee() { ; X64-NEXT: callq simple_return_callee ; X64-NEXT: addl %eax, %eax ; X64-NEXT: popq %rcx -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq %call = call i32 @simple_return_callee(i32 5) %r = add i32 %call, %call @@ -263,7 +254,6 @@ define <8 x i32> @test_split_return_callee(<8 x i32> %arg1, <8 x i32> %arg2) { ; X32-NEXT: paddd (%esp), %xmm0 # 16-byte Folded Reload ; X32-NEXT: paddd 16(%esp), %xmm1 # 16-byte Folded Reload ; X32-NEXT: addl $44, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_split_return_callee: @@ -278,7 +268,6 @@ define <8 x i32> @test_split_return_callee(<8 x i32> %arg1, <8 x i32> %arg2) { ; X64-NEXT: paddd (%rsp), %xmm0 # 16-byte Folded Reload ; X64-NEXT: paddd 16(%rsp), %xmm1 # 16-byte Folded Reload ; X64-NEXT: addq $40, %rsp -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq %call = call <8 x i32> @split_return_callee(<8 x i32> %arg2) %r = add <8 x i32> %arg1, %call @@ -292,7 +281,6 @@ define void @test_indirect_call(void()* %func) { ; X32-NEXT: .cfi_def_cfa_offset 16 ; X32-NEXT: calll *16(%esp) ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_indirect_call: @@ -301,7 +289,6 @@ define void @test_indirect_call(void()* %func) { ; X64-NEXT: .cfi_def_cfa_offset 16 ; X64-NEXT: callq *%rdi ; X64-NEXT: popq %rax -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq call void %func() ret void @@ -330,11 +317,8 @@ define void @test_abi_exts_call(i8* %addr) { ; X32-NEXT: movl %esi, (%esp) ; X32-NEXT: calll take_char ; X32-NEXT: addl $4, %esp -; X32-NEXT: .cfi_def_cfa_offset 12 ; X32-NEXT: popl %esi -; X32-NEXT: .cfi_def_cfa_offset 8 ; X32-NEXT: popl %ebx -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_abi_exts_call: @@ -351,7 +335,6 @@ define void @test_abi_exts_call(i8* %addr) { ; X64-NEXT: movl %ebx, %edi ; X64-NEXT: callq take_char ; X64-NEXT: popq %rbx -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq %val = load i8, i8* %addr call void @take_char(i8 %val) @@ -374,7 +357,6 @@ define void @test_variadic_call_1(i8** %addr_ptr, i32* %val_ptr) { ; X32-NEXT: movl %ecx, 4(%esp) ; X32-NEXT: calll variadic_callee ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_variadic_call_1: @@ -386,7 +368,6 @@ define void @test_variadic_call_1(i8** %addr_ptr, i32* %val_ptr) { ; X64-NEXT: movb $0, %al ; X64-NEXT: callq variadic_callee ; X64-NEXT: popq %rax -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq %addr = load i8*, i8** %addr_ptr @@ -412,7 +393,6 @@ define void @test_variadic_call_2(i8** %addr_ptr, double* %val_ptr) { ; X32-NEXT: movl %ecx, 4(%eax) ; X32-NEXT: calll variadic_callee ; X32-NEXT: addl $12, %esp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; 
X64-LABEL: test_variadic_call_2: @@ -425,7 +405,6 @@ define void @test_variadic_call_2(i8** %addr_ptr, double* %val_ptr) { ; X64-NEXT: movq %rcx, %xmm0 ; X64-NEXT: callq variadic_callee ; X64-NEXT: popq %rax -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq %addr = load i8*, i8** %addr_ptr diff --git a/llvm/test/CodeGen/X86/GlobalISel/frameIndex.ll b/llvm/test/CodeGen/X86/GlobalISel/frameIndex.ll index f260d0d707f..7b2a050f153 100644 --- a/llvm/test/CodeGen/X86/GlobalISel/frameIndex.ll +++ b/llvm/test/CodeGen/X86/GlobalISel/frameIndex.ll @@ -18,7 +18,6 @@ define i32* @allocai32() { ; X32-NEXT: .cfi_def_cfa_offset 8 ; X32-NEXT: movl %esp, %eax ; X32-NEXT: popl %ecx -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X32ABI-LABEL: allocai32: diff --git a/llvm/test/CodeGen/X86/O0-pipeline.ll b/llvm/test/CodeGen/X86/O0-pipeline.ll index 8ecafad8022..1f7415ee2af 100644 --- a/llvm/test/CodeGen/X86/O0-pipeline.ll +++ b/llvm/test/CodeGen/X86/O0-pipeline.ll @@ -49,7 +49,6 @@ ; CHECK-NEXT: X86 pseudo instruction expansion pass ; CHECK-NEXT: Analyze Machine Code For Garbage Collection ; CHECK-NEXT: X86 vzeroupper inserter -; CHECK-NEXT: Check CFA info and insert CFI instructions if needed ; CHECK-NEXT: Contiguously Lay Out Funclets ; CHECK-NEXT: StackMap Liveness Analysis ; CHECK-NEXT: Live DEBUG_VALUE analysis diff --git a/llvm/test/CodeGen/X86/TruncAssertZext.ll b/llvm/test/CodeGen/X86/TruncAssertZext.ll index ed98fd51cc0..b9ae57ca011 100644 --- a/llvm/test/CodeGen/X86/TruncAssertZext.ll +++ b/llvm/test/CodeGen/X86/TruncAssertZext.ll @@ -25,7 +25,6 @@ define i64 @main() { ; CHECK-NEXT: subq %rcx, %rax ; CHECK-NEXT: shrq $32, %rax ; CHECK-NEXT: popq %rcx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %b = call i64 @foo() %or = and i64 %b, 18446744069414584575 ; this is 0xffffffff000000ff diff --git a/llvm/test/CodeGen/X86/avx512-mask-op.ll b/llvm/test/CodeGen/X86/avx512-mask-op.ll index 909e8398680..b75bd8cc3ee 100644 --- a/llvm/test/CodeGen/X86/avx512-mask-op.ll +++ b/llvm/test/CodeGen/X86/avx512-mask-op.ll @@ -699,13 +699,11 @@ define <16 x i8> @test8(<16 x i32>%a, <16 x i32>%b, i32 %a1, i32 %b1) { ; AVX512BW-NEXT: jg LBB17_1 ; AVX512BW-NEXT: ## BB#2: ; AVX512BW-NEXT: vpcmpltud %zmm2, %zmm1, %k0 -; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 -; AVX512BW-NEXT: ## kill: %XMM0<def> %XMM0<kill> %ZMM0<kill> -; AVX512BW-NEXT: vzeroupper -; AVX512BW-NEXT: retq +; AVX512BW-NEXT: jmp LBB17_3 ; AVX512BW-NEXT: LBB17_1: -; AVX512BW-NEXT: vpcmpgtd %zmm2, %zmm0, %k0 -; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 +; AVX512BW-NEXT: vpcmpgtd %zmm2, %zmm0, %k0 +; AVX512BW-NEXT: LBB17_3: +; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: ## kill: %XMM0<def> %XMM0<kill> %ZMM0<kill> ; AVX512BW-NEXT: vzeroupper ; AVX512BW-NEXT: retq diff --git a/llvm/test/CodeGen/X86/avx512-regcall-Mask.ll b/llvm/test/CodeGen/X86/avx512-regcall-Mask.ll index fa6adec675f..bb541f46567 100644 --- a/llvm/test/CodeGen/X86/avx512-regcall-Mask.ll +++ b/llvm/test/CodeGen/X86/avx512-regcall-Mask.ll @@ -209,18 +209,12 @@ define i64 @caller_argv64i1() #0 { ; LINUXOSX64-NEXT: pushq %rax ; LINUXOSX64-NEXT: .cfi_adjust_cfa_offset 8 ; LINUXOSX64-NEXT: callq test_argv64i1 -; LINUXOSX64-NEXT: addq $16, %rsp +; LINUXOSX64-NEXT: addq $24, %rsp ; LINUXOSX64-NEXT: .cfi_adjust_cfa_offset -16 -; LINUXOSX64-NEXT: addq $8, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 40 ; LINUXOSX64-NEXT: popq %r12 -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 32 ; LINUXOSX64-NEXT: popq %r13 -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 24 ; LINUXOSX64-NEXT: popq %r14 -; LINUXOSX64-NEXT: 
.cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %r15 -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %v0 = bitcast i64 4294967298 to <64 x i1> @@ -293,7 +287,6 @@ define <64 x i1> @caller_retv64i1() #0 { ; LINUXOSX64-NEXT: kmovq %rax, %k0 ; LINUXOSX64-NEXT: vpmovm2b %k0, %zmm0 ; LINUXOSX64-NEXT: popq %rax -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %call = call x86_regcallcc <64 x i1> @test_retv64i1() @@ -404,9 +397,7 @@ define x86_regcallcc i32 @test_argv32i1(<32 x i1> %x0, <32 x i1> %x1, <32 x i1> ; LINUXOSX64-NEXT: vmovaps {{[0-9]+}}(%rsp), %xmm14 # 16-byte Reload ; LINUXOSX64-NEXT: vmovaps {{[0-9]+}}(%rsp), %xmm15 # 16-byte Reload ; LINUXOSX64-NEXT: addq $128, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: vzeroupper ; LINUXOSX64-NEXT: retq entry: @@ -460,7 +451,6 @@ define i32 @caller_argv32i1() #0 { ; LINUXOSX64-NEXT: movl $1, %edx ; LINUXOSX64-NEXT: callq test_argv32i1 ; LINUXOSX64-NEXT: popq %rcx -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %v0 = bitcast i32 1 to <32 x i1> @@ -523,7 +513,6 @@ define i32 @caller_retv32i1() #0 { ; LINUXOSX64-NEXT: callq test_retv32i1 ; LINUXOSX64-NEXT: incl %eax ; LINUXOSX64-NEXT: popq %rcx -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %call = call x86_regcallcc <32 x i1> @test_retv32i1() @@ -637,9 +626,7 @@ define x86_regcallcc i16 @test_argv16i1(<16 x i1> %x0, <16 x i1> %x1, <16 x i1> ; LINUXOSX64-NEXT: vmovaps {{[0-9]+}}(%rsp), %xmm14 # 16-byte Reload ; LINUXOSX64-NEXT: vmovaps {{[0-9]+}}(%rsp), %xmm15 # 16-byte Reload ; LINUXOSX64-NEXT: addq $128, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %res = call i16 @test_argv16i1helper(<16 x i1> %x0, <16 x i1> %x1, <16 x i1> %x2) ret i16 %res @@ -691,7 +678,6 @@ define i16 @caller_argv16i1() #0 { ; LINUXOSX64-NEXT: movl $1, %edx ; LINUXOSX64-NEXT: callq test_argv16i1 ; LINUXOSX64-NEXT: popq %rcx -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %v0 = bitcast i16 1 to <16 x i1> @@ -760,7 +746,6 @@ define i16 @caller_retv16i1() #0 { ; LINUXOSX64-NEXT: incl %eax ; LINUXOSX64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill> ; LINUXOSX64-NEXT: popq %rcx -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %call = call x86_regcallcc <16 x i1> @test_retv16i1() @@ -874,9 +859,7 @@ define x86_regcallcc i8 @test_argv8i1(<8 x i1> %x0, <8 x i1> %x1, <8 x i1> %x2) ; LINUXOSX64-NEXT: vmovaps {{[0-9]+}}(%rsp), %xmm14 # 16-byte Reload ; LINUXOSX64-NEXT: vmovaps {{[0-9]+}}(%rsp), %xmm15 # 16-byte Reload ; LINUXOSX64-NEXT: addq $128, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %res = call i8 @test_argv8i1helper(<8 x i1> %x0, <8 x i1> %x1, <8 x i1> %x2) ret i8 %res @@ -928,7 +911,6 @@ define i8 @caller_argv8i1() #0 { ; LINUXOSX64-NEXT: movl $1, %edx ; LINUXOSX64-NEXT: callq test_argv8i1 ; LINUXOSX64-NEXT: popq %rcx -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq entry: %v0 = bitcast i8 1 to <8 x i1> @@ -1002,11 +984,9 @@ define <8 x i1> @caller_retv8i1() #0 { ; LINUXOSX64-NEXT: vpmovm2w %k0, %zmm0 ; LINUXOSX64-NEXT: # kill: %XMM0<def> %XMM0<kill> %ZMM0<kill> ; LINUXOSX64-NEXT: popq %rax -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: vzeroupper ; LINUXOSX64-NEXT: 
retq entry: %call = call x86_regcallcc <8 x i1> @test_retv8i1() ret <8 x i1> %call } - diff --git a/llvm/test/CodeGen/X86/avx512-regcall-NoMask.ll b/llvm/test/CodeGen/X86/avx512-regcall-NoMask.ll index b4f1d2c776d..43a1871245b 100644 --- a/llvm/test/CodeGen/X86/avx512-regcall-NoMask.ll +++ b/llvm/test/CodeGen/X86/avx512-regcall-NoMask.ll @@ -63,7 +63,6 @@ define x86_regcallcc i1 @test_CallargReti1(i1 %a) { ; LINUXOSX64-NEXT: callq test_argReti1 ; LINUXOSX64-NEXT: incb %al ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = add i1 %a, 1 %c = call x86_regcallcc i1 @test_argReti1(i1 %b) @@ -131,7 +130,6 @@ define x86_regcallcc i8 @test_CallargReti8(i8 %a) { ; LINUXOSX64-NEXT: callq test_argReti8 ; LINUXOSX64-NEXT: incb %al ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = add i8 %a, 1 %c = call x86_regcallcc i8 @test_argReti8(i8 %b) @@ -202,7 +200,6 @@ define x86_regcallcc i16 @test_CallargReti16(i16 %a) { ; LINUXOSX64-NEXT: incl %eax ; LINUXOSX64-NEXT: # kill: %AX<def> %AX<kill> %EAX<kill> ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = add i16 %a, 1 %c = call x86_regcallcc i16 @test_argReti16(i16 %b) @@ -264,7 +261,6 @@ define x86_regcallcc i32 @test_CallargReti32(i32 %a) { ; LINUXOSX64-NEXT: callq test_argReti32 ; LINUXOSX64-NEXT: incl %eax ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = add i32 %a, 1 %c = call x86_regcallcc i32 @test_argReti32(i32 %b) @@ -331,7 +327,6 @@ define x86_regcallcc i64 @test_CallargReti64(i64 %a) { ; LINUXOSX64-NEXT: callq test_argReti64 ; LINUXOSX64-NEXT: incq %rax ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = add i64 %a, 1 %c = call x86_regcallcc i64 @test_argReti64(i64 %b) @@ -411,9 +406,7 @@ define x86_regcallcc float @test_CallargRetFloat(float %a) { ; LINUXOSX64-NEXT: vaddss %xmm8, %xmm0, %xmm0 ; LINUXOSX64-NEXT: vmovaps (%rsp), %xmm8 # 16-byte Reload ; LINUXOSX64-NEXT: addq $16, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = fadd float 1.0, %a %c = call x86_regcallcc float @test_argRetFloat(float %b) @@ -493,9 +486,7 @@ define x86_regcallcc double @test_CallargRetDouble(double %a) { ; LINUXOSX64-NEXT: vaddsd %xmm8, %xmm0, %xmm0 ; LINUXOSX64-NEXT: vmovaps (%rsp), %xmm8 # 16-byte Reload ; LINUXOSX64-NEXT: addq $16, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = fadd double 1.0, %a %c = call x86_regcallcc double @test_argRetDouble(double %b) @@ -557,7 +548,6 @@ define x86_regcallcc x86_fp80 @test_CallargRetf80(x86_fp80 %a) { ; LINUXOSX64-NEXT: callq test_argRetf80 ; LINUXOSX64-NEXT: fadd %st(0), %st(0) ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = fadd x86_fp80 %a, %a %c = call x86_regcallcc x86_fp80 @test_argRetf80(x86_fp80 %b) @@ -621,7 +611,6 @@ define x86_regcallcc [4 x i32]* @test_CallargRetPointer([4 x i32]* %a) { ; LINUXOSX64-NEXT: callq test_argRetPointer ; LINUXOSX64-NEXT: incl %eax ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = ptrtoint [4 x i32]* %a to i32 %c = add i32 %b, 1 @@ -705,9 +694,7 @@ define x86_regcallcc <4 x i32> @test_CallargRet128Vector(<4 x i32> %a) { ; LINUXOSX64-NEXT: 
vmovdqa32 %xmm8, %xmm0 {%k1} ; LINUXOSX64-NEXT: vmovaps (%rsp), %xmm8 # 16-byte Reload ; LINUXOSX64-NEXT: addq $16, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = call x86_regcallcc <4 x i32> @test_argRet128Vector(<4 x i32> %a, <4 x i32> %a) %c = select <4 x i1> undef , <4 x i32> %a, <4 x i32> %b @@ -781,9 +768,7 @@ define x86_regcallcc <8 x i32> @test_CallargRet256Vector(<8 x i32> %a) { ; LINUXOSX64-NEXT: vmovdqu (%rsp), %ymm1 # 32-byte Reload ; LINUXOSX64-NEXT: vmovdqa32 %ymm1, %ymm0 {%k1} ; LINUXOSX64-NEXT: addq $48, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = call x86_regcallcc <8 x i32> @test_argRet256Vector(<8 x i32> %a, <8 x i32> %a) %c = select <8 x i1> undef , <8 x i32> %a, <8 x i32> %b @@ -857,9 +842,7 @@ define x86_regcallcc <16 x i32> @test_CallargRet512Vector(<16 x i32> %a) { ; LINUXOSX64-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload ; LINUXOSX64-NEXT: vmovdqa32 %zmm1, %zmm0 {%k1} ; LINUXOSX64-NEXT: addq $112, %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 16 ; LINUXOSX64-NEXT: popq %rsp -; LINUXOSX64-NEXT: .cfi_def_cfa_offset 8 ; LINUXOSX64-NEXT: retq %b = call x86_regcallcc <16 x i32> @test_argRet512Vector(<16 x i32> %a, <16 x i32> %a) %c = select <16 x i1> undef , <16 x i32> %a, <16 x i32> %b diff --git a/llvm/test/CodeGen/X86/avx512-schedule.ll b/llvm/test/CodeGen/X86/avx512-schedule.ll index abc8c1a7513..8372fbdb9ab 100755 --- a/llvm/test/CodeGen/X86/avx512-schedule.ll +++ b/llvm/test/CodeGen/X86/avx512-schedule.ll @@ -8839,7 +8839,6 @@ define <16 x float> @broadcast_ss_spill(float %x) { ; GENERIC-NEXT: callq func_f32 ; GENERIC-NEXT: vbroadcastss (%rsp), %zmm0 # 16-byte Folded Reload ; GENERIC-NEXT: addq $24, %rsp # sched: [1:0.33] -; GENERIC-NEXT: .cfi_def_cfa_offset 8 ; GENERIC-NEXT: retq # sched: [1:1.00] ; ; SKX-LABEL: broadcast_ss_spill: @@ -8853,7 +8852,6 @@ define <16 x float> @broadcast_ss_spill(float %x) { ; SKX-NEXT: vbroadcastss (%rsp), %zmm0 # 16-byte Folded Reload sched: [8:0.50] ; SKX-NEXT: # sched: [8:0.50] ; SKX-NEXT: addq $24, %rsp # sched: [1:0.25] -; SKX-NEXT: .cfi_def_cfa_offset 8 ; SKX-NEXT: retq # sched: [7:1.00] %a = fadd float %x, %x call void @func_f32(float %a) @@ -8874,7 +8872,6 @@ define <8 x double> @broadcast_sd_spill(double %x) { ; GENERIC-NEXT: callq func_f64 ; GENERIC-NEXT: vbroadcastsd (%rsp), %zmm0 # 16-byte Folded Reload ; GENERIC-NEXT: addq $24, %rsp # sched: [1:0.33] -; GENERIC-NEXT: .cfi_def_cfa_offset 8 ; GENERIC-NEXT: retq # sched: [1:1.00] ; ; SKX-LABEL: broadcast_sd_spill: @@ -8888,7 +8885,6 @@ define <8 x double> @broadcast_sd_spill(double %x) { ; SKX-NEXT: vbroadcastsd (%rsp), %zmm0 # 16-byte Folded Reload sched: [8:0.50] ; SKX-NEXT: # sched: [8:0.50] ; SKX-NEXT: addq $24, %rsp # sched: [1:0.25] -; SKX-NEXT: .cfi_def_cfa_offset 8 ; SKX-NEXT: retq # sched: [7:1.00] %a = fadd double %x, %x call void @func_f64(double %a) diff --git a/llvm/test/CodeGen/X86/avx512-select.ll b/llvm/test/CodeGen/X86/avx512-select.ll index 51a7c685ed4..43cf9ee7358 100644 --- a/llvm/test/CodeGen/X86/avx512-select.ll +++ b/llvm/test/CodeGen/X86/avx512-select.ll @@ -115,7 +115,6 @@ define <16 x double> @select04(<16 x double> %a, <16 x double> %b) { ; X86-NEXT: vmovaps 8(%ebp), %zmm1 ; X86-NEXT: movl %ebp, %esp ; X86-NEXT: popl %ebp -; X86-NEXT: .cfi_def_cfa %esp, 4 ; X86-NEXT: retl ; ; X64-LABEL: select04: diff --git 
a/llvm/test/CodeGen/X86/avx512-vbroadcast.ll b/llvm/test/CodeGen/X86/avx512-vbroadcast.ll index 9aacb23fbd5..584968f1c6e 100644 --- a/llvm/test/CodeGen/X86/avx512-vbroadcast.ll +++ b/llvm/test/CodeGen/X86/avx512-vbroadcast.ll @@ -413,7 +413,6 @@ define <16 x float> @broadcast_ss_spill(float %x) { ; ALL-NEXT: callq func_f32 ; ALL-NEXT: vbroadcastss (%rsp), %zmm0 # 16-byte Folded Reload ; ALL-NEXT: addq $24, %rsp -; ALL-NEXT: .cfi_def_cfa_offset 8 ; ALL-NEXT: retq %a = fadd float %x, %x call void @func_f32(float %a) @@ -433,7 +432,6 @@ define <8 x double> @broadcast_sd_spill(double %x) { ; ALL-NEXT: callq func_f64 ; ALL-NEXT: vbroadcastsd (%rsp), %zmm0 # 16-byte Folded Reload ; ALL-NEXT: addq $24, %rsp -; ALL-NEXT: .cfi_def_cfa_offset 8 ; ALL-NEXT: retq %a = fadd double %x, %x call void @func_f64(double %a) diff --git a/llvm/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll b/llvm/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll index 7f170cd51bf..d1bf8fd5f3f 100644 --- a/llvm/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll +++ b/llvm/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll @@ -717,7 +717,6 @@ define <8 x i64> @test_mm512_mask_set1_epi8(<8 x i64> %__O, i64 %__M, i8 signext ; X32-NEXT: vpbroadcastb %eax, %zmm3 {%k1} ; X32-NEXT: vmovdqa64 %zmm3, %zmm0 ; X32-NEXT: popl %ebx -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm512_mask_set1_epi8: @@ -1445,7 +1444,6 @@ define <8 x i64> @test_mm512_maskz_set1_epi8(i64 %__M, i8 signext %__A) { ; X32-NEXT: korq %k0, %k1, %k1 ; X32-NEXT: vpbroadcastb %eax, %zmm0 {%k1} {z} ; X32-NEXT: popl %ebx -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm512_maskz_set1_epi8: diff --git a/llvm/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll b/llvm/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll index 87565ac129b..a5ef1809157 100644 --- a/llvm/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll +++ b/llvm/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll @@ -355,7 +355,6 @@ define i64 @test_pcmpeq_b(<64 x i8> %a, <64 x i8> %b) { ; AVX512F-32-NEXT: movl (%esp), %eax ; AVX512F-32-NEXT: movl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $12, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.mask.pcmpeq.b.512(<64 x i8> %a, <64 x i8> %b, i64 -1) @@ -381,7 +380,6 @@ define i64 @test_mask_pcmpeq_b(<64 x i8> %a, <64 x i8> %b, i64 %mask) { ; AVX512F-32-NEXT: movl (%esp), %eax ; AVX512F-32-NEXT: movl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $12, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.mask.pcmpeq.b.512(<64 x i8> %a, <64 x i8> %b, i64 %mask) @@ -447,7 +445,6 @@ define i64 @test_pcmpgt_b(<64 x i8> %a, <64 x i8> %b) { ; AVX512F-32-NEXT: movl (%esp), %eax ; AVX512F-32-NEXT: movl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $12, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.mask.pcmpgt.b.512(<64 x i8> %a, <64 x i8> %b, i64 -1) @@ -473,7 +470,6 @@ define i64 @test_mask_pcmpgt_b(<64 x i8> %a, <64 x i8> %b, i64 %mask) { ; AVX512F-32-NEXT: movl (%esp), %eax ; AVX512F-32-NEXT: movl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $12, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.mask.pcmpgt.b.512(<64 x i8> %a, <64 x i8> %b, i64 %mask) @@ -1706,7 +1702,6 @@ define i64 
@test_cmp_b_512(<64 x i8> %a0, <64 x i8> %a1) { ; AVX512F-32-NEXT: addl {{[0-9]+}}(%esp), %eax ; AVX512F-32-NEXT: adcl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $60, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res0 = call i64 @llvm.x86.avx512.mask.cmp.b.512(<64 x i8> %a0, <64 x i8> %a1, i32 0, i64 -1) @@ -2508,11 +2503,8 @@ define i64 @test_mask_cmp_b_512(<64 x i8> %a0, <64 x i8> %a1, i64 %mask) { ; AVX512F-32-NEXT: addl %esi, %eax ; AVX512F-32-NEXT: adcl %ecx, %edx ; AVX512F-32-NEXT: addl $60, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 12 ; AVX512F-32-NEXT: popl %esi -; AVX512F-32-NEXT: .cfi_def_cfa_offset 8 ; AVX512F-32-NEXT: popl %ebx -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res0 = call i64 @llvm.x86.avx512.mask.cmp.b.512(<64 x i8> %a0, <64 x i8> %a1, i32 0, i64 %mask) @@ -2594,7 +2586,6 @@ define i64 @test_ucmp_b_512(<64 x i8> %a0, <64 x i8> %a1) { ; AVX512F-32-NEXT: addl {{[0-9]+}}(%esp), %eax ; AVX512F-32-NEXT: adcl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $60, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res0 = call i64 @llvm.x86.avx512.mask.ucmp.b.512(<64 x i8> %a0, <64 x i8> %a1, i32 0, i64 -1) @@ -3396,11 +3387,8 @@ define i64 @test_mask_x86_avx512_ucmp_b_512(<64 x i8> %a0, <64 x i8> %a1, i64 %m ; AVX512F-32-NEXT: addl %esi, %eax ; AVX512F-32-NEXT: adcl %ecx, %edx ; AVX512F-32-NEXT: addl $60, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 12 ; AVX512F-32-NEXT: popl %esi -; AVX512F-32-NEXT: .cfi_def_cfa_offset 8 ; AVX512F-32-NEXT: popl %ebx -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: vzeroupper ; AVX512F-32-NEXT: retl %res0 = call i64 @llvm.x86.avx512.mask.ucmp.b.512(<64 x i8> %a0, <64 x i8> %a1, i32 0, i64 %mask) diff --git a/llvm/test/CodeGen/X86/avx512bw-intrinsics.ll b/llvm/test/CodeGen/X86/avx512bw-intrinsics.ll index c2620642e5c..e23deebd15b 100644 --- a/llvm/test/CodeGen/X86/avx512bw-intrinsics.ll +++ b/llvm/test/CodeGen/X86/avx512bw-intrinsics.ll @@ -1499,7 +1499,6 @@ define i64@test_int_x86_avx512_kunpck_qd(i64 %x0, i64 %x1) { ; AVX512F-32-NEXT: movl (%esp), %eax ; AVX512F-32-NEXT: movl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $12, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.kunpck.dq(i64 %x0, i64 %x1) ret i64 %res @@ -1523,7 +1522,6 @@ define i64@test_int_x86_avx512_cvtb2mask_512(<64 x i8> %x0) { ; AVX512F-32-NEXT: movl (%esp), %eax ; AVX512F-32-NEXT: movl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $12, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.cvtb2mask.512(<64 x i8> %x0) ret i64 %res @@ -1714,7 +1712,6 @@ define i64@test_int_x86_avx512_ptestm_b_512(<64 x i8> %x0, <64 x i8> %x1, i64 %x ; AVX512F-32-NEXT: addl {{[0-9]+}}(%esp), %eax ; AVX512F-32-NEXT: adcxl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $20, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: retl %res = call i64 @llvm.x86.avx512.ptestm.b.512(<64 x i8> %x0, <64 x i8> %x1, i64 %x2) %res1 = call i64 @llvm.x86.avx512.ptestm.b.512(<64 x i8> %x0, <64 x i8> %x1, i64-1) @@ -1779,7 +1776,6 @@ define i64@test_int_x86_avx512_ptestnm_b_512(<64 x i8> %x0, <64 x i8> %x1, i64 % ; AVX512F-32-NEXT: addl {{[0-9]+}}(%esp), %eax ; AVX512F-32-NEXT: adcxl {{[0-9]+}}(%esp), %edx ; AVX512F-32-NEXT: addl $20, %esp -; AVX512F-32-NEXT: .cfi_def_cfa_offset 4 ; AVX512F-32-NEXT: retl %res = 
call i64 @llvm.x86.avx512.ptestnm.b.512(<64 x i8> %x0, <64 x i8> %x1, i64 %x2) %res1 = call i64 @llvm.x86.avx512.ptestnm.b.512(<64 x i8> %x0, <64 x i8> %x1, i64-1) diff --git a/llvm/test/CodeGen/X86/avx512vl-intrinsics-fast-isel.ll b/llvm/test/CodeGen/X86/avx512vl-intrinsics-fast-isel.ll index 3f4a696af0c..f5578d6cc88 100644 --- a/llvm/test/CodeGen/X86/avx512vl-intrinsics-fast-isel.ll +++ b/llvm/test/CodeGen/X86/avx512vl-intrinsics-fast-isel.ll @@ -233,7 +233,6 @@ define <2 x i64> @test_mm_mask_broadcastd_epi32(<2 x i64> %a0, i8 %a1, <2 x i64> ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpbroadcastd %xmm1, %xmm0 {%k1} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_broadcastd_epi32: @@ -266,7 +265,6 @@ define <2 x i64> @test_mm_maskz_broadcastd_epi32(i8 %a0, <2 x i64> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpbroadcastd %xmm0, %xmm0 {%k1} {z} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_broadcastd_epi32: @@ -371,7 +369,6 @@ define <2 x i64> @test_mm_mask_broadcastq_epi64(<2 x i64> %a0, i8 %a1, <2 x i64> ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpbroadcastq %xmm1, %xmm0 {%k1} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_broadcastq_epi64: @@ -401,7 +398,6 @@ define <2 x i64> @test_mm_maskz_broadcastq_epi64(i8 %a0, <2 x i64> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpbroadcastq %xmm0, %xmm0 {%k1} {z} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_broadcastq_epi64: @@ -445,7 +441,6 @@ define <4 x i64> @test_mm256_mask_broadcastq_epi64(<4 x i64> %a0, i8 %a1, <2 x i ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpbroadcastq %xmm1, %ymm0 {%k1} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_mask_broadcastq_epi64: @@ -475,7 +470,6 @@ define <4 x i64> @test_mm256_maskz_broadcastq_epi64(i8 %a0, <2 x i64> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpbroadcastq %xmm0, %ymm0 {%k1} {z} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_maskz_broadcastq_epi64: @@ -519,7 +513,6 @@ define <2 x double> @test_mm_mask_broadcastsd_pd(<2 x double> %a0, i8 %a1, <2 x ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovddup {{.*#+}} xmm0 {%k1} = xmm1[0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_broadcastsd_pd: @@ -549,7 +542,6 @@ define <2 x double> @test_mm_maskz_broadcastsd_pd(i8 %a0, <2 x double> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovddup {{.*#+}} xmm0 {%k1} {z} = xmm0[0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_broadcastsd_pd: @@ -593,7 +585,6 @@ define <4 x double> @test_mm256_mask_broadcastsd_pd(<4 x double> %a0, i8 %a1, <2 ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vbroadcastsd %xmm1, %ymm0 {%k1} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_mask_broadcastsd_pd: @@ -623,7 +614,6 @@ define <4 x double> @test_mm256_maskz_broadcastsd_pd(i8 %a0, <2 x double> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vbroadcastsd %xmm0, %ymm0 {%k1} {z} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_maskz_broadcastsd_pd: @@ -667,7 +657,6 @@ define <4 x float> @test_mm_mask_broadcastss_ps(<4 x float> %a0, i8 %a1, <4 x fl ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: 
vbroadcastss %xmm1, %xmm0 {%k1} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_broadcastss_ps: @@ -697,7 +686,6 @@ define <4 x float> @test_mm_maskz_broadcastss_ps(i8 %a0, <4 x float> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vbroadcastss %xmm0, %xmm0 {%k1} {z} ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_broadcastss_ps: @@ -793,7 +781,6 @@ define <2 x double> @test_mm_mask_movddup_pd(<2 x double> %a0, i8 %a1, <2 x doub ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovddup {{.*#+}} xmm0 {%k1} = xmm1[0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_movddup_pd: @@ -823,7 +810,6 @@ define <2 x double> @test_mm_maskz_movddup_pd(i8 %a0, <2 x double> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovddup {{.*#+}} xmm0 {%k1} {z} = xmm0[0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_movddup_pd: @@ -867,7 +853,6 @@ define <4 x double> @test_mm256_mask_movddup_pd(<4 x double> %a0, i8 %a1, <4 x d ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovddup {{.*#+}} ymm0 {%k1} = ymm1[0,0,2,2] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_mask_movddup_pd: @@ -897,7 +882,6 @@ define <4 x double> @test_mm256_maskz_movddup_pd(i8 %a0, <4 x double> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovddup {{.*#+}} ymm0 {%k1} {z} = ymm0[0,0,2,2] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_maskz_movddup_pd: @@ -941,7 +925,6 @@ define <4 x float> @test_mm_mask_movehdup_ps(<4 x float> %a0, i8 %a1, <4 x float ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovshdup {{.*#+}} xmm0 {%k1} = xmm1[1,1,3,3] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_movehdup_ps: @@ -971,7 +954,6 @@ define <4 x float> @test_mm_maskz_movehdup_ps(i8 %a0, <4 x float> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovshdup {{.*#+}} xmm0 {%k1} {z} = xmm0[1,1,3,3] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_movehdup_ps: @@ -1067,7 +1049,6 @@ define <4 x float> @test_mm_mask_moveldup_ps(<4 x float> %a0, i8 %a1, <4 x float ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovsldup {{.*#+}} xmm0 {%k1} = xmm1[0,0,2,2] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_moveldup_ps: @@ -1097,7 +1078,6 @@ define <4 x float> @test_mm_maskz_moveldup_ps(i8 %a0, <4 x float> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vmovsldup {{.*#+}} xmm0 {%k1} {z} = xmm0[0,0,2,2] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_moveldup_ps: @@ -1193,7 +1173,6 @@ define <4 x i64> @test_mm256_mask_permutex_epi64(<4 x i64> %a0, i8 %a1, <4 x i64 ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpermq {{.*#+}} ymm0 {%k1} = ymm1[1,0,0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_mask_permutex_epi64: @@ -1223,7 +1202,6 @@ define <4 x i64> @test_mm256_maskz_permutex_epi64(i8 %a0, <4 x i64> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpermq {{.*#+}} ymm0 {%k1} {z} = ymm0[1,0,0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_maskz_permutex_epi64: @@ -1267,7 +1245,6 @@ define <4 x double> @test_mm256_mask_permutex_pd(<4 x double> %a0, i8 %a1, 
<4 x ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpermpd {{.*#+}} ymm0 {%k1} = ymm1[1,0,0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_mask_permutex_pd: @@ -1297,7 +1274,6 @@ define <4 x double> @test_mm256_maskz_permutex_pd(i8 %a0, <4 x double> %a1) { ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vpermpd {{.*#+}} ymm0 {%k1} {z} = ymm0[1,0,0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_maskz_permutex_pd: @@ -1341,7 +1317,6 @@ define <2 x double> @test_mm_mask_shuffle_pd(<2 x double> %a0, i8 %a1, <2 x doub ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vunpckhpd {{.*#+}} xmm0 {%k1} = xmm1[1],xmm2[1] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_shuffle_pd: @@ -1371,7 +1346,6 @@ define <2 x double> @test_mm_maskz_shuffle_pd(i8 %a0, <2 x double> %a1, <2 x dou ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vunpckhpd {{.*#+}} xmm0 {%k1} {z} = xmm0[1],xmm1[1] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_shuffle_pd: @@ -1415,7 +1389,6 @@ define <4 x double> @test_mm256_mask_shuffle_pd(<4 x double> %a0, i8 %a1, <4 x d ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vshufpd {{.*#+}} ymm0 {%k1} = ymm1[1],ymm2[1],ymm1[2],ymm2[2] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_mask_shuffle_pd: @@ -1445,7 +1418,6 @@ define <4 x double> @test_mm256_maskz_shuffle_pd(i8 %a0, <4 x double> %a1, <4 x ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vshufpd {{.*#+}} ymm0 {%k1} {z} = ymm0[1],ymm1[1],ymm0[2],ymm1[2] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm256_maskz_shuffle_pd: @@ -1489,7 +1461,6 @@ define <4 x float> @test_mm_mask_shuffle_ps(<4 x float> %a0, i8 %a1, <4 x float> ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vshufps {{.*#+}} xmm0 {%k1} = xmm1[0,1],xmm2[0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_mask_shuffle_ps: @@ -1519,7 +1490,6 @@ define <4 x float> @test_mm_maskz_shuffle_ps(i8 %a0, <4 x float> %a1, <4 x float ; X32-NEXT: kmovw %eax, %k1 ; X32-NEXT: vshufps {{.*#+}} xmm0 {%k1} {z} = xmm0[0,1],xmm1[0,0] ; X32-NEXT: popl %eax -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test_mm_maskz_shuffle_ps: diff --git a/llvm/test/CodeGen/X86/avx512vl-vbroadcast.ll b/llvm/test/CodeGen/X86/avx512vl-vbroadcast.ll index 1098e7bffe0..9fc957297e2 100644 --- a/llvm/test/CodeGen/X86/avx512vl-vbroadcast.ll +++ b/llvm/test/CodeGen/X86/avx512vl-vbroadcast.ll @@ -12,7 +12,6 @@ define <8 x float> @_256_broadcast_ss_spill(float %x) { ; CHECK-NEXT: callq func_f32 ; CHECK-NEXT: vbroadcastss (%rsp), %ymm0 # 16-byte Folded Reload ; CHECK-NEXT: addq $24, %rsp -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %a = fadd float %x, %x call void @func_f32(float %a) @@ -31,7 +30,6 @@ define <4 x float> @_128_broadcast_ss_spill(float %x) { ; CHECK-NEXT: callq func_f32 ; CHECK-NEXT: vbroadcastss (%rsp), %xmm0 # 16-byte Folded Reload ; CHECK-NEXT: addq $24, %rsp -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %a = fadd float %x, %x call void @func_f32(float %a) @@ -51,7 +49,6 @@ define <4 x double> @_256_broadcast_sd_spill(double %x) { ; CHECK-NEXT: callq func_f64 ; CHECK-NEXT: vbroadcastsd (%rsp), %ymm0 # 16-byte Folded Reload ; CHECK-NEXT: addq $24, %rsp -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %a = fadd double %x, %x call void 
@func_f64(double %a) diff --git a/llvm/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll b/llvm/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll index bccf953fb0b..26a7d83a027 100644 --- a/llvm/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll +++ b/llvm/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll @@ -109,7 +109,6 @@ define zeroext i32 @test_vpcmpeqb_v16i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -228,7 +227,6 @@ define zeroext i32 @test_vpcmpeqb_v16i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -350,7 +348,6 @@ define zeroext i32 @test_masked_vpcmpeqb_v16i1_v32i1_mask(i16 zeroext %__u, <2 x ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -473,7 +470,6 @@ define zeroext i32 @test_masked_vpcmpeqb_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -601,7 +597,6 @@ define zeroext i64 @test_vpcmpeqb_v16i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -725,7 +720,6 @@ define zeroext i64 @test_vpcmpeqb_v16i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -852,7 +846,6 @@ define zeroext i64 @test_masked_vpcmpeqb_v16i1_v64i1_mask(i16 zeroext %__u, <2 x ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -980,7 +973,6 @@ define zeroext i64 @test_masked_vpcmpeqb_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1032,7 +1024,6 @@ define zeroext i64 @test_vpcmpeqb_v32i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1080,7 +1071,6 @@ define zeroext i64 @test_vpcmpeqb_v32i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1139,7 +1129,6 @@ define zeroext i64 @test_masked_vpcmpeqb_v32i1_v64i1_mask(i32 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1199,7 +1188,6 @@ define zeroext i64 @test_masked_vpcmpeqb_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1396,7 +1384,6 @@ define zeroext i32 @test_vpcmpeqw_v8i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b 
; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1470,7 +1457,6 @@ define zeroext i32 @test_vpcmpeqw_v8i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64>* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1547,7 +1533,6 @@ define zeroext i32 @test_masked_vpcmpeqw_v8i1_v32i1_mask(i8 zeroext %__u, <2 x i ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1625,7 +1610,6 @@ define zeroext i32 @test_masked_vpcmpeqw_v8i1_v32i1_mask_mem(i8 zeroext %__u, <2 ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1708,7 +1692,6 @@ define zeroext i64 @test_vpcmpeqw_v8i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1787,7 +1770,6 @@ define zeroext i64 @test_vpcmpeqw_v8i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64>* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1869,7 +1851,6 @@ define zeroext i64 @test_masked_vpcmpeqw_v8i1_v64i1_mask(i8 zeroext %__u, <2 x i ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -1952,7 +1933,6 @@ define zeroext i64 @test_masked_vpcmpeqw_v8i1_v64i1_mask_mem(i8 zeroext %__u, <2 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2076,7 +2056,6 @@ define zeroext i32 @test_vpcmpeqw_v16i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2196,7 +2175,6 @@ define zeroext i32 @test_vpcmpeqw_v16i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2319,7 +2297,6 @@ define zeroext i32 @test_masked_vpcmpeqw_v16i1_v32i1_mask(i16 zeroext %__u, <4 x ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2443,7 +2420,6 @@ define zeroext i32 @test_masked_vpcmpeqw_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2572,7 +2548,6 @@ define zeroext i64 @test_vpcmpeqw_v16i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2697,7 +2672,6 @@ define zeroext i64 @test_vpcmpeqw_v16i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; 
NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2825,7 +2799,6 @@ define zeroext i64 @test_masked_vpcmpeqw_v16i1_v64i1_mask(i16 zeroext %__u, <4 x ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -2954,7 +2927,6 @@ define zeroext i64 @test_masked_vpcmpeqw_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -3308,7 +3280,6 @@ define zeroext i64 @test_vpcmpeqw_v32i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -3573,7 +3544,6 @@ define zeroext i64 @test_vpcmpeqw_v32i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -3934,7 +3904,6 @@ define zeroext i64 @test_masked_vpcmpeqw_v32i1_v64i1_mask(i32 zeroext %__u, <8 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -4211,7 +4180,6 @@ define zeroext i64 @test_masked_vpcmpeqw_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5075,7 +5043,6 @@ define zeroext i32 @test_vpcmpeqd_v4i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5117,7 +5084,6 @@ define zeroext i32 @test_vpcmpeqd_v4i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64>* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5179,7 +5145,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v4i1_v32i1_mask(i8 zeroext %__u, <2 x i ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5243,7 +5208,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v4i1_v32i1_mask_mem(i8 zeroext %__u, <2 ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5291,7 +5255,6 @@ define zeroext i32 @test_vpcmpeqd_v4i1_v32i1_mask_mem_b(<2 x i64> %__a, i32* %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5355,7 +5318,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5409,7 +5371,6 @@ define zeroext i64 @test_vpcmpeqd_v4i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: 
popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5457,7 +5418,6 @@ define zeroext i64 @test_vpcmpeqd_v4i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64>* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5525,7 +5485,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v4i1_v64i1_mask(i8 zeroext %__u, <2 x i ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5595,7 +5554,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v4i1_v64i1_mask_mem(i8 zeroext %__u, <2 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5649,7 +5607,6 @@ define zeroext i64 @test_vpcmpeqd_v4i1_v64i1_mask_mem_b(<2 x i64> %__a, i32* %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5719,7 +5676,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -5993,7 +5949,6 @@ define zeroext i32 @test_vpcmpeqd_v8i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__b ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6067,7 +6022,6 @@ define zeroext i32 @test_vpcmpeqd_v8i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64>* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6144,7 +6098,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v8i1_v32i1_mask(i8 zeroext %__u, <4 x i ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6222,7 +6175,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v8i1_v32i1_mask_mem(i8 zeroext %__u, <4 ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6300,7 +6252,6 @@ define zeroext i32 @test_vpcmpeqd_v8i1_v32i1_mask_mem_b(<4 x i64> %__a, i32* %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6378,7 +6329,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6462,7 +6412,6 @@ define zeroext i64 @test_vpcmpeqd_v8i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__b ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6541,7 +6490,6 @@ define zeroext i64 @test_vpcmpeqd_v8i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64>* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; 
NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6623,7 +6571,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v8i1_v64i1_mask(i8 zeroext %__u, <4 x i ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6706,7 +6653,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v8i1_v64i1_mask_mem(i8 zeroext %__u, <4 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6789,7 +6735,6 @@ define zeroext i64 @test_vpcmpeqd_v8i1_v64i1_mask_mem_b(<4 x i64> %__a, i32* %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6872,7 +6817,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -6994,7 +6938,6 @@ define zeroext i32 @test_vpcmpeqd_v16i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7111,7 +7054,6 @@ define zeroext i32 @test_vpcmpeqd_v16i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7231,7 +7173,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v16i1_v32i1_mask(i16 zeroext %__u, <8 x ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7352,7 +7293,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7473,7 +7413,6 @@ define zeroext i32 @test_vpcmpeqd_v16i1_v32i1_mask_mem_b(<8 x i64> %__a, i32* %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7594,7 +7533,6 @@ define zeroext i32 @test_masked_vpcmpeqd_v16i1_v32i1_mask_mem_b(i16 zeroext %__u ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7721,7 +7659,6 @@ define zeroext i64 @test_vpcmpeqd_v16i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7843,7 +7780,6 @@ define zeroext i64 @test_vpcmpeqd_v16i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -7968,7 +7904,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v16i1_v64i1_mask(i16 zeroext %__u, <8 x ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq 
entry: @@ -8094,7 +8029,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -8220,7 +8154,6 @@ define zeroext i64 @test_vpcmpeqd_v16i1_v64i1_mask_mem_b(<8 x i64> %__a, i32* %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -8346,7 +8279,6 @@ define zeroext i64 @test_masked_vpcmpeqd_v16i1_v64i1_mask_mem_b(i16 zeroext %__u ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9191,7 +9123,6 @@ define zeroext i32 @test_vpcmpeqq_v2i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9233,7 +9164,6 @@ define zeroext i32 @test_vpcmpeqq_v2i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64>* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9287,7 +9217,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v2i1_v32i1_mask(i8 zeroext %__u, <2 x i ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9343,7 +9272,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v2i1_v32i1_mask_mem(i8 zeroext %__u, <2 ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9391,7 +9319,6 @@ define zeroext i32 @test_vpcmpeqq_v2i1_v32i1_mask_mem_b(<2 x i64> %__a, i64* %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9447,7 +9374,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v2i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9501,7 +9427,6 @@ define zeroext i64 @test_vpcmpeqq_v2i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9549,7 +9474,6 @@ define zeroext i64 @test_vpcmpeqq_v2i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64>* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9609,7 +9533,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v2i1_v64i1_mask(i8 zeroext %__u, <2 x i ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9671,7 +9594,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v2i1_v64i1_mask_mem(i8 zeroext %__u, <2 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9725,7 
+9647,6 @@ define zeroext i64 @test_vpcmpeqq_v2i1_v64i1_mask_mem_b(<2 x i64> %__a, i64* %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -9787,7 +9708,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v2i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -10679,7 +10599,6 @@ define zeroext i32 @test_vpcmpeqq_v4i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__b ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -10723,7 +10642,6 @@ define zeroext i32 @test_vpcmpeqq_v4i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64>* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -10787,7 +10705,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v4i1_v32i1_mask(i8 zeroext %__u, <4 x i ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -10853,7 +10770,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v4i1_v32i1_mask_mem(i8 zeroext %__u, <4 ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -10903,7 +10819,6 @@ define zeroext i32 @test_vpcmpeqq_v4i1_v32i1_mask_mem_b(<4 x i64> %__a, i64* %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -10969,7 +10884,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11025,7 +10939,6 @@ define zeroext i64 @test_vpcmpeqq_v4i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__b ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11075,7 +10988,6 @@ define zeroext i64 @test_vpcmpeqq_v4i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64>* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11145,7 +11057,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v4i1_v64i1_mask(i8 zeroext %__u, <4 x i ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11217,7 +11128,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v4i1_v64i1_mask_mem(i8 zeroext %__u, <4 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11273,7 +11183,6 @@ define zeroext i64 @test_vpcmpeqq_v4i1_v64i1_mask_mem_b(<4 x i64> %__a, i64* %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; 
NoVLX-NEXT: retq entry: @@ -11345,7 +11254,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11593,7 +11501,6 @@ define zeroext i32 @test_vpcmpeqq_v8i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__b ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11665,7 +11572,6 @@ define zeroext i32 @test_vpcmpeqq_v8i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64>* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11740,7 +11646,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v8i1_v32i1_mask(i8 zeroext %__u, <8 x i ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11816,7 +11721,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v8i1_v32i1_mask_mem(i8 zeroext %__u, <8 ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11892,7 +11796,6 @@ define zeroext i32 @test_vpcmpeqq_v8i1_v32i1_mask_mem_b(<8 x i64> %__a, i64* %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -11968,7 +11871,6 @@ define zeroext i32 @test_masked_vpcmpeqq_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12050,7 +11952,6 @@ define zeroext i64 @test_vpcmpeqq_v8i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__b ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12127,7 +12028,6 @@ define zeroext i64 @test_vpcmpeqq_v8i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64>* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12207,7 +12107,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v8i1_v64i1_mask(i8 zeroext %__u, <8 x i ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12288,7 +12187,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v8i1_v64i1_mask_mem(i8 zeroext %__u, <8 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12369,7 +12267,6 @@ define zeroext i64 @test_vpcmpeqq_v8i1_v64i1_mask_mem_b(<8 x i64> %__a, i64* %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12450,7 +12347,6 @@ define zeroext i64 @test_masked_vpcmpeqq_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa 
%rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12574,7 +12470,6 @@ define zeroext i32 @test_vpcmpsgtb_v16i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12693,7 +12588,6 @@ define zeroext i32 @test_vpcmpsgtb_v16i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12815,7 +12709,6 @@ define zeroext i32 @test_masked_vpcmpsgtb_v16i1_v32i1_mask(i16 zeroext %__u, <2 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -12938,7 +12831,6 @@ define zeroext i32 @test_masked_vpcmpsgtb_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13066,7 +12958,6 @@ define zeroext i64 @test_vpcmpsgtb_v16i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13190,7 +13081,6 @@ define zeroext i64 @test_vpcmpsgtb_v16i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13317,7 +13207,6 @@ define zeroext i64 @test_masked_vpcmpsgtb_v16i1_v64i1_mask(i16 zeroext %__u, <2 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13445,7 +13334,6 @@ define zeroext i64 @test_masked_vpcmpsgtb_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13497,7 +13385,6 @@ define zeroext i64 @test_vpcmpsgtb_v32i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13545,7 +13432,6 @@ define zeroext i64 @test_vpcmpsgtb_v32i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13604,7 +13490,6 @@ define zeroext i64 @test_masked_vpcmpsgtb_v32i1_v64i1_mask(i32 zeroext %__u, <4 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13664,7 +13549,6 @@ define zeroext i64 @test_masked_vpcmpsgtb_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -13861,7 +13745,6 @@ define zeroext i32 @test_vpcmpsgtw_v8i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ 
-13935,7 +13818,6 @@ define zeroext i32 @test_vpcmpsgtw_v8i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14012,7 +13894,6 @@ define zeroext i32 @test_masked_vpcmpsgtw_v8i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14090,7 +13971,6 @@ define zeroext i32 @test_masked_vpcmpsgtw_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14173,7 +14053,6 @@ define zeroext i64 @test_vpcmpsgtw_v8i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14252,7 +14131,6 @@ define zeroext i64 @test_vpcmpsgtw_v8i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14334,7 +14212,6 @@ define zeroext i64 @test_masked_vpcmpsgtw_v8i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14417,7 +14294,6 @@ define zeroext i64 @test_masked_vpcmpsgtw_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14541,7 +14417,6 @@ define zeroext i32 @test_vpcmpsgtw_v16i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14661,7 +14536,6 @@ define zeroext i32 @test_vpcmpsgtw_v16i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14784,7 +14658,6 @@ define zeroext i32 @test_masked_vpcmpsgtw_v16i1_v32i1_mask(i16 zeroext %__u, <4 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -14908,7 +14781,6 @@ define zeroext i32 @test_masked_vpcmpsgtw_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -15037,7 +14909,6 @@ define zeroext i64 @test_vpcmpsgtw_v16i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -15162,7 +15033,6 @@ define zeroext i64 @test_vpcmpsgtw_v16i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -15290,7 +15160,6 @@ define zeroext i64 
@test_masked_vpcmpsgtw_v16i1_v64i1_mask(i16 zeroext %__u, <4 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -15419,7 +15288,6 @@ define zeroext i64 @test_masked_vpcmpsgtw_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -15773,7 +15641,6 @@ define zeroext i64 @test_vpcmpsgtw_v32i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -16038,7 +15905,6 @@ define zeroext i64 @test_vpcmpsgtw_v32i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -16399,7 +16265,6 @@ define zeroext i64 @test_masked_vpcmpsgtw_v32i1_v64i1_mask(i32 zeroext %__u, <8 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -16676,7 +16541,6 @@ define zeroext i64 @test_masked_vpcmpsgtw_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17540,7 +17404,6 @@ define zeroext i32 @test_vpcmpsgtd_v4i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17582,7 +17445,6 @@ define zeroext i32 @test_vpcmpsgtd_v4i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17644,7 +17506,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v4i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17708,7 +17569,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v4i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17756,7 +17616,6 @@ define zeroext i32 @test_vpcmpsgtd_v4i1_v32i1_mask_mem_b(<2 x i64> %__a, i32* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17820,7 +17679,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17874,7 +17732,6 @@ define zeroext i64 @test_vpcmpsgtd_v4i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17922,7 +17779,6 @@ 
define zeroext i64 @test_vpcmpsgtd_v4i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -17990,7 +17846,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v4i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18060,7 +17915,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v4i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18114,7 +17968,6 @@ define zeroext i64 @test_vpcmpsgtd_v4i1_v64i1_mask_mem_b(<2 x i64> %__a, i32* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18184,7 +18037,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18458,7 +18310,6 @@ define zeroext i32 @test_vpcmpsgtd_v8i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18532,7 +18383,6 @@ define zeroext i32 @test_vpcmpsgtd_v8i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18609,7 +18459,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v8i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18687,7 +18536,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18765,7 +18613,6 @@ define zeroext i32 @test_vpcmpsgtd_v8i1_v32i1_mask_mem_b(<4 x i64> %__a, i32* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18843,7 +18690,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -18927,7 +18773,6 @@ define zeroext i64 @test_vpcmpsgtd_v8i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19006,7 +18851,6 @@ define zeroext i64 @test_vpcmpsgtd_v8i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: 
retq entry: @@ -19088,7 +18932,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v8i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19171,7 +19014,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19254,7 +19096,6 @@ define zeroext i64 @test_vpcmpsgtd_v8i1_v64i1_mask_mem_b(<4 x i64> %__a, i32* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19337,7 +19178,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19459,7 +19299,6 @@ define zeroext i32 @test_vpcmpsgtd_v16i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19576,7 +19415,6 @@ define zeroext i32 @test_vpcmpsgtd_v16i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19696,7 +19534,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v16i1_v32i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19817,7 +19654,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -19938,7 +19774,6 @@ define zeroext i32 @test_vpcmpsgtd_v16i1_v32i1_mask_mem_b(<8 x i64> %__a, i32* % ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20059,7 +19894,6 @@ define zeroext i32 @test_masked_vpcmpsgtd_v16i1_v32i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20186,7 +20020,6 @@ define zeroext i64 @test_vpcmpsgtd_v16i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20308,7 +20141,6 @@ define zeroext i64 @test_vpcmpsgtd_v16i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20433,7 +20265,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v16i1_v64i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20559,7 +20390,6 @@ define zeroext i64 
@test_masked_vpcmpsgtd_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20685,7 +20515,6 @@ define zeroext i64 @test_vpcmpsgtd_v16i1_v64i1_mask_mem_b(<8 x i64> %__a, i32* % ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -20811,7 +20640,6 @@ define zeroext i64 @test_masked_vpcmpsgtd_v16i1_v64i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21656,7 +21484,6 @@ define zeroext i32 @test_vpcmpsgtq_v2i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21698,7 +21525,6 @@ define zeroext i32 @test_vpcmpsgtq_v2i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21752,7 +21578,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v2i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21808,7 +21633,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v2i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21856,7 +21680,6 @@ define zeroext i32 @test_vpcmpsgtq_v2i1_v32i1_mask_mem_b(<2 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21912,7 +21735,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v2i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -21966,7 +21788,6 @@ define zeroext i64 @test_vpcmpsgtq_v2i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -22014,7 +21835,6 @@ define zeroext i64 @test_vpcmpsgtq_v2i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -22074,7 +21894,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v2i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -22136,7 +21955,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v2i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -22190,7 +22008,6 @@ define zeroext 
i64 @test_vpcmpsgtq_v2i1_v64i1_mask_mem_b(<2 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -22252,7 +22069,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v2i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23144,7 +22960,6 @@ define zeroext i32 @test_vpcmpsgtq_v4i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23188,7 +23003,6 @@ define zeroext i32 @test_vpcmpsgtq_v4i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23252,7 +23066,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v4i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23318,7 +23131,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v4i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23368,7 +23180,6 @@ define zeroext i32 @test_vpcmpsgtq_v4i1_v32i1_mask_mem_b(<4 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23434,7 +23245,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23490,7 +23300,6 @@ define zeroext i64 @test_vpcmpsgtq_v4i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23540,7 +23349,6 @@ define zeroext i64 @test_vpcmpsgtq_v4i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23610,7 +23418,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v4i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23682,7 +23489,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v4i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -23738,7 +23544,6 @@ define zeroext i64 @test_vpcmpsgtq_v4i1_v64i1_mask_mem_b(<4 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ 
-23810,7 +23615,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24058,7 +23862,6 @@ define zeroext i32 @test_vpcmpsgtq_v8i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24130,7 +23933,6 @@ define zeroext i32 @test_vpcmpsgtq_v8i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24205,7 +24007,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v8i1_v32i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24281,7 +24082,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24357,7 +24157,6 @@ define zeroext i32 @test_vpcmpsgtq_v8i1_v32i1_mask_mem_b(<8 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24433,7 +24232,6 @@ define zeroext i32 @test_masked_vpcmpsgtq_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24515,7 +24313,6 @@ define zeroext i64 @test_vpcmpsgtq_v8i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24592,7 +24389,6 @@ define zeroext i64 @test_vpcmpsgtq_v8i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24672,7 +24468,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v8i1_v64i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24753,7 +24548,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24834,7 +24628,6 @@ define zeroext i64 @test_vpcmpsgtq_v8i1_v64i1_mask_mem_b(<8 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -24915,7 +24708,6 @@ define zeroext i64 @test_masked_vpcmpsgtq_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: 
vzeroupper ; NoVLX-NEXT: retq entry: @@ -25041,7 +24833,6 @@ define zeroext i32 @test_vpcmpsgeb_v16i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25163,7 +24954,6 @@ define zeroext i32 @test_vpcmpsgeb_v16i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25287,7 +25077,6 @@ define zeroext i32 @test_masked_vpcmpsgeb_v16i1_v32i1_mask(i16 zeroext %__u, <2 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25413,7 +25202,6 @@ define zeroext i32 @test_masked_vpcmpsgeb_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25543,7 +25331,6 @@ define zeroext i64 @test_vpcmpsgeb_v16i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25670,7 +25457,6 @@ define zeroext i64 @test_vpcmpsgeb_v16i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25799,7 +25585,6 @@ define zeroext i64 @test_masked_vpcmpsgeb_v16i1_v64i1_mask(i16 zeroext %__u, <2 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25930,7 +25715,6 @@ define zeroext i64 @test_masked_vpcmpsgeb_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -25984,7 +25768,6 @@ define zeroext i64 @test_vpcmpsgeb_v32i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26035,7 +25818,6 @@ define zeroext i64 @test_vpcmpsgeb_v32i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26096,7 +25878,6 @@ define zeroext i64 @test_masked_vpcmpsgeb_v32i1_v64i1_mask(i32 zeroext %__u, <4 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26159,7 +25940,6 @@ define zeroext i64 @test_masked_vpcmpsgeb_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26368,7 +26148,6 @@ define zeroext i32 @test_vpcmpsgew_v8i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26445,7 +26224,6 @@ define 
zeroext i32 @test_vpcmpsgew_v8i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26524,7 +26302,6 @@ define zeroext i32 @test_masked_vpcmpsgew_v8i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26605,7 +26382,6 @@ define zeroext i32 @test_masked_vpcmpsgew_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26690,7 +26466,6 @@ define zeroext i64 @test_vpcmpsgew_v8i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26772,7 +26547,6 @@ define zeroext i64 @test_vpcmpsgew_v8i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26856,7 +26630,6 @@ define zeroext i64 @test_masked_vpcmpsgew_v8i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -26942,7 +26715,6 @@ define zeroext i64 @test_masked_vpcmpsgew_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27068,7 +26840,6 @@ define zeroext i32 @test_vpcmpsgew_v16i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27191,7 +26962,6 @@ define zeroext i32 @test_vpcmpsgew_v16i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27316,7 +27086,6 @@ define zeroext i32 @test_masked_vpcmpsgew_v16i1_v32i1_mask(i16 zeroext %__u, <4 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27443,7 +27212,6 @@ define zeroext i32 @test_masked_vpcmpsgew_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27574,7 +27342,6 @@ define zeroext i64 @test_vpcmpsgew_v16i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27702,7 +27469,6 @@ define zeroext i64 @test_vpcmpsgew_v16i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27832,7 +27598,6 @@ define zeroext i64 
@test_masked_vpcmpsgew_v16i1_v64i1_mask(i16 zeroext %__u, <4 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -27964,7 +27729,6 @@ define zeroext i64 @test_masked_vpcmpsgew_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -28321,7 +28085,6 @@ define zeroext i64 @test_vpcmpsgew_v32i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -28591,7 +28354,6 @@ define zeroext i64 @test_vpcmpsgew_v32i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -28955,7 +28717,6 @@ define zeroext i64 @test_masked_vpcmpsgew_v32i1_v64i1_mask(i32 zeroext %__u, <8 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -29237,7 +28998,6 @@ define zeroext i64 @test_masked_vpcmpsgew_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30119,7 +29879,6 @@ define zeroext i32 @test_vpcmpsged_v4i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30164,7 +29923,6 @@ define zeroext i32 @test_vpcmpsged_v4i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30226,7 +29984,6 @@ define zeroext i32 @test_masked_vpcmpsged_v4i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30291,7 +30048,6 @@ define zeroext i32 @test_masked_vpcmpsged_v4i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30341,7 +30097,6 @@ define zeroext i32 @test_vpcmpsged_v4i1_v32i1_mask_mem_b(<2 x i64> %__a, i32* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30405,7 +30160,6 @@ define zeroext i32 @test_masked_vpcmpsged_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30461,7 +30215,6 @@ define zeroext i64 @test_vpcmpsged_v4i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30512,7 +30265,6 @@ 
define zeroext i64 @test_vpcmpsged_v4i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30580,7 +30332,6 @@ define zeroext i64 @test_masked_vpcmpsged_v4i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30651,7 +30402,6 @@ define zeroext i64 @test_masked_vpcmpsged_v4i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30707,7 +30457,6 @@ define zeroext i64 @test_vpcmpsged_v4i1_v64i1_mask_mem_b(<2 x i64> %__a, i32* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -30777,7 +30526,6 @@ define zeroext i64 @test_masked_vpcmpsged_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31051,7 +30799,6 @@ define zeroext i32 @test_vpcmpsged_v8i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31125,7 +30872,6 @@ define zeroext i32 @test_vpcmpsged_v8i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31202,7 +30948,6 @@ define zeroext i32 @test_masked_vpcmpsged_v8i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31280,7 +31025,6 @@ define zeroext i32 @test_masked_vpcmpsged_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31358,7 +31102,6 @@ define zeroext i32 @test_vpcmpsged_v8i1_v32i1_mask_mem_b(<4 x i64> %__a, i32* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31436,7 +31179,6 @@ define zeroext i32 @test_masked_vpcmpsged_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31520,7 +31262,6 @@ define zeroext i64 @test_vpcmpsged_v8i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31599,7 +31340,6 @@ define zeroext i64 @test_vpcmpsged_v8i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: 
retq entry: @@ -31681,7 +31421,6 @@ define zeroext i64 @test_masked_vpcmpsged_v8i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31764,7 +31503,6 @@ define zeroext i64 @test_masked_vpcmpsged_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31847,7 +31585,6 @@ define zeroext i64 @test_vpcmpsged_v8i1_v64i1_mask_mem_b(<4 x i64> %__a, i32* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -31930,7 +31667,6 @@ define zeroext i64 @test_masked_vpcmpsged_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32052,7 +31788,6 @@ define zeroext i32 @test_vpcmpsged_v16i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32169,7 +31904,6 @@ define zeroext i32 @test_vpcmpsged_v16i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32289,7 +32023,6 @@ define zeroext i32 @test_masked_vpcmpsged_v16i1_v32i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32410,7 +32143,6 @@ define zeroext i32 @test_masked_vpcmpsged_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32531,7 +32263,6 @@ define zeroext i32 @test_vpcmpsged_v16i1_v32i1_mask_mem_b(<8 x i64> %__a, i32* % ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32652,7 +32383,6 @@ define zeroext i32 @test_masked_vpcmpsged_v16i1_v32i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32779,7 +32509,6 @@ define zeroext i64 @test_vpcmpsged_v16i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -32901,7 +32630,6 @@ define zeroext i64 @test_vpcmpsged_v16i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -33026,7 +32754,6 @@ define zeroext i64 @test_masked_vpcmpsged_v16i1_v64i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -33152,7 +32879,6 @@ define zeroext i64 
@test_masked_vpcmpsged_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -33278,7 +33004,6 @@ define zeroext i64 @test_vpcmpsged_v16i1_v64i1_mask_mem_b(<8 x i64> %__a, i32* % ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -33404,7 +33129,6 @@ define zeroext i64 @test_masked_vpcmpsged_v16i1_v64i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34275,7 +33999,6 @@ define zeroext i32 @test_vpcmpsgeq_v2i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34320,7 +34043,6 @@ define zeroext i32 @test_vpcmpsgeq_v2i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34374,7 +34096,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v2i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34431,7 +34152,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v2i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34481,7 +34201,6 @@ define zeroext i32 @test_vpcmpsgeq_v2i1_v32i1_mask_mem_b(<2 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34537,7 +34256,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v2i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34593,7 +34311,6 @@ define zeroext i64 @test_vpcmpsgeq_v2i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34644,7 +34361,6 @@ define zeroext i64 @test_vpcmpsgeq_v2i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34704,7 +34420,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v2i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34767,7 +34482,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v2i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34823,7 +34537,6 @@ define zeroext 
i64 @test_vpcmpsgeq_v2i1_v64i1_mask_mem_b(<2 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -34885,7 +34598,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v2i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -35807,7 +35519,6 @@ define zeroext i32 @test_vpcmpsgeq_v4i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -35854,7 +35565,6 @@ define zeroext i32 @test_vpcmpsgeq_v4i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -35920,7 +35630,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v4i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -35989,7 +35698,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v4i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36041,7 +35749,6 @@ define zeroext i32 @test_vpcmpsgeq_v4i1_v32i1_mask_mem_b(<4 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36109,7 +35816,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36167,7 +35873,6 @@ define zeroext i64 @test_vpcmpsgeq_v4i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36220,7 +35925,6 @@ define zeroext i64 @test_vpcmpsgeq_v4i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36292,7 +35996,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v4i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36367,7 +36070,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v4i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36425,7 +36127,6 @@ define zeroext i64 @test_vpcmpsgeq_v4i1_v64i1_mask_mem_b(<4 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ 
-36499,7 +36200,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36747,7 +36447,6 @@ define zeroext i32 @test_vpcmpsgeq_v8i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36819,7 +36518,6 @@ define zeroext i32 @test_vpcmpsgeq_v8i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36894,7 +36592,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v8i1_v32i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -36970,7 +36667,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37046,7 +36742,6 @@ define zeroext i32 @test_vpcmpsgeq_v8i1_v32i1_mask_mem_b(<8 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37122,7 +36817,6 @@ define zeroext i32 @test_masked_vpcmpsgeq_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37204,7 +36898,6 @@ define zeroext i64 @test_vpcmpsgeq_v8i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37281,7 +36974,6 @@ define zeroext i64 @test_vpcmpsgeq_v8i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37361,7 +37053,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v8i1_v64i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37442,7 +37133,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37523,7 +37213,6 @@ define zeroext i64 @test_vpcmpsgeq_v8i1_v64i1_mask_mem_b(<8 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37604,7 +37293,6 @@ define zeroext i64 @test_masked_vpcmpsgeq_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: 
vzeroupper ; NoVLX-NEXT: retq entry: @@ -37731,7 +37419,6 @@ define zeroext i32 @test_vpcmpultb_v16i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37853,7 +37540,6 @@ define zeroext i32 @test_vpcmpultb_v16i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -37978,7 +37664,6 @@ define zeroext i32 @test_masked_vpcmpultb_v16i1_v32i1_mask(i16 zeroext %__u, <2 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38104,7 +37789,6 @@ define zeroext i32 @test_masked_vpcmpultb_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38235,7 +37919,6 @@ define zeroext i64 @test_vpcmpultb_v16i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38362,7 +38045,6 @@ define zeroext i64 @test_vpcmpultb_v16i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38492,7 +38174,6 @@ define zeroext i64 @test_masked_vpcmpultb_v16i1_v64i1_mask(i16 zeroext %__u, <2 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38623,7 +38304,6 @@ define zeroext i64 @test_masked_vpcmpultb_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38678,7 +38358,6 @@ define zeroext i64 @test_vpcmpultb_v32i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38729,7 +38408,6 @@ define zeroext i64 @test_vpcmpultb_v32i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38791,7 +38469,6 @@ define zeroext i64 @test_masked_vpcmpultb_v32i1_v64i1_mask(i32 zeroext %__u, <4 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -38854,7 +38531,6 @@ define zeroext i64 @test_masked_vpcmpultb_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39066,7 +38742,6 @@ define zeroext i32 @test_vpcmpultw_v8i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39143,7 +38818,6 @@ define 
zeroext i32 @test_vpcmpultw_v8i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39223,7 +38897,6 @@ define zeroext i32 @test_masked_vpcmpultw_v8i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39304,7 +38977,6 @@ define zeroext i32 @test_masked_vpcmpultw_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39390,7 +39062,6 @@ define zeroext i64 @test_vpcmpultw_v8i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39472,7 +39143,6 @@ define zeroext i64 @test_vpcmpultw_v8i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39557,7 +39227,6 @@ define zeroext i64 @test_masked_vpcmpultw_v8i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39643,7 +39312,6 @@ define zeroext i64 @test_masked_vpcmpultw_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39770,7 +39438,6 @@ define zeroext i32 @test_vpcmpultw_v16i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -39893,7 +39560,6 @@ define zeroext i32 @test_vpcmpultw_v16i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -40019,7 +39685,6 @@ define zeroext i32 @test_masked_vpcmpultw_v16i1_v32i1_mask(i16 zeroext %__u, <4 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -40146,7 +39811,6 @@ define zeroext i32 @test_masked_vpcmpultw_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -40278,7 +39942,6 @@ define zeroext i64 @test_vpcmpultw_v16i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -40406,7 +40069,6 @@ define zeroext i64 @test_vpcmpultw_v16i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -40537,7 +40199,6 @@ define zeroext i64 
@test_masked_vpcmpultw_v16i1_v64i1_mask(i16 zeroext %__u, <4 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -40669,7 +40330,6 @@ define zeroext i64 @test_masked_vpcmpultw_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -41028,7 +40688,6 @@ define zeroext i64 @test_vpcmpultw_v32i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -41298,7 +40957,6 @@ define zeroext i64 @test_vpcmpultw_v32i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -41664,7 +41322,6 @@ define zeroext i64 @test_masked_vpcmpultw_v32i1_v64i1_mask(i32 zeroext %__u, <8 ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -41946,7 +41603,6 @@ define zeroext i64 @test_masked_vpcmpultw_v32i1_v64i1_mask_mem(i32 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -42849,7 +42505,6 @@ define zeroext i32 @test_vpcmpultd_v4i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -42894,7 +42549,6 @@ define zeroext i32 @test_vpcmpultd_v4i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -42959,7 +42613,6 @@ define zeroext i32 @test_masked_vpcmpultd_v4i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43026,7 +42679,6 @@ define zeroext i32 @test_masked_vpcmpultd_v4i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43077,7 +42729,6 @@ define zeroext i32 @test_vpcmpultd_v4i1_v32i1_mask_mem_b(<2 x i64> %__a, i32* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43144,7 +42795,6 @@ define zeroext i32 @test_masked_vpcmpultd_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43201,7 +42851,6 @@ define zeroext i64 @test_vpcmpultd_v4i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43252,7 +42901,6 @@ 
define zeroext i64 @test_vpcmpultd_v4i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43323,7 +42971,6 @@ define zeroext i64 @test_masked_vpcmpultd_v4i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43396,7 +43043,6 @@ define zeroext i64 @test_masked_vpcmpultd_v4i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43453,7 +43099,6 @@ define zeroext i64 @test_vpcmpultd_v4i1_v64i1_mask_mem_b(<2 x i64> %__a, i32* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43526,7 +43171,6 @@ define zeroext i64 @test_masked_vpcmpultd_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43800,7 +43444,6 @@ define zeroext i32 @test_vpcmpultd_v8i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43874,7 +43517,6 @@ define zeroext i32 @test_vpcmpultd_v8i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -43951,7 +43593,6 @@ define zeroext i32 @test_masked_vpcmpultd_v8i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44029,7 +43670,6 @@ define zeroext i32 @test_masked_vpcmpultd_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44107,7 +43747,6 @@ define zeroext i32 @test_vpcmpultd_v8i1_v32i1_mask_mem_b(<4 x i64> %__a, i32* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44185,7 +43824,6 @@ define zeroext i32 @test_masked_vpcmpultd_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44269,7 +43907,6 @@ define zeroext i64 @test_vpcmpultd_v8i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44348,7 +43985,6 @@ define zeroext i64 @test_vpcmpultd_v8i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: 
retq entry: @@ -44430,7 +44066,6 @@ define zeroext i64 @test_masked_vpcmpultd_v8i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44513,7 +44148,6 @@ define zeroext i64 @test_masked_vpcmpultd_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44596,7 +44230,6 @@ define zeroext i64 @test_vpcmpultd_v8i1_v64i1_mask_mem_b(<4 x i64> %__a, i32* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44679,7 +44312,6 @@ define zeroext i64 @test_masked_vpcmpultd_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44801,7 +44433,6 @@ define zeroext i32 @test_vpcmpultd_v16i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -44918,7 +44549,6 @@ define zeroext i32 @test_vpcmpultd_v16i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45038,7 +44668,6 @@ define zeroext i32 @test_masked_vpcmpultd_v16i1_v32i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45159,7 +44788,6 @@ define zeroext i32 @test_masked_vpcmpultd_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45280,7 +44908,6 @@ define zeroext i32 @test_vpcmpultd_v16i1_v32i1_mask_mem_b(<8 x i64> %__a, i32* % ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45401,7 +45028,6 @@ define zeroext i32 @test_masked_vpcmpultd_v16i1_v32i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45528,7 +45154,6 @@ define zeroext i64 @test_vpcmpultd_v16i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45650,7 +45275,6 @@ define zeroext i64 @test_vpcmpultd_v16i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45775,7 +45399,6 @@ define zeroext i64 @test_masked_vpcmpultd_v16i1_v64i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -45901,7 +45524,6 @@ define zeroext i64 
@test_masked_vpcmpultd_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -46027,7 +45649,6 @@ define zeroext i64 @test_vpcmpultd_v16i1_v64i1_mask_mem_b(<8 x i64> %__a, i32* % ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -46153,7 +45774,6 @@ define zeroext i64 @test_masked_vpcmpultd_v16i1_v64i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47055,7 +46675,6 @@ define zeroext i32 @test_vpcmpultq_v2i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47100,7 +46719,6 @@ define zeroext i32 @test_vpcmpultq_v2i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47157,7 +46775,6 @@ define zeroext i32 @test_masked_vpcmpultq_v2i1_v32i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47216,7 +46833,6 @@ define zeroext i32 @test_masked_vpcmpultq_v2i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47267,7 +46883,6 @@ define zeroext i32 @test_vpcmpultq_v2i1_v32i1_mask_mem_b(<2 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47326,7 +46941,6 @@ define zeroext i32 @test_masked_vpcmpultq_v2i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47383,7 +46997,6 @@ define zeroext i64 @test_vpcmpultq_v2i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47434,7 +47047,6 @@ define zeroext i64 @test_vpcmpultq_v2i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47497,7 +47109,6 @@ define zeroext i64 @test_masked_vpcmpultq_v2i1_v64i1_mask(i8 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47562,7 +47173,6 @@ define zeroext i64 @test_masked_vpcmpultq_v2i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47619,7 +47229,6 @@ define zeroext 
i64 @test_vpcmpultq_v2i1_v64i1_mask_mem_b(<2 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -47684,7 +47293,6 @@ define zeroext i64 @test_masked_vpcmpultq_v2i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48615,7 +48223,6 @@ define zeroext i32 @test_vpcmpultq_v4i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48662,7 +48269,6 @@ define zeroext i32 @test_vpcmpultq_v4i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48729,7 +48335,6 @@ define zeroext i32 @test_masked_vpcmpultq_v4i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48798,7 +48403,6 @@ define zeroext i32 @test_masked_vpcmpultq_v4i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48851,7 +48455,6 @@ define zeroext i32 @test_vpcmpultq_v4i1_v32i1_mask_mem_b(<4 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48920,7 +48523,6 @@ define zeroext i32 @test_masked_vpcmpultq_v4i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -48979,7 +48581,6 @@ define zeroext i64 @test_vpcmpultq_v4i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49032,7 +48633,6 @@ define zeroext i64 @test_vpcmpultq_v4i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49105,7 +48705,6 @@ define zeroext i64 @test_masked_vpcmpultq_v4i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49180,7 +48779,6 @@ define zeroext i64 @test_masked_vpcmpultq_v4i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49239,7 +48837,6 @@ define zeroext i64 @test_vpcmpultq_v4i1_v64i1_mask_mem_b(<4 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ 
-49314,7 +48911,6 @@ define zeroext i64 @test_masked_vpcmpultq_v4i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49562,7 +49158,6 @@ define zeroext i32 @test_vpcmpultq_v8i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49634,7 +49229,6 @@ define zeroext i32 @test_vpcmpultq_v8i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49709,7 +49303,6 @@ define zeroext i32 @test_masked_vpcmpultq_v8i1_v32i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49785,7 +49378,6 @@ define zeroext i32 @test_masked_vpcmpultq_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49861,7 +49453,6 @@ define zeroext i32 @test_vpcmpultq_v8i1_v32i1_mask_mem_b(<8 x i64> %__a, i64* %_ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -49937,7 +49528,6 @@ define zeroext i32 @test_masked_vpcmpultq_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -50019,7 +49609,6 @@ define zeroext i64 @test_vpcmpultq_v8i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -50096,7 +49685,6 @@ define zeroext i64 @test_vpcmpultq_v8i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -50176,7 +49764,6 @@ define zeroext i64 @test_masked_vpcmpultq_v8i1_v64i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -50257,7 +49844,6 @@ define zeroext i64 @test_masked_vpcmpultq_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -50338,7 +49924,6 @@ define zeroext i64 @test_vpcmpultq_v8i1_v64i1_mask_mem_b(<8 x i64> %__a, i64* %_ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -50419,7 +50004,6 @@ define zeroext i64 @test_masked_vpcmpultq_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: 
vzeroupper ; NoVLX-NEXT: retq entry: @@ -51213,7 +50797,6 @@ define zeroext i32 @test_vcmpoeqps_v4i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51255,7 +50838,6 @@ define zeroext i32 @test_vcmpoeqps_v4i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51299,7 +50881,6 @@ define zeroext i32 @test_vcmpoeqps_v4i1_v32i1_mask_mem_b(<2 x i64> %__a, float* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51351,7 +50932,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v4i1_v32i1_mask(i4 zeroext %__u, <2 x ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51403,7 +50983,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v4i1_v32i1_mask_mem(i4 zeroext %__u, < ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51457,7 +51036,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v4i1_v32i1_mask_mem_b(i4 zeroext %__u, ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51511,7 +51089,6 @@ define zeroext i64 @test_vcmpoeqps_v4i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51559,7 +51136,6 @@ define zeroext i64 @test_vcmpoeqps_v4i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51609,7 +51185,6 @@ define zeroext i64 @test_vcmpoeqps_v4i1_v64i1_mask_mem_b(<2 x i64> %__a, float* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51667,7 +51242,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v4i1_v64i1_mask(i4 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51725,7 +51299,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v4i1_v64i1_mask_mem(i4 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -51785,7 +51358,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v4i1_v64i1_mask_mem_b(i4 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52059,7 +51631,6 @@ define zeroext i32 @test_vcmpoeqps_v8i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; 
NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52133,7 +51704,6 @@ define zeroext i32 @test_vcmpoeqps_v8i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52208,7 +51778,6 @@ define zeroext i32 @test_vcmpoeqps_v8i1_v32i1_mask_mem_b(<4 x i64> %__a, float* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52286,7 +51855,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v8i1_v32i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52364,7 +51932,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v8i1_v32i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52443,7 +52010,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v8i1_v32i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52528,7 +52094,6 @@ define zeroext i64 @test_vcmpoeqps_v8i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52607,7 +52172,6 @@ define zeroext i64 @test_vcmpoeqps_v8i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52687,7 +52251,6 @@ define zeroext i64 @test_vcmpoeqps_v8i1_v64i1_mask_mem_b(<4 x i64> %__a, float* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52770,7 +52333,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v8i1_v64i1_mask(i8 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52853,7 +52415,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v8i1_v64i1_mask_mem(i8 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -52937,7 +52498,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v8i1_v64i1_mask_mem_b(i8 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53060,7 +52620,6 @@ define zeroext i32 @test_vcmpoeqps_v16i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53177,7 +52736,6 @@ define zeroext i32 @test_vcmpoeqps_v16i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: 
popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53295,7 +52853,6 @@ define zeroext i32 @test_vcmpoeqps_v16i1_v32i1_mask_mem_b(<8 x i64> %__a, float* ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53416,7 +52973,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v16i1_v32i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53537,7 +53093,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v16i1_v32i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53659,7 +53214,6 @@ define zeroext i32 @test_masked_vcmpoeqps_v16i1_v32i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53828,7 +53382,6 @@ define zeroext i64 @test_vcmpoeqps_v16i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %_ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -53950,7 +53503,6 @@ define zeroext i64 @test_vcmpoeqps_v16i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -54073,7 +53625,6 @@ define zeroext i64 @test_vcmpoeqps_v16i1_v64i1_mask_mem_b(<8 x i64> %__a, float* ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -54199,7 +53750,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v16i1_v64i1_mask(i16 zeroext %__u, <8 ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -54325,7 +53875,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v16i1_v64i1_mask_mem(i16 zeroext %__u, ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -54452,7 +54001,6 @@ define zeroext i64 @test_masked_vcmpoeqps_v16i1_v64i1_mask_mem_b(i16 zeroext %__ ; NoVLX-NEXT: popq %r14 ; NoVLX-NEXT: popq %r15 ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55306,7 +54854,6 @@ define zeroext i32 @test_vcmpoeqpd_v2i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55348,7 +54895,6 @@ define zeroext i32 @test_vcmpoeqpd_v2i1_v32i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55392,7 +54938,6 @@ define zeroext i32 @test_vcmpoeqpd_v2i1_v32i1_mask_mem_b(<2 x i64> %__a, double* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; 
NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55443,7 +54988,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v2i1_v32i1_mask(i2 zeroext %__u, <2 x ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55494,7 +55038,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v2i1_v32i1_mask_mem(i2 zeroext %__u, < ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55547,7 +55090,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v2i1_v32i1_mask_mem_b(i2 zeroext %__u, ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55601,7 +55143,6 @@ define zeroext i64 @test_vcmpoeqpd_v2i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55649,7 +55190,6 @@ define zeroext i64 @test_vcmpoeqpd_v2i1_v64i1_mask_mem(<2 x i64> %__a, <2 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55699,7 +55239,6 @@ define zeroext i64 @test_vcmpoeqpd_v2i1_v64i1_mask_mem_b(<2 x i64> %__a, double* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55756,7 +55295,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v2i1_v64i1_mask(i2 zeroext %__u, <2 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55813,7 +55351,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v2i1_v64i1_mask_mem(i2 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -55872,7 +55409,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v2i1_v64i1_mask_mem_b(i2 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -56692,7 +56228,6 @@ define zeroext i32 @test_vcmpoeqpd_v4i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -56736,7 +56271,6 @@ define zeroext i32 @test_vcmpoeqpd_v4i1_v32i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -56782,7 +56316,6 @@ define zeroext i32 @test_vcmpoeqpd_v4i1_v32i1_mask_mem_b(<4 x i64> %__a, double* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -56836,7 +56369,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v4i1_v32i1_mask(i4 zeroext %__u, <4 x ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: 
movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -56890,7 +56422,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v4i1_v32i1_mask_mem(i4 zeroext %__u, < ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -56946,7 +56477,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v4i1_v32i1_mask_mem_b(i4 zeroext %__u, ; NoVLX-NEXT: movl {{[0-9]+}}(%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57002,7 +56532,6 @@ define zeroext i64 @test_vcmpoeqpd_v4i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__ ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57052,7 +56581,6 @@ define zeroext i64 @test_vcmpoeqpd_v4i1_v64i1_mask_mem(<4 x i64> %__a, <4 x i64> ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57104,7 +56632,6 @@ define zeroext i64 @test_vcmpoeqpd_v4i1_v64i1_mask_mem_b(<4 x i64> %__a, double* ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57164,7 +56691,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v4i1_v64i1_mask(i4 zeroext %__u, <4 x ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57224,7 +56750,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v4i1_v64i1_mask_mem(i4 zeroext %__u, < ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57286,7 +56811,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v4i1_v64i1_mask_mem_b(i4 zeroext %__u, ; NoVLX-NEXT: orq %rcx, %rax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57590,7 +57114,6 @@ define zeroext i32 @test_vcmpoeqpd_v8i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__ ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57662,7 +57185,6 @@ define zeroext i32 @test_vcmpoeqpd_v8i1_v32i1_mask_mem(<8 x i64> %__a, <8 x i64> ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57735,7 +57257,6 @@ define zeroext i32 @test_vcmpoeqpd_v8i1_v32i1_mask_mem_b(<8 x i64> %__a, double* ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57811,7 +57332,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v8i1_v32i1_mask(i8 zeroext %__u, <8 x ; NoVLX-NEXT: movl (%rsp), %eax ; NoVLX-NEXT: movq %rbp, %rsp ; NoVLX-NEXT: popq %rbp -; NoVLX-NEXT: .cfi_def_cfa %rsp, 8 ; NoVLX-NEXT: vzeroupper ; NoVLX-NEXT: retq entry: @@ -57887,7 +57407,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v8i1_v32i1_mask_mem(i8 
zeroext %__u, <
; NoVLX-NEXT: movl (%rsp), %eax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -57964,7 +57483,6 @@ define zeroext i32 @test_masked_vcmpoeqpd_v8i1_v32i1_mask_mem_b(i8 zeroext %__u,
; NoVLX-NEXT: movl (%rsp), %eax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -58097,7 +57615,6 @@ define zeroext i64 @test_vcmpoeqpd_v8i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__
; NoVLX-NEXT: orq %rcx, %rax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -58174,7 +57691,6 @@ define zeroext i64 @test_vcmpoeqpd_v8i1_v64i1_mask_mem(<8 x i64> %__a, <8 x i64>
; NoVLX-NEXT: orq %rcx, %rax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -58252,7 +57768,6 @@ define zeroext i64 @test_vcmpoeqpd_v8i1_v64i1_mask_mem_b(<8 x i64> %__a, double*
; NoVLX-NEXT: orq %rcx, %rax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -58333,7 +57848,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v8i1_v64i1_mask(i8 zeroext %__u, <8 x
; NoVLX-NEXT: orq %rcx, %rax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -58414,7 +57928,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v8i1_v64i1_mask_mem(i8 zeroext %__u, <
; NoVLX-NEXT: orq %rcx, %rax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
@@ -58496,7 +58009,6 @@ define zeroext i64 @test_masked_vcmpoeqpd_v8i1_v64i1_mask_mem_b(i8 zeroext %__u,
; NoVLX-NEXT: orq %rcx, %rax
; NoVLX-NEXT: movq %rbp, %rsp
; NoVLX-NEXT: popq %rbp
-; NoVLX-NEXT: .cfi_def_cfa %rsp, 8
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
entry:
diff --git a/llvm/test/CodeGen/X86/bitcast-and-setcc-256.ll b/llvm/test/CodeGen/X86/bitcast-and-setcc-256.ll
index c48222000c6..e197713c679 100644
--- a/llvm/test/CodeGen/X86/bitcast-and-setcc-256.ll
+++ b/llvm/test/CodeGen/X86/bitcast-and-setcc-256.ll
@@ -439,7 +439,6 @@ define i32 @v32i8(<32 x i8> %a, <32 x i8> %b, <32 x i8> %c, <32 x i8> %d) {
; AVX512F-NEXT: movl (%rsp), %eax
; AVX512F-NEXT: movq %rbp, %rsp
; AVX512F-NEXT: popq %rbp
-; AVX512F-NEXT: .cfi_def_cfa %rsp, 8
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
diff --git a/llvm/test/CodeGen/X86/bitcast-and-setcc-512.ll b/llvm/test/CodeGen/X86/bitcast-and-setcc-512.ll
index f5fe395eaf3..f6cfbbb4044 100644
--- a/llvm/test/CodeGen/X86/bitcast-and-setcc-512.ll
+++ b/llvm/test/CodeGen/X86/bitcast-and-setcc-512.ll
@@ -594,7 +594,6 @@ define i32 @v32i16(<32 x i16> %a, <32 x i16> %b, <32 x i16> %c, <32 x i16> %d) {
; AVX512F-NEXT: movl (%rsp), %eax
; AVX512F-NEXT: movq %rbp, %rsp
; AVX512F-NEXT: popq %rbp
-; AVX512F-NEXT: .cfi_def_cfa %rsp, 8
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
@@ -1240,7 +1239,6 @@ define i64 @v64i8(<64 x i8> %a, <64 x i8> %b, <64 x i8> %c, <64 x i8> %d) {
; AVX1-NEXT: orq %rcx, %rax
; AVX1-NEXT: movq %rbp, %rsp
; AVX1-NEXT: popq %rbp
-; AVX1-NEXT: .cfi_def_cfa %rsp, 8
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
;
@@ -1459,7 +1457,6 @@ define i64 @v64i8(<64 x i8> %a, <64 x i8> %b, <64 x i8> %c, <64 x i8> %d) {
; AVX2-NEXT: orq %rcx, %rax
; AVX2-NEXT: movq %rbp, %rsp
; AVX2-NEXT: popq %rbp
-; AVX2-NEXT: .cfi_def_cfa %rsp, 8
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
@@ -1502,7 +1499,6 @@ define i64 @v64i8(<64 x i8> %a, <64 x i8> %b, <64 x i8> %c, <64 x i8> %d) {
; AVX512F-NEXT: orq %rcx, %rax
; AVX512F-NEXT: movq %rbp, %rsp
; AVX512F-NEXT: popq %rbp
-; AVX512F-NEXT: .cfi_def_cfa %rsp, 8
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
diff --git a/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-zext.ll b/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-zext.ll
index 1959000b859..4ed55ac0919 100644
--- a/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-zext.ll
+++ b/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-zext.ll
@@ -321,17 +321,11 @@ define <16 x i8> @ext_i16_16i8(i16 %a0) {
; AVX512-NEXT: vpinsrb $15, %r9d, %xmm0, %xmm0
; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT: popq %rbx
-; AVX512-NEXT: .cfi_def_cfa_offset 48
; AVX512-NEXT: popq %r12
-; AVX512-NEXT: .cfi_def_cfa_offset 40
; AVX512-NEXT: popq %r13
-; AVX512-NEXT: .cfi_def_cfa_offset 32
; AVX512-NEXT: popq %r14
-; AVX512-NEXT: .cfi_def_cfa_offset 24
; AVX512-NEXT: popq %r15
-; AVX512-NEXT: .cfi_def_cfa_offset 16
; AVX512-NEXT: popq %rbp
-; AVX512-NEXT: .cfi_def_cfa_offset 8
; AVX512-NEXT: retq
%1 = bitcast i16 %a0 to <16 x i1>
%2 = zext <16 x i1> %1 to <16 x i8>
diff --git a/llvm/test/CodeGen/X86/bitcast-setcc-256.ll b/llvm/test/CodeGen/X86/bitcast-setcc-256.ll
index 76160517546..ee2dac1d466 100644
--- a/llvm/test/CodeGen/X86/bitcast-setcc-256.ll
+++ b/llvm/test/CodeGen/X86/bitcast-setcc-256.ll
@@ -204,7 +204,6 @@ define i32 @v32i8(<32 x i8> %a, <32 x i8> %b) {
; AVX512F-NEXT: movl (%rsp), %eax
; AVX512F-NEXT: movq %rbp, %rsp
; AVX512F-NEXT: popq %rbp
-; AVX512F-NEXT: .cfi_def_cfa %rsp, 8
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
diff --git a/llvm/test/CodeGen/X86/bitcast-setcc-512.ll b/llvm/test/CodeGen/X86/bitcast-setcc-512.ll
index ef981080bb3..2b73c6e16bd 100644
--- a/llvm/test/CodeGen/X86/bitcast-setcc-512.ll
+++ b/llvm/test/CodeGen/X86/bitcast-setcc-512.ll
@@ -203,7 +203,6 @@ define i32 @v32i16(<32 x i16> %a, <32 x i16> %b) {
; AVX512F-NEXT: movl (%rsp), %eax
; AVX512F-NEXT: movq %rbp, %rsp
; AVX512F-NEXT: popq %rbp
-; AVX512F-NEXT: .cfi_def_cfa %rsp, 8
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
@@ -770,7 +769,6 @@ define i64 @v64i8(<64 x i8> %a, <64 x i8> %b) {
; AVX1-NEXT: orq %rcx, %rax
; AVX1-NEXT: movq %rbp, %rsp
; AVX1-NEXT: popq %rbp
-; AVX1-NEXT: .cfi_def_cfa %rsp, 8
; AVX1-NEXT: vzeroupper
; AVX1-NEXT: retq
;
@@ -985,7 +983,6 @@ define i64 @v64i8(<64 x i8> %a, <64 x i8> %b) {
; AVX2-NEXT: orq %rcx, %rax
; AVX2-NEXT: movq %rbp, %rsp
; AVX2-NEXT: popq %rbp
-; AVX2-NEXT: .cfi_def_cfa %rsp, 8
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
@@ -1024,7 +1021,6 @@ define i64 @v64i8(<64 x i8> %a, <64 x i8> %b) {
; AVX512F-NEXT: orq %rcx, %rax
; AVX512F-NEXT: movq %rbp, %rsp
; AVX512F-NEXT: popq %rbp
-; AVX512F-NEXT: .cfi_def_cfa %rsp, 8
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
diff --git a/llvm/test/CodeGen/X86/bool-vector.ll b/llvm/test/CodeGen/X86/bool-vector.ll
index 692d992df76..eb40744c54d 100644
--- a/llvm/test/CodeGen/X86/bool-vector.ll
+++ b/llvm/test/CodeGen/X86/bool-vector.ll
@@ -93,7 +93,6 @@ define i32 @PR15215_good(<4 x i32> %input) {
; X32-NEXT: leal (%eax,%edx,4), %eax
; X32-NEXT: leal (%eax,%esi,8), %eax
; X32-NEXT: popl %esi
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
;
; X32-SSE2-LABEL: PR15215_good:
@@ -116,7 +115,6 @@ define i32 @PR15215_good(<4 x i32> %input) {
; X32-SSE2-NEXT: leal (%eax,%edx,4), %eax
; X32-SSE2-NEXT: leal (%eax,%esi,8), %eax
; X32-SSE2-NEXT: popl %esi
-; X32-SSE2-NEXT: .cfi_def_cfa_offset 4
; X32-SSE2-NEXT: retl
;
; X32-AVX2-LABEL: PR15215_good:
@@ -136,7 +134,6 @@ define i32 @PR15215_good(<4 x i32> %input) {
; X32-AVX2-NEXT: leal (%eax,%edx,4), %eax
; X32-AVX2-NEXT: leal (%eax,%esi,8), %eax
; X32-AVX2-NEXT: popl %esi
-; X32-AVX2-NEXT: .cfi_def_cfa_offset 4
; X32-AVX2-NEXT: retl
;
; X64-LABEL: PR15215_good:
diff --git a/llvm/test/CodeGen/X86/cmp.ll b/llvm/test/CodeGen/X86/cmp.ll
index 6f9abae6a71..82e133d2576 100644
--- a/llvm/test/CodeGen/X86/cmp.ll
+++ b/llvm/test/CodeGen/X86/cmp.ll
@@ -247,13 +247,10 @@ define i32 @test12() ssp uwtable {
; CHECK-NEXT: # BB#1: # %T
; CHECK-NEXT: movl $1, %eax # encoding: [0xb8,0x01,0x00,0x00,0x00]
; CHECK-NEXT: popq %rcx # encoding: [0x59]
-; CHECK-NEXT: .cfi_def_cfa_offset 8
; CHECK-NEXT: retq # encoding: [0xc3]
; CHECK-NEXT: .LBB12_2: # %F
-; CHECK-NEXT: .cfi_def_cfa_offset 16
; CHECK-NEXT: movl $2, %eax # encoding: [0xb8,0x02,0x00,0x00,0x00]
; CHECK-NEXT: popq %rcx # encoding: [0x59]
-; CHECK-NEXT: .cfi_def_cfa_offset 8
; CHECK-NEXT: retq # encoding: [0xc3]
entry:
%tmp1 = call zeroext i1 @test12b()
diff --git a/llvm/test/CodeGen/X86/emutls-pie.ll b/llvm/test/CodeGen/X86/emutls-pie.ll
index f4561fcbd35..3c312a92669 100644
--- a/llvm/test/CodeGen/X86/emutls-pie.ll
+++ b/llvm/test/CodeGen/X86/emutls-pie.ll
@@ -18,16 +18,13 @@ define i32 @my_get_xyz() {
; X32-NEXT: calll my_emutls_get_address@PLT
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $8, %esp
-; X32-NEXT: .cfi_def_cfa_offset 8
; X32-NEXT: popl %ebx
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: my_get_xyz:
; X64: movq my_emutls_v_xyz@GOTPCREL(%rip), %rdi
; X64-NEXT: callq my_emutls_get_address@PLT
; X64-NEXT: movl (%rax), %eax
; X64-NEXT: popq %rcx
-; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq
entry:
@@ -47,16 +44,13 @@ define i32 @f1() {
; X32-NEXT: calll __emutls_get_address@PLT
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $8, %esp
-; X32-NEXT: .cfi_def_cfa_offset 8
; X32-NEXT: popl %ebx
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: f1:
; X64: leaq __emutls_v.i(%rip), %rdi
; X64-NEXT: callq __emutls_get_address@PLT
; X64-NEXT: movl (%rax), %eax
; X64-NEXT: popq %rcx
-; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq
entry:
diff --git a/llvm/test/CodeGen/X86/emutls.ll b/llvm/test/CodeGen/X86/emutls.ll
index 2321cd2fc28..8c0ba903659 100644
--- a/llvm/test/CodeGen/X86/emutls.ll
+++ b/llvm/test/CodeGen/X86/emutls.ll
@@ -16,14 +16,12 @@ define i32 @my_get_xyz() {
; X32-NEXT: calll my_emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: my_get_xyz:
; X64: movl $my_emutls_v_xyz, %edi
; X64-NEXT: callq my_emutls_get_address
; X64-NEXT: movl (%rax), %eax
; X64-NEXT: popq %rcx
-; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq
entry:
@@ -47,14 +45,12 @@ define i32 @f1() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: f1:
; X64: movl $__emutls_v.i1, %edi
; X64-NEXT: callq __emutls_get_address
; X64-NEXT: movl (%rax), %eax
; X64-NEXT: popq %rcx
-; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq
entry:
@@ -67,13 +63,11 @@ define i32* @f2() {
; X32: movl $__emutls_v.i1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: f2:
; X64: movl $__emutls_v.i1, %edi
; X64-NEXT: callq __emutls_get_address
; X64-NEXT: popq %rcx
-; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq
entry:
@@ -98,7 +92,6 @@ define i32* @f4() {
; X32: movl $__emutls_v.i2, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -123,7 +116,6 @@ define i32* @f6() {
; X32: movl $__emutls_v.i3, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -136,7 +128,6 @@ define i32 @f7() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -149,7 +140,6 @@ define i32* @f8() {
; X32: movl $__emutls_v.i4, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -162,7 +152,6 @@ define i32 @f9() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -175,7 +164,6 @@ define i32* @f10() {
; X32: movl $__emutls_v.i5, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -188,7 +176,6 @@ define i16 @f11() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movzwl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -202,7 +189,6 @@ define i32 @f12() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movswl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -217,7 +203,6 @@ define i8 @f13() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movb (%eax), %al
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
@@ -231,7 +216,6 @@ define i32 @f14() {
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movsbl (%eax), %eax
; X32-NEXT: addl $12, %esp
-; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
entry:
diff --git a/llvm/test/CodeGen/X86/epilogue-cfi-fp.ll b/llvm/test/CodeGen/X86/epilogue-cfi-fp.ll
deleted file mode 100644
index c2fe1c7eaac..00000000000
--- a/llvm/test/CodeGen/X86/epilogue-cfi-fp.ll
+++ /dev/null
@@ -1,43 +0,0 @@
-; RUN: llc -O0 %s -o - | FileCheck %s
-
-; ModuleID = 'epilogue-cfi-fp.c'
-source_filename = "epilogue-cfi-fp.c"
-target datalayout = "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128"
-target triple = "i686-pc-linux"
-
-; Function Attrs: noinline nounwind
-define i32 @foo(i32 %i, i32 %j, i32 %k, i32 %l, i32 %m) #0 {
-
-; CHECK-LABEL: foo:
-; CHECK: popl %ebp
-; CHECK-NEXT: .cfi_def_cfa %esp, 4
-; CHECK-NEXT: retl
-
-entry:
- %i.addr = alloca i32, align 4
- %j.addr = alloca i32, align 4
- %k.addr = alloca i32, align 4
- %l.addr = alloca i32, align 4
- %m.addr = alloca i32, align 4
- store i32 %i, i32* %i.addr, align 4
- store i32 %j, i32* %j.addr, align 4
- store i32 %k, i32* %k.addr, align 4
- store i32 %l, i32* %l.addr, align 4
- store i32 %m, i32* %m.addr, align 4
- ret i32 0
-}
-
-attributes #0 = { "no-frame-pointer-elim"="true" }
-
-!llvm.dbg.cu = !{!0}
-!llvm.module.flags = !{!3, !4, !5, !6, !7}
-
-!0 = distinct !DICompileUnit(language: DW_LANG_C99, file: !1, producer: "clang version 5.0.0 (http://llvm.org/git/clang.git 3f8116e6a2815b1d5f3491493938d0c63c9f42c9) (http://llvm.org/git/llvm.git 4fde77f8f1a8e4482e69b6a7484bc7d1b99b3c0a)", isOptimized: false,
runtimeVersion: 0, emissionKind: FullDebug, enums: !2) -!1 = !DIFile(filename: "epilogue-cfi-fp.c", directory: "epilogue-dwarf/test") -!2 = !{} -!3 = !{i32 1, !"NumRegisterParameters", i32 0} -!4 = !{i32 2, !"Dwarf Version", i32 4} -!5 = !{i32 2, !"Debug Info Version", i32 3} -!6 = !{i32 1, !"wchar_size", i32 4} -!7 = !{i32 7, !"PIC Level", i32 2} - diff --git a/llvm/test/CodeGen/X86/epilogue-cfi-no-fp.ll b/llvm/test/CodeGen/X86/epilogue-cfi-no-fp.ll deleted file mode 100644 index 79d6f478de8..00000000000 --- a/llvm/test/CodeGen/X86/epilogue-cfi-no-fp.ll +++ /dev/null @@ -1,46 +0,0 @@ -; RUN: llc -O0 < %s | FileCheck %s - -; ModuleID = 'epilogue-cfi-no-fp.c' -source_filename = "epilogue-cfi-no-fp.c" -target datalayout = "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128" -target triple = "i686-pc-linux" - -; Function Attrs: noinline nounwind -define i32 @foo(i32 %i, i32 %j, i32 %k, i32 %l, i32 %m) { -; CHECK-LABEL: foo: -; CHECK: addl $20, %esp -; CHECK-NEXT: .cfi_def_cfa_offset 16 -; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 12 -; CHECK-NEXT: popl %edi -; CHECK-NEXT: .cfi_def_cfa_offset 8 -; CHECK-NEXT: popl %ebx -; CHECK-NEXT: .cfi_def_cfa_offset 4 -; CHECK-NEXT: retl -entry: - %i.addr = alloca i32, align 4 - %j.addr = alloca i32, align 4 - %k.addr = alloca i32, align 4 - %l.addr = alloca i32, align 4 - %m.addr = alloca i32, align 4 - store i32 %i, i32* %i.addr, align 4 - store i32 %j, i32* %j.addr, align 4 - store i32 %k, i32* %k.addr, align 4 - store i32 %l, i32* %l.addr, align 4 - store i32 %m, i32* %m.addr, align 4 - ret i32 0 -} - -!llvm.dbg.cu = !{!0} -!llvm.module.flags = !{!3, !4, !5, !6, !7} - -!0 = distinct !DICompileUnit(language: DW_LANG_C99, file: !1, producer: "clang version 5.0.0 (http://llvm.org/git/clang.git 3f8116e6a2815b1d5f3491493938d0c63c9f42c9) (http://llvm.org/git/llvm.git 4fde77f8f1a8e4482e69b6a7484bc7d1b99b3c0a)", isOptimized: false, runtimeVersion: 0, emissionKind: FullDebug, enums: !2) -!1 = !DIFile(filename: "epilogue-cfi-no-fp.c", directory: "epilogue-dwarf/test") -!2 = !{} -!3 = !{i32 1, !"NumRegisterParameters", i32 0} -!4 = !{i32 2, !"Dwarf Version", i32 4} -!5 = !{i32 2, !"Debug Info Version", i32 3} -!6 = !{i32 1, !"wchar_size", i32 4} -!7 = !{i32 7, !"PIC Level", i32 2} - - diff --git a/llvm/test/CodeGen/X86/fast-isel-int-float-conversion.ll b/llvm/test/CodeGen/X86/fast-isel-int-float-conversion.ll index 57b50abab53..3e69710868b 100644 --- a/llvm/test/CodeGen/X86/fast-isel-int-float-conversion.ll +++ b/llvm/test/CodeGen/X86/fast-isel-int-float-conversion.ll @@ -31,7 +31,6 @@ define double @int_to_double_rr(i32 %a) { ; SSE2_X86-NEXT: fldl (%esp) ; SSE2_X86-NEXT: movl %ebp, %esp ; SSE2_X86-NEXT: popl %ebp -; SSE2_X86-NEXT: .cfi_def_cfa %esp, 4 ; SSE2_X86-NEXT: retl ; ; AVX_X86-LABEL: int_to_double_rr: @@ -48,7 +47,6 @@ define double @int_to_double_rr(i32 %a) { ; AVX_X86-NEXT: fldl (%esp) ; AVX_X86-NEXT: movl %ebp, %esp ; AVX_X86-NEXT: popl %ebp -; AVX_X86-NEXT: .cfi_def_cfa %esp, 4 ; AVX_X86-NEXT: retl entry: %0 = sitofp i32 %a to double @@ -82,7 +80,6 @@ define double @int_to_double_rm(i32* %a) { ; SSE2_X86-NEXT: fldl (%esp) ; SSE2_X86-NEXT: movl %ebp, %esp ; SSE2_X86-NEXT: popl %ebp -; SSE2_X86-NEXT: .cfi_def_cfa %esp, 4 ; SSE2_X86-NEXT: retl ; ; AVX_X86-LABEL: int_to_double_rm: @@ -100,7 +97,6 @@ define double @int_to_double_rm(i32* %a) { ; AVX_X86-NEXT: fldl (%esp) ; AVX_X86-NEXT: movl %ebp, %esp ; AVX_X86-NEXT: popl %ebp -; AVX_X86-NEXT: .cfi_def_cfa %esp, 4 ; AVX_X86-NEXT: retl entry: %0 = load i32, i32* %a @@ -134,7 +130,6 @@ define 
double @int_to_double_rm_optsize(i32* %a) optsize { ; SSE2_X86-NEXT: fldl (%esp) ; SSE2_X86-NEXT: movl %ebp, %esp ; SSE2_X86-NEXT: popl %ebp -; SSE2_X86-NEXT: .cfi_def_cfa %esp, 4 ; SSE2_X86-NEXT: retl ; ; AVX_X86-LABEL: int_to_double_rm_optsize: @@ -152,7 +147,6 @@ define double @int_to_double_rm_optsize(i32* %a) optsize { ; AVX_X86-NEXT: fldl (%esp) ; AVX_X86-NEXT: movl %ebp, %esp ; AVX_X86-NEXT: popl %ebp -; AVX_X86-NEXT: .cfi_def_cfa %esp, 4 ; AVX_X86-NEXT: retl entry: %0 = load i32, i32* %a @@ -180,7 +174,6 @@ define float @int_to_float_rr(i32 %a) { ; SSE2_X86-NEXT: movss %xmm0, (%esp) ; SSE2_X86-NEXT: flds (%esp) ; SSE2_X86-NEXT: popl %eax -; SSE2_X86-NEXT: .cfi_def_cfa_offset 4 ; SSE2_X86-NEXT: retl ; ; AVX_X86-LABEL: int_to_float_rr: @@ -191,7 +184,6 @@ define float @int_to_float_rr(i32 %a) { ; AVX_X86-NEXT: vmovss %xmm0, (%esp) ; AVX_X86-NEXT: flds (%esp) ; AVX_X86-NEXT: popl %eax -; AVX_X86-NEXT: .cfi_def_cfa_offset 4 ; AVX_X86-NEXT: retl entry: %0 = sitofp i32 %a to float @@ -219,7 +211,6 @@ define float @int_to_float_rm(i32* %a) { ; SSE2_X86-NEXT: movss %xmm0, (%esp) ; SSE2_X86-NEXT: flds (%esp) ; SSE2_X86-NEXT: popl %eax -; SSE2_X86-NEXT: .cfi_def_cfa_offset 4 ; SSE2_X86-NEXT: retl ; ; AVX_X86-LABEL: int_to_float_rm: @@ -231,7 +222,6 @@ define float @int_to_float_rm(i32* %a) { ; AVX_X86-NEXT: vmovss %xmm0, (%esp) ; AVX_X86-NEXT: flds (%esp) ; AVX_X86-NEXT: popl %eax -; AVX_X86-NEXT: .cfi_def_cfa_offset 4 ; AVX_X86-NEXT: retl entry: %0 = load i32, i32* %a @@ -259,7 +249,6 @@ define float @int_to_float_rm_optsize(i32* %a) optsize { ; SSE2_X86-NEXT: movss %xmm0, (%esp) ; SSE2_X86-NEXT: flds (%esp) ; SSE2_X86-NEXT: popl %eax -; SSE2_X86-NEXT: .cfi_def_cfa_offset 4 ; SSE2_X86-NEXT: retl ; ; AVX_X86-LABEL: int_to_float_rm_optsize: @@ -271,7 +260,6 @@ define float @int_to_float_rm_optsize(i32* %a) optsize { ; AVX_X86-NEXT: vmovss %xmm0, (%esp) ; AVX_X86-NEXT: flds (%esp) ; AVX_X86-NEXT: popl %eax -; AVX_X86-NEXT: .cfi_def_cfa_offset 4 ; AVX_X86-NEXT: retl entry: %0 = load i32, i32* %a diff --git a/llvm/test/CodeGen/X86/fast-isel-store.ll b/llvm/test/CodeGen/X86/fast-isel-store.ll index e2412e9c5c0..e359e620563 100644 --- a/llvm/test/CodeGen/X86/fast-isel-store.ll +++ b/llvm/test/CodeGen/X86/fast-isel-store.ll @@ -375,7 +375,6 @@ define <4 x double> @test_store_4xf64(<4 x double>* nocapture %addr, <4 x double ; SSE64-NEXT: movupd %xmm0, (%eax) ; SSE64-NEXT: movupd %xmm1, 16(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVX32-LABEL: test_store_4xf64: @@ -414,7 +413,6 @@ define <4 x double> @test_store_4xf64_aligned(<4 x double>* nocapture %addr, <4 ; SSE64-NEXT: movapd %xmm0, (%eax) ; SSE64-NEXT: movapd %xmm1, 16(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVX32-LABEL: test_store_4xf64_aligned: @@ -454,7 +452,6 @@ define <16 x i32> @test_store_16xi32(<16 x i32>* nocapture %addr, <16 x i32> %va ; SSE64-NEXT: movups %xmm2, 32(%eax) ; SSE64-NEXT: movups %xmm3, 48(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVXONLY32-LABEL: test_store_16xi32: @@ -504,7 +501,6 @@ define <16 x i32> @test_store_16xi32_aligned(<16 x i32>* nocapture %addr, <16 x ; SSE64-NEXT: movaps %xmm2, 32(%eax) ; SSE64-NEXT: movaps %xmm3, 48(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVXONLY32-LABEL: test_store_16xi32_aligned: @@ -554,7 +550,6 @@ define <16 x float> @test_store_16xf32(<16 x float>* nocapture %addr, <16 x 
floa ; SSE64-NEXT: movups %xmm2, 32(%eax) ; SSE64-NEXT: movups %xmm3, 48(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVXONLY32-LABEL: test_store_16xf32: @@ -604,7 +599,6 @@ define <16 x float> @test_store_16xf32_aligned(<16 x float>* nocapture %addr, <1 ; SSE64-NEXT: movaps %xmm2, 32(%eax) ; SSE64-NEXT: movaps %xmm3, 48(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVXONLY32-LABEL: test_store_16xf32_aligned: @@ -662,7 +656,6 @@ define <8 x double> @test_store_8xf64(<8 x double>* nocapture %addr, <8 x double ; SSE64-NEXT: movupd %xmm2, 32(%eax) ; SSE64-NEXT: movupd %xmm3, 48(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVXONLY32-LABEL: test_store_8xf64: @@ -689,7 +682,6 @@ define <8 x double> @test_store_8xf64(<8 x double>* nocapture %addr, <8 x double ; AVXONLY64-NEXT: vmovupd %ymm1, 32(%eax) ; AVXONLY64-NEXT: movl %ebp, %esp ; AVXONLY64-NEXT: popl %ebp -; AVXONLY64-NEXT: .cfi_def_cfa %esp, 4 ; AVXONLY64-NEXT: retl ; ; AVX51232-LABEL: test_store_8xf64: @@ -737,7 +729,6 @@ define <8 x double> @test_store_8xf64_aligned(<8 x double>* nocapture %addr, <8 ; SSE64-NEXT: movapd %xmm2, 32(%eax) ; SSE64-NEXT: movapd %xmm3, 48(%eax) ; SSE64-NEXT: addl $12, %esp -; SSE64-NEXT: .cfi_def_cfa_offset 4 ; SSE64-NEXT: retl ; ; AVXONLY32-LABEL: test_store_8xf64_aligned: @@ -764,7 +755,6 @@ define <8 x double> @test_store_8xf64_aligned(<8 x double>* nocapture %addr, <8 ; AVXONLY64-NEXT: vmovapd %ymm1, 32(%eax) ; AVXONLY64-NEXT: movl %ebp, %esp ; AVXONLY64-NEXT: popl %ebp -; AVXONLY64-NEXT: .cfi_def_cfa %esp, 4 ; AVXONLY64-NEXT: retl ; ; AVX51232-LABEL: test_store_8xf64_aligned: diff --git a/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic-2.ll b/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic-2.ll index ee64790d1d9..ba80c839fdd 100644 --- a/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic-2.ll +++ b/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic-2.ll @@ -18,15 +18,11 @@ entry: } ; CHECK-LABEL: noDebug -; CHECK: addq $16, %rsp -; CHECK-NEXT: .cfi_adjust_cfa_offset -16 -; CHECK-NEXT: addq $8, %rsp -; CHECK-NEXT: .cfi_def_cfa_offset 24 -; CHECK-NEXT: popq %rbx -; CHECK-NEXT: .cfi_def_cfa_offset 16 -; CHECK-NEXT: popq %r14 -; CHECK-NEXT: .cfi_def_cfa_offset 8 -; CHECK-NEXT: retq +; CHECK: addq $24, %rsp +; CHECK: popq %rbx +; CHECK-NEXT: popq %r14 +; CHECK-NEXT: retq + define void @withDebug() !dbg !18 { entry: @@ -46,11 +42,9 @@ entry: ; CHECK-LABEL: withDebug ; CHECK: callq printf ; CHECK: callq printf -; CHECK-NEXT: addq $16, %rsp +; CHECK-NEXT: addq $24, %rsp ; CHECK: popq %rbx -; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: popq %r14 -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq declare { i64, i1 } @llvm.uadd.with.overflow.i64(i64, i64) diff --git a/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic.ll b/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic.ll index de9d6bf93d6..f9ecf707810 100644 --- a/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic.ll +++ b/llvm/test/CodeGen/X86/frame-lowering-debug-intrinsic.ll @@ -9,7 +9,6 @@ define i64 @fn1NoDebug(i64 %a) { ; CHECK-LABEL: fn1NoDebug ; CHECK: popq %rcx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: ret define i64 @fn1WithDebug(i64 %a) !dbg !4 { @@ -20,7 +19,6 @@ define i64 @fn1WithDebug(i64 %a) !dbg !4 { ; CHECK-LABEL: fn1WithDebug ; CHECK: popq %rcx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: ret %struct.Buffer = type { i8, [63 x i8] } @@ -35,7 +33,6 @@ 
define void @fn2NoDebug(%struct.Buffer* byval align 64 %p1) { ; CHECK-NOT: sub ; CHECK: mov ; CHECK-NEXT: pop -; CHECK-NEXT: .cfi_def_cfa %rsp, 8 ; CHECK-NEXT: ret define void @fn2WithDebug(%struct.Buffer* byval align 64 %p1) !dbg !8 { @@ -49,7 +46,6 @@ define void @fn2WithDebug(%struct.Buffer* byval align 64 %p1) !dbg !8 { ; CHECK-NOT: sub ; CHECK: mov ; CHECK-NEXT: pop -; CHECK-NEXT: .cfi_def_cfa %rsp, 8 ; CHECK-NEXT: ret declare i64 @fn(i64, i64) diff --git a/llvm/test/CodeGen/X86/haddsub-2.ll b/llvm/test/CodeGen/X86/haddsub-2.ll index 7126fb233e6..e32c7452b0c 100644 --- a/llvm/test/CodeGen/X86/haddsub-2.ll +++ b/llvm/test/CodeGen/X86/haddsub-2.ll @@ -724,17 +724,11 @@ define <16 x i16> @avx2_vphadd_w_test(<16 x i16> %a, <16 x i16> %b) { ; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1] ; SSE3-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] ; SSE3-NEXT: popq %rbx -; SSE3-NEXT: .cfi_def_cfa_offset 48 ; SSE3-NEXT: popq %r12 -; SSE3-NEXT: .cfi_def_cfa_offset 40 ; SSE3-NEXT: popq %r13 -; SSE3-NEXT: .cfi_def_cfa_offset 32 ; SSE3-NEXT: popq %r14 -; SSE3-NEXT: .cfi_def_cfa_offset 24 ; SSE3-NEXT: popq %r15 -; SSE3-NEXT: .cfi_def_cfa_offset 16 ; SSE3-NEXT: popq %rbp -; SSE3-NEXT: .cfi_def_cfa_offset 8 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: avx2_vphadd_w_test: @@ -1357,17 +1351,11 @@ define <16 x i16> @avx2_hadd_w(<16 x i16> %a, <16 x i16> %b) { ; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1] ; SSE3-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0] ; SSE3-NEXT: popq %rbx -; SSE3-NEXT: .cfi_def_cfa_offset 48 ; SSE3-NEXT: popq %r12 -; SSE3-NEXT: .cfi_def_cfa_offset 40 ; SSE3-NEXT: popq %r13 -; SSE3-NEXT: .cfi_def_cfa_offset 32 ; SSE3-NEXT: popq %r14 -; SSE3-NEXT: .cfi_def_cfa_offset 24 ; SSE3-NEXT: popq %r15 -; SSE3-NEXT: .cfi_def_cfa_offset 16 ; SSE3-NEXT: popq %rbp -; SSE3-NEXT: .cfi_def_cfa_offset 8 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: avx2_hadd_w: diff --git a/llvm/test/CodeGen/X86/hipe-cc64.ll b/llvm/test/CodeGen/X86/hipe-cc64.ll index ce2d0e9c671..efe07cf6301 100644 --- a/llvm/test/CodeGen/X86/hipe-cc64.ll +++ b/llvm/test/CodeGen/X86/hipe-cc64.ll @@ -87,7 +87,6 @@ define cc 11 { i64, i64, i64 } @tailcaller(i64 %hp, i64 %p) #0 { ; CHECK-NEXT: movl $47, %ecx ; CHECK-NEXT: movl $63, %r8d ; CHECK-NEXT: popq %rax - ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: jmp tailcallee %ret = tail call cc11 { i64, i64, i64 } @tailcallee(i64 %hp, i64 %p, i64 15, i64 31, i64 47, i64 63, i64 79) #1 diff --git a/llvm/test/CodeGen/X86/illegal-bitfield-loadstore.ll b/llvm/test/CodeGen/X86/illegal-bitfield-loadstore.ll index e3b25a539c1..fd503aa6c6e 100644 --- a/llvm/test/CodeGen/X86/illegal-bitfield-loadstore.ll +++ b/llvm/test/CodeGen/X86/illegal-bitfield-loadstore.ll @@ -81,7 +81,6 @@ define void @i24_insert_bit(i24* %a, i1 zeroext %bit) { ; X86-NEXT: orl %edx, %eax ; X86-NEXT: movw %ax, (%ecx) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: i24_insert_bit: diff --git a/llvm/test/CodeGen/X86/imul.ll b/llvm/test/CodeGen/X86/imul.ll index 02782f72108..e364b001f94 100644 --- a/llvm/test/CodeGen/X86/imul.ll +++ b/llvm/test/CodeGen/X86/imul.ll @@ -307,7 +307,6 @@ define i64 @test5(i64 %a) { ; X86-NEXT: subl %ecx, %edx ; X86-NEXT: subl %esi, %edx ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: %tmp3 = mul i64 %a, -31 @@ -363,7 +362,6 @@ define i64 @test7(i64 %a) { ; X86-NEXT: subl %ecx, %edx ; X86-NEXT: subl %esi, %edx ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: 
%tmp3 = mul i64 %a, -33 @@ -392,7 +390,6 @@ define i64 @testOverflow(i64 %a) { ; X86-NEXT: addl %esi, %edx ; X86-NEXT: subl {{[0-9]+}}(%esp), %edx ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: %tmp3 = mul i64 %a, 9223372036854775807 diff --git a/llvm/test/CodeGen/X86/lea-opt-cse1.ll b/llvm/test/CodeGen/X86/lea-opt-cse1.ll index 4c9ec3e0d7a..05b47690e81 100644 --- a/llvm/test/CodeGen/X86/lea-opt-cse1.ll +++ b/llvm/test/CodeGen/X86/lea-opt-cse1.ll @@ -30,7 +30,6 @@ define void @test_func(%struct.SA* nocapture %ctx, i32 %n) local_unnamed_addr { ; X86-NEXT: leal 1(%edx,%ecx), %ecx ; X86-NEXT: movl %ecx, 16(%eax) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: %h0 = getelementptr inbounds %struct.SA, %struct.SA* %ctx, i64 0, i32 0 diff --git a/llvm/test/CodeGen/X86/lea-opt-cse2.ll b/llvm/test/CodeGen/X86/lea-opt-cse2.ll index cee6f6792cb..865dd49a6e1 100644 --- a/llvm/test/CodeGen/X86/lea-opt-cse2.ll +++ b/llvm/test/CodeGen/X86/lea-opt-cse2.ll @@ -46,9 +46,7 @@ define void @foo(%struct.SA* nocapture %ctx, i32 %n) local_unnamed_addr #0 { ; X86-NEXT: leal 1(%esi,%edx), %ecx ; X86-NEXT: movl %ecx, 16(%eax) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: popl %edi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: br label %loop diff --git a/llvm/test/CodeGen/X86/lea-opt-cse3.ll b/llvm/test/CodeGen/X86/lea-opt-cse3.ll index ed3aff98036..87949b40d48 100644 --- a/llvm/test/CodeGen/X86/lea-opt-cse3.ll +++ b/llvm/test/CodeGen/X86/lea-opt-cse3.ll @@ -91,7 +91,6 @@ define i32 @foo1_mult_basic_blocks(i32 %a, i32 %b) local_unnamed_addr #0 { ; X86-NEXT: movl %ecx, %eax ; X86-NEXT: .LBB2_2: # %exit ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: %mul = shl i32 %b, 2 @@ -144,7 +143,6 @@ define i32 @foo1_mult_basic_blocks_illegal_scale(i32 %a, i32 %b) local_unnamed_a ; X86-NEXT: movl %ecx, %eax ; X86-NEXT: .LBB3_2: # %exit ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: %mul = shl i32 %b, 1 diff --git a/llvm/test/CodeGen/X86/lea-opt-cse4.ll b/llvm/test/CodeGen/X86/lea-opt-cse4.ll index d068180c39c..31f31a73d44 100644 --- a/llvm/test/CodeGen/X86/lea-opt-cse4.ll +++ b/llvm/test/CodeGen/X86/lea-opt-cse4.ll @@ -36,7 +36,6 @@ define void @foo(%struct.SA* nocapture %ctx, i32 %n) local_unnamed_addr #0 { ; X86-NEXT: leal 1(%ecx,%edx), %ecx ; X86-NEXT: movl %ecx, 16(%eax) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: %h0 = getelementptr inbounds %struct.SA, %struct.SA* %ctx, i64 0, i32 0 @@ -111,9 +110,7 @@ define void @foo_loop(%struct.SA* nocapture %ctx, i32 %n) local_unnamed_addr #0 ; X86-NEXT: addl %ecx, %edx ; X86-NEXT: movl %edx, 16(%eax) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: popl %edi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl entry: br label %loop diff --git a/llvm/test/CodeGen/X86/legalize-shift-64.ll b/llvm/test/CodeGen/X86/legalize-shift-64.ll index 7dff2c20d5a..ca4cfa5b805 100644 --- a/llvm/test/CodeGen/X86/legalize-shift-64.ll +++ b/llvm/test/CodeGen/X86/legalize-shift-64.ll @@ -117,13 +117,9 @@ define <2 x i64> @test5(<2 x i64> %A, <2 x i64> %B) { ; CHECK-NEXT: movl %esi, 4(%eax) ; CHECK-NEXT: movl %edi, (%eax) ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: popl %edi -; CHECK-NEXT: .cfi_def_cfa_offset 12 ; CHECK-NEXT: popl %ebx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: popl %ebp -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl 
$4 %shl = shl <2 x i64> %A, %B ret <2 x i64> %shl @@ -164,7 +160,6 @@ define i32 @test6() { ; CHECK-NEXT: .LBB5_4: # %if.then ; CHECK-NEXT: movl %ebp, %esp ; CHECK-NEXT: popl %ebp -; CHECK-NEXT: .cfi_def_cfa %esp, 4 ; CHECK-NEXT: retl %x = alloca i32, align 4 %t = alloca i64, align 8 diff --git a/llvm/test/CodeGen/X86/live-out-reg-info.ll b/llvm/test/CodeGen/X86/live-out-reg-info.ll index 170f73593f6..b838065beea 100644 --- a/llvm/test/CodeGen/X86/live-out-reg-info.ll +++ b/llvm/test/CodeGen/X86/live-out-reg-info.ll @@ -18,7 +18,6 @@ define void @foo(i32 %a) { ; CHECK-NEXT: callq qux ; CHECK-NEXT: .LBB0_2: # %false ; CHECK-NEXT: popq %rax -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %t0 = lshr i32 %a, 23 br label %next diff --git a/llvm/test/CodeGen/X86/load-combine.ll b/llvm/test/CodeGen/X86/load-combine.ll index d46efc4b5ec..d1f5f41ac7b 100644 --- a/llvm/test/CodeGen/X86/load-combine.ll +++ b/llvm/test/CodeGen/X86/load-combine.ll @@ -376,7 +376,6 @@ define i32 @load_i32_by_i8_bswap_uses(i32* %arg) { ; CHECK-NEXT: orl %ecx, %eax ; CHECK-NEXT: orl %edx, %eax ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl ; ; CHECK64-LABEL: load_i32_by_i8_bswap_uses: @@ -497,7 +496,6 @@ define i32 @load_i32_by_i8_bswap_store_in_between(i32* %arg, i32* %arg1) { ; CHECK-NEXT: movzbl 3(%ecx), %eax ; CHECK-NEXT: orl %edx, %eax ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl ; ; CHECK64-LABEL: load_i32_by_i8_bswap_store_in_between: diff --git a/llvm/test/CodeGen/X86/masked_gather_scatter.ll b/llvm/test/CodeGen/X86/masked_gather_scatter.ll index 207175aae1a..94057f98703 100644 --- a/llvm/test/CodeGen/X86/masked_gather_scatter.ll +++ b/llvm/test/CodeGen/X86/masked_gather_scatter.ll @@ -1690,7 +1690,6 @@ define <16 x i64> @test_gather_16i64(<16 x i64*> %ptrs, <16 x i1> %mask, <16 x i ; KNL_32-NEXT: vmovdqa64 %zmm2, %zmm0 ; KNL_32-NEXT: movl %ebp, %esp ; KNL_32-NEXT: popl %ebp -; KNL_32-NEXT: .cfi_def_cfa %esp, 4 ; KNL_32-NEXT: retl ; ; SKX-LABEL: test_gather_16i64: @@ -1725,7 +1724,6 @@ define <16 x i64> @test_gather_16i64(<16 x i64*> %ptrs, <16 x i1> %mask, <16 x i ; SKX_32-NEXT: vmovdqa64 %zmm2, %zmm0 ; SKX_32-NEXT: movl %ebp, %esp ; SKX_32-NEXT: popl %ebp -; SKX_32-NEXT: .cfi_def_cfa %esp, 4 ; SKX_32-NEXT: retl %res = call <16 x i64> @llvm.masked.gather.v16i64.v16p0i64(<16 x i64*> %ptrs, i32 4, <16 x i1> %mask, <16 x i64> %src0) ret <16 x i64> %res @@ -1809,7 +1807,6 @@ define <16 x double> @test_gather_16f64(<16 x double*> %ptrs, <16 x i1> %mask, < ; KNL_32-NEXT: vmovapd %zmm2, %zmm0 ; KNL_32-NEXT: movl %ebp, %esp ; KNL_32-NEXT: popl %ebp -; KNL_32-NEXT: .cfi_def_cfa %esp, 4 ; KNL_32-NEXT: retl ; ; SKX-LABEL: test_gather_16f64: @@ -1844,7 +1841,6 @@ define <16 x double> @test_gather_16f64(<16 x double*> %ptrs, <16 x i1> %mask, < ; SKX_32-NEXT: vmovapd %zmm2, %zmm0 ; SKX_32-NEXT: movl %ebp, %esp ; SKX_32-NEXT: popl %ebp -; SKX_32-NEXT: .cfi_def_cfa %esp, 4 ; SKX_32-NEXT: retl %res = call <16 x double> @llvm.masked.gather.v16f64.v16p0f64(<16 x double*> %ptrs, i32 4, <16 x i1> %mask, <16 x double> %src0) ret <16 x double> %res @@ -1926,7 +1922,6 @@ define void @test_scatter_16i64(<16 x i64*> %ptrs, <16 x i1> %mask, <16 x i64> % ; KNL_32-NEXT: vpscatterdq %zmm1, (,%ymm0) {%k2} ; KNL_32-NEXT: movl %ebp, %esp ; KNL_32-NEXT: popl %ebp -; KNL_32-NEXT: .cfi_def_cfa %esp, 4 ; KNL_32-NEXT: vzeroupper ; KNL_32-NEXT: retl ; @@ -1960,7 +1955,6 @@ define void @test_scatter_16i64(<16 x i64*> %ptrs, <16 x i1> %mask, <16 x i64> % ; SKX_32-NEXT: 
vpscatterdq %zmm1, (,%ymm0) {%k2} ; SKX_32-NEXT: movl %ebp, %esp ; SKX_32-NEXT: popl %ebp -; SKX_32-NEXT: .cfi_def_cfa %esp, 4 ; SKX_32-NEXT: vzeroupper ; SKX_32-NEXT: retl call void @llvm.masked.scatter.v16i64.v16p0i64(<16 x i64> %src0, <16 x i64*> %ptrs, i32 4, <16 x i1> %mask) @@ -2044,7 +2038,6 @@ define void @test_scatter_16f64(<16 x double*> %ptrs, <16 x i1> %mask, <16 x dou ; KNL_32-NEXT: vscatterdpd %zmm1, (,%ymm0) {%k2} ; KNL_32-NEXT: movl %ebp, %esp ; KNL_32-NEXT: popl %ebp -; KNL_32-NEXT: .cfi_def_cfa %esp, 4 ; KNL_32-NEXT: vzeroupper ; KNL_32-NEXT: retl ; @@ -2078,7 +2071,6 @@ define void @test_scatter_16f64(<16 x double*> %ptrs, <16 x i1> %mask, <16 x dou ; SKX_32-NEXT: vscatterdpd %zmm1, (,%ymm0) {%k2} ; SKX_32-NEXT: movl %ebp, %esp ; SKX_32-NEXT: popl %ebp -; SKX_32-NEXT: .cfi_def_cfa %esp, 4 ; SKX_32-NEXT: vzeroupper ; SKX_32-NEXT: retl call void @llvm.masked.scatter.v16f64.v16p0f64(<16 x double> %src0, <16 x double*> %ptrs, i32 4, <16 x i1> %mask) @@ -2123,7 +2115,6 @@ define <4 x i64> @test_pr28312(<4 x i64*> %p1, <4 x i1> %k, <4 x i1> %k2,<4 x i6 ; KNL_32-NEXT: vpaddq %ymm0, %ymm1, %ymm0 ; KNL_32-NEXT: movl %ebp, %esp ; KNL_32-NEXT: popl %ebp -; KNL_32-NEXT: .cfi_def_cfa %esp, 4 ; KNL_32-NEXT: retl ; ; SKX-LABEL: test_pr28312: @@ -2151,7 +2142,6 @@ define <4 x i64> @test_pr28312(<4 x i64*> %p1, <4 x i1> %k, <4 x i1> %k2,<4 x i6 ; SKX_32-NEXT: vpaddq %ymm0, %ymm1, %ymm0 ; SKX_32-NEXT: movl %ebp, %esp ; SKX_32-NEXT: popl %ebp -; SKX_32-NEXT: .cfi_def_cfa %esp, 4 ; SKX_32-NEXT: retl %g1 = call <4 x i64> @llvm.masked.gather.v4i64.v4p0i64(<4 x i64*> %p1, i32 8, <4 x i1> %k, <4 x i64> undef) %g2 = call <4 x i64> @llvm.masked.gather.v4i64.v4p0i64(<4 x i64*> %p1, i32 8, <4 x i1> %k, <4 x i64> undef) diff --git a/llvm/test/CodeGen/X86/memset-nonzero.ll b/llvm/test/CodeGen/X86/memset-nonzero.ll index 98e09377ddb..f0a957c9417 100644 --- a/llvm/test/CodeGen/X86/memset-nonzero.ll +++ b/llvm/test/CodeGen/X86/memset-nonzero.ll @@ -148,7 +148,6 @@ define void @memset_256_nonzero_bytes(i8* %x) { ; SSE-NEXT: movl $256, %edx # imm = 0x100 ; SSE-NEXT: callq memset ; SSE-NEXT: popq %rax -; SSE-NEXT: .cfi_def_cfa_offset 8 ; SSE-NEXT: retq ; ; SSE2FAST-LABEL: memset_256_nonzero_bytes: diff --git a/llvm/test/CodeGen/X86/merge-consecutive-loads-128.ll b/llvm/test/CodeGen/X86/merge-consecutive-loads-128.ll index b909b7c403b..e414f5554de 100644 --- a/llvm/test/CodeGen/X86/merge-consecutive-loads-128.ll +++ b/llvm/test/CodeGen/X86/merge-consecutive-loads-128.ll @@ -72,9 +72,7 @@ define <2 x i64> @merge_2i64_i64_12(i64* %ptr) nounwind uwtable noinline ssp { ; X32-SSE1-NEXT: movl %esi, 4(%eax) ; X32-SSE1-NEXT: movl %edx, (%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_2i64_i64_12: @@ -386,7 +384,6 @@ define <4 x i32> @merge_4i32_i32_23u5(i32* %ptr) nounwind uwtable noinline ssp { ; X32-SSE1-NEXT: movl %edx, (%eax) ; X32-SSE1-NEXT: movl %ecx, 12(%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_4i32_i32_23u5: @@ -438,9 +435,7 @@ define <4 x i32> @merge_4i32_i32_23u5_inc2(i32* %ptr) nounwind uwtable noinline ; X32-SSE1-NEXT: movl %edx, (%eax) ; X32-SSE1-NEXT: movl %ecx, 12(%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_4i32_i32_23u5_inc2: @@ 
-495,9 +490,7 @@ define <4 x i32> @merge_4i32_i32_23u5_inc3(i32* %ptr) nounwind uwtable noinline ; X32-SSE1-NEXT: movl %edx, (%eax) ; X32-SSE1-NEXT: movl %ecx, 12(%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_4i32_i32_23u5_inc3: @@ -656,9 +649,7 @@ define <4 x i32> @merge_4i32_i32_45zz_inc4(i32* %ptr) nounwind uwtable noinline ; X32-SSE1-NEXT: movl $0, 12(%eax) ; X32-SSE1-NEXT: movl $0, 8(%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_4i32_i32_45zz_inc4: @@ -710,9 +701,7 @@ define <4 x i32> @merge_4i32_i32_45zz_inc5(i32* %ptr) nounwind uwtable noinline ; X32-SSE1-NEXT: movl $0, 12(%eax) ; X32-SSE1-NEXT: movl $0, 8(%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_4i32_i32_45zz_inc5: @@ -762,9 +751,7 @@ define <8 x i16> @merge_8i16_i16_23u567u9(i16* %ptr) nounwind uwtable noinline s ; X32-SSE1-NEXT: movl %esi, 6(%eax) ; X32-SSE1-NEXT: movl %edx, (%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_8i16_i16_23u567u9: @@ -910,13 +897,9 @@ define <16 x i8> @merge_16i8_i8_01u3456789ABCDuF(i8* %ptr) nounwind uwtable noin ; X32-SSE1-NEXT: movl %esi, 3(%eax) ; X32-SSE1-NEXT: movw %bp, (%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 16 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 12 ; X32-SSE1-NEXT: popl %ebx -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %ebp -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_16i8_i8_01u3456789ABCDuF: @@ -1146,9 +1129,7 @@ define <2 x i64> @merge_2i64_i64_12_volatile(i64* %ptr) nounwind uwtable noinlin ; X32-SSE1-NEXT: movl %esi, 4(%eax) ; X32-SSE1-NEXT: movl %edx, (%eax) ; X32-SSE1-NEXT: popl %esi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 8 ; X32-SSE1-NEXT: popl %edi -; X32-SSE1-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE1-NEXT: retl $4 ; ; X32-SSE41-LABEL: merge_2i64_i64_12_volatile: diff --git a/llvm/test/CodeGen/X86/movtopush.ll b/llvm/test/CodeGen/X86/movtopush.ll index ddcc383b65e..051c8a710c8 100644 --- a/llvm/test/CodeGen/X86/movtopush.ll +++ b/llvm/test/CodeGen/X86/movtopush.ll @@ -382,10 +382,8 @@ entry: ; LINUX: pushl $1 ; LINUX: .cfi_adjust_cfa_offset 4 ; LINUX: calll good -; LINUX: addl $16, %esp +; LINUX: addl $28, %esp ; LINUX: .cfi_adjust_cfa_offset -16 -; LINUX: addl $12, %esp -; LINUX: .cfi_def_cfa_offset 4 ; LINUX-NOT: add ; LINUX: retl define void @pr27140() optsize { diff --git a/llvm/test/CodeGen/X86/mul-constant-result.ll b/llvm/test/CodeGen/X86/mul-constant-result.ll index f778397f889..011b63ce726 100644 --- a/llvm/test/CodeGen/X86/mul-constant-result.ll +++ b/llvm/test/CodeGen/X86/mul-constant-result.ll @@ -34,116 +34,84 @@ define i32 @mult(i32, i32) local_unnamed_addr #0 { ; X86-NEXT: .LBB0_6: ; X86-NEXT: addl %eax, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_39: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: xorl %eax, %eax ; X86-NEXT: .LBB0_40: ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: 
.LBB0_7: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_8: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: shll $2, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_9: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,4), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_10: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: addl %eax, %eax ; X86-NEXT: leal (%eax,%eax,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_11: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (,%eax,8), %ecx ; X86-NEXT: jmp .LBB0_12 ; X86-NEXT: .LBB0_13: ; X86-NEXT: shll $3, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_14: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,8), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_15: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: addl %eax, %eax ; X86-NEXT: leal (%eax,%eax,4), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_16: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,4), %ecx ; X86-NEXT: leal (%eax,%ecx,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_17: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: shll $2, %eax ; X86-NEXT: leal (%eax,%eax,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_18: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,2), %ecx ; X86-NEXT: leal (%eax,%ecx,4), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_19: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,2), %ecx ; X86-NEXT: jmp .LBB0_20 ; X86-NEXT: .LBB0_21: ; X86-NEXT: leal (%eax,%eax,4), %eax ; X86-NEXT: leal (%eax,%eax,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_22: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: shll $4, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_23: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: movl %eax, %ecx ; X86-NEXT: shll $4, %ecx ; X86-NEXT: addl %ecx, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_24: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: addl %eax, %eax ; X86-NEXT: leal (%eax,%eax,8), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_25: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,4), %ecx ; X86-NEXT: shll $2, %ecx ; X86-NEXT: jmp .LBB0_12 @@ -151,26 +119,20 @@ define i32 @mult(i32, i32) local_unnamed_addr #0 { ; X86-NEXT: shll $2, %eax ; X86-NEXT: leal (%eax,%eax,4), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_27: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,4), %ecx ; X86-NEXT: leal (%eax,%ecx,4), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_28: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,4), %ecx ; X86-NEXT: .LBB0_20: ; X86-NEXT: leal (%eax,%ecx,4), %ecx ; X86-NEXT: addl %ecx, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; 
X86-NEXT: .LBB0_29: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,2), %ecx ; X86-NEXT: shll $3, %ecx ; X86-NEXT: jmp .LBB0_12 @@ -178,17 +140,13 @@ define i32 @mult(i32, i32) local_unnamed_addr #0 { ; X86-NEXT: shll $3, %eax ; X86-NEXT: leal (%eax,%eax,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_31: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,4), %eax ; X86-NEXT: leal (%eax,%eax,4), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_32: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,8), %ecx ; X86-NEXT: leal (%ecx,%ecx,2), %ecx ; X86-NEXT: jmp .LBB0_12 @@ -196,27 +154,21 @@ define i32 @mult(i32, i32) local_unnamed_addr #0 { ; X86-NEXT: leal (%eax,%eax,8), %eax ; X86-NEXT: leal (%eax,%eax,2), %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_34: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,8), %ecx ; X86-NEXT: leal (%ecx,%ecx,2), %ecx ; X86-NEXT: addl %ecx, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_35: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: leal (%eax,%eax,8), %ecx ; X86-NEXT: leal (%ecx,%ecx,2), %ecx ; X86-NEXT: addl %eax, %ecx ; X86-NEXT: addl %ecx, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_36: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: movl %eax, %ecx ; X86-NEXT: shll $5, %ecx ; X86-NEXT: subl %eax, %ecx @@ -228,13 +180,10 @@ define i32 @mult(i32, i32) local_unnamed_addr #0 { ; X86-NEXT: subl %eax, %ecx ; X86-NEXT: movl %ecx, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; X86-NEXT: .LBB0_38: -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: shll $5, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-HSW-LABEL: mult: @@ -908,11 +857,8 @@ define i32 @foo() local_unnamed_addr #0 { ; X86-NEXT: negl %ecx ; X86-NEXT: movl %ecx, %eax ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 12 ; X86-NEXT: popl %edi -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: popl %ebx -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-HSW-LABEL: foo: @@ -1126,15 +1072,10 @@ define i32 @foo() local_unnamed_addr #0 { ; X64-HSW-NEXT: negl %ecx ; X64-HSW-NEXT: movl %ecx, %eax ; X64-HSW-NEXT: addq $8, %rsp -; X64-HSW-NEXT: .cfi_def_cfa_offset 40 ; X64-HSW-NEXT: popq %rbx -; X64-HSW-NEXT: .cfi_def_cfa_offset 32 ; X64-HSW-NEXT: popq %r14 -; X64-HSW-NEXT: .cfi_def_cfa_offset 24 ; X64-HSW-NEXT: popq %r15 -; X64-HSW-NEXT: .cfi_def_cfa_offset 16 ; X64-HSW-NEXT: popq %rbp -; X64-HSW-NEXT: .cfi_def_cfa_offset 8 ; X64-HSW-NEXT: retq %1 = tail call i32 @mult(i32 1, i32 0) %2 = icmp ne i32 %1, 1 diff --git a/llvm/test/CodeGen/X86/mul-i256.ll b/llvm/test/CodeGen/X86/mul-i256.ll index 1e05b95dda0..0a48ae761ec 100644 --- a/llvm/test/CodeGen/X86/mul-i256.ll +++ b/llvm/test/CodeGen/X86/mul-i256.ll @@ -349,15 +349,10 @@ define void @test(i256* %a, i256* %b, i256* %out) #0 { ; X32-NEXT: movl %eax, 24(%ecx) ; X32-NEXT: movl %edx, 28(%ecx) ; X32-NEXT: addl $88, %esp -; X32-NEXT: .cfi_def_cfa_offset 20 ; X32-NEXT: popl %esi -; X32-NEXT: .cfi_def_cfa_offset 16 ; X32-NEXT: popl %edi -; X32-NEXT: .cfi_def_cfa_offset 12 ; X32-NEXT: popl %ebx -; X32-NEXT: .cfi_def_cfa_offset 8 ; X32-NEXT: popl %ebp -; X32-NEXT: .cfi_def_cfa_offset 4 ; X32-NEXT: retl ; ; X64-LABEL: test: @@ -426,11 +421,8 @@ define void @test(i256* %a, i256* %b, 
i256* %out) #0 { ; X64-NEXT: movq %rax, 16(%r9) ; X64-NEXT: movq %rdx, 24(%r9) ; X64-NEXT: popq %rbx -; X64-NEXT: .cfi_def_cfa_offset 24 ; X64-NEXT: popq %r14 -; X64-NEXT: .cfi_def_cfa_offset 16 ; X64-NEXT: popq %r15 -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq entry: %av = load i256, i256* %a diff --git a/llvm/test/CodeGen/X86/mul128.ll b/llvm/test/CodeGen/X86/mul128.ll index 0c11f17d8d1..70a6173a19f 100644 --- a/llvm/test/CodeGen/X86/mul128.ll +++ b/llvm/test/CodeGen/X86/mul128.ll @@ -86,15 +86,10 @@ define i128 @foo(i128 %t, i128 %u) { ; X86-NEXT: movl %edx, 12(%ecx) ; X86-NEXT: movl %ecx, %eax ; X86-NEXT: addl $8, %esp -; X86-NEXT: .cfi_def_cfa_offset 20 ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 16 ; X86-NEXT: popl %edi -; X86-NEXT: .cfi_def_cfa_offset 12 ; X86-NEXT: popl %ebx -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: popl %ebp -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl $4 %k = mul i128 %t, %u ret i128 %k diff --git a/llvm/test/CodeGen/X86/pr21792.ll b/llvm/test/CodeGen/X86/pr21792.ll index 54eb1fc7272..74f6c5a361f 100644 --- a/llvm/test/CodeGen/X86/pr21792.ll +++ b/llvm/test/CodeGen/X86/pr21792.ll @@ -28,7 +28,6 @@ define void @func(<4 x float> %vx) { ; CHECK-NEXT: leaq stuff+8(%r9), %r9 ; CHECK-NEXT: callq toto ; CHECK-NEXT: popq %rax -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq entry: %tmp2 = bitcast <4 x float> %vx to <2 x i64> diff --git a/llvm/test/CodeGen/X86/pr29061.ll b/llvm/test/CodeGen/X86/pr29061.ll index b62d082507d..0cbe75f9ad5 100644 --- a/llvm/test/CodeGen/X86/pr29061.ll +++ b/llvm/test/CodeGen/X86/pr29061.ll @@ -15,7 +15,6 @@ define void @t1(i8 signext %c) { ; CHECK-NEXT: #APP ; CHECK-NEXT: #NO_APP ; CHECK-NEXT: popl %edi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl entry: tail call void asm sideeffect "", "{di},~{dirflag},~{fpsr},~{flags}"(i8 %c) @@ -33,7 +32,6 @@ define void @t2(i8 signext %c) { ; CHECK-NEXT: #APP ; CHECK-NEXT: #NO_APP ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl entry: tail call void asm sideeffect "", "{si},~{dirflag},~{fpsr},~{flags}"(i8 %c) diff --git a/llvm/test/CodeGen/X86/pr29112.ll b/llvm/test/CodeGen/X86/pr29112.ll index d791936bd53..cc670eeb978 100644 --- a/llvm/test/CodeGen/X86/pr29112.ll +++ b/llvm/test/CodeGen/X86/pr29112.ll @@ -65,7 +65,6 @@ define <4 x float> @bar(<4 x float>* %a1p, <4 x float>* %a2p, <4 x float> %a3, < ; CHECK-NEXT: vaddps {{[0-9]+}}(%rsp), %xmm1, %xmm1 # 16-byte Folded Reload ; CHECK-NEXT: vaddps %xmm0, %xmm1, %xmm0 ; CHECK-NEXT: addq $88, %rsp -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %a1 = shufflevector <16 x float>%c1, <16 x float>%c2, <4 x i32> <i32 4, i32 20, i32 1, i32 17> diff --git a/llvm/test/CodeGen/X86/pr30430.ll b/llvm/test/CodeGen/X86/pr30430.ll index 06007a3a4cf..0254c0940b8 100644 --- a/llvm/test/CodeGen/X86/pr30430.ll +++ b/llvm/test/CodeGen/X86/pr30430.ll @@ -108,7 +108,6 @@ define <16 x float> @makefloat(float %f1, float %f2, float %f3, float %f4, float ; CHECK-NEXT: vmovss %xmm14, (%rsp) # 4-byte Spill ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp -; CHECK-NEXT: .cfi_def_cfa %rsp, 8 ; CHECK-NEXT: retq entry: %__A.addr.i = alloca float, align 4 diff --git a/llvm/test/CodeGen/X86/pr32241.ll b/llvm/test/CodeGen/X86/pr32241.ll index 02f3bb12291..f48fef5f7fb 100644 --- a/llvm/test/CodeGen/X86/pr32241.ll +++ b/llvm/test/CodeGen/X86/pr32241.ll @@ -50,9 +50,7 @@ define i32 @_Z3foov() { ; CHECK-NEXT: movw %dx, {{[0-9]+}}(%esp) ; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: addl $16, 
%esp -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl entry: %aa = alloca i16, align 2 diff --git a/llvm/test/CodeGen/X86/pr32256.ll b/llvm/test/CodeGen/X86/pr32256.ll index 5b6126fbc76..f6e254aaad0 100644 --- a/llvm/test/CodeGen/X86/pr32256.ll +++ b/llvm/test/CodeGen/X86/pr32256.ll @@ -27,7 +27,6 @@ define void @_Z1av() { ; CHECK-NEXT: andb $1, %al ; CHECK-NEXT: movb %al, {{[0-9]+}}(%esp) ; CHECK-NEXT: addl $2, %esp -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl entry: %b = alloca i8, align 1 diff --git a/llvm/test/CodeGen/X86/pr32282.ll b/llvm/test/CodeGen/X86/pr32282.ll index 67a0332ac53..d6e6f6eb107 100644 --- a/llvm/test/CodeGen/X86/pr32282.ll +++ b/llvm/test/CodeGen/X86/pr32282.ll @@ -43,7 +43,6 @@ define void @foo() { ; X86-NEXT: orl %eax, %edx ; X86-NEXT: setne {{[0-9]+}}(%esp) ; X86-NEXT: popl %eax -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: foo: diff --git a/llvm/test/CodeGen/X86/pr32284.ll b/llvm/test/CodeGen/X86/pr32284.ll index 59be67f0579..11eb6968709 100644 --- a/llvm/test/CodeGen/X86/pr32284.ll +++ b/llvm/test/CodeGen/X86/pr32284.ll @@ -71,7 +71,6 @@ define void @foo() { ; 686-O0-NEXT: movzbl %al, %ecx ; 686-O0-NEXT: movl %ecx, (%esp) ; 686-O0-NEXT: addl $8, %esp -; 686-O0-NEXT: .cfi_def_cfa_offset 4 ; 686-O0-NEXT: retl ; ; 686-LABEL: foo: @@ -89,7 +88,6 @@ define void @foo() { ; 686-NEXT: setle %dl ; 686-NEXT: movl %edx, {{[0-9]+}}(%esp) ; 686-NEXT: addl $8, %esp -; 686-NEXT: .cfi_def_cfa_offset 4 ; 686-NEXT: retl entry: %a = alloca i8, align 1 @@ -234,15 +232,10 @@ define void @f1() { ; 686-O0-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill ; 686-O0-NEXT: movl %esi, (%esp) # 4-byte Spill ; 686-O0-NEXT: addl $36, %esp -; 686-O0-NEXT: .cfi_def_cfa_offset 20 ; 686-O0-NEXT: popl %esi -; 686-O0-NEXT: .cfi_def_cfa_offset 16 ; 686-O0-NEXT: popl %edi -; 686-O0-NEXT: .cfi_def_cfa_offset 12 ; 686-O0-NEXT: popl %ebx -; 686-O0-NEXT: .cfi_def_cfa_offset 8 ; 686-O0-NEXT: popl %ebp -; 686-O0-NEXT: .cfi_def_cfa_offset 4 ; 686-O0-NEXT: retl ; ; 686-LABEL: f1: @@ -284,11 +277,8 @@ define void @f1() { ; 686-NEXT: movl %eax, _ZN8struct_210member_2_0E ; 686-NEXT: movl $0, _ZN8struct_210member_2_0E+4 ; 686-NEXT: addl $1, %esp -; 686-NEXT: .cfi_def_cfa_offset 12 ; 686-NEXT: popl %esi -; 686-NEXT: .cfi_def_cfa_offset 8 ; 686-NEXT: popl %edi -; 686-NEXT: .cfi_def_cfa_offset 4 ; 686-NEXT: retl entry: %a = alloca i8, align 1 @@ -402,11 +392,8 @@ define void @f2() { ; 686-O0-NEXT: movw %cx, %di ; 686-O0-NEXT: movw %di, (%eax) ; 686-O0-NEXT: addl $2, %esp -; 686-O0-NEXT: .cfi_def_cfa_offset 12 ; 686-O0-NEXT: popl %esi -; 686-O0-NEXT: .cfi_def_cfa_offset 8 ; 686-O0-NEXT: popl %edi -; 686-O0-NEXT: .cfi_def_cfa_offset 4 ; 686-O0-NEXT: retl ; ; 686-LABEL: f2: @@ -427,7 +414,6 @@ define void @f2() { ; 686-NEXT: sete %dl ; 686-NEXT: movw %dx, (%eax) ; 686-NEXT: addl $2, %esp -; 686-NEXT: .cfi_def_cfa_offset 4 ; 686-NEXT: retl entry: %a = alloca i16, align 2 @@ -546,7 +532,6 @@ define void @f3() #0 { ; 686-O0-NEXT: popl %esi ; 686-O0-NEXT: popl %edi ; 686-O0-NEXT: popl %ebp -; 686-O0-NEXT: .cfi_def_cfa %esp, 4 ; 686-O0-NEXT: retl ; ; 686-LABEL: f3: @@ -573,7 +558,6 @@ define void @f3() #0 { ; 686-NEXT: movl %ecx, var_46 ; 686-NEXT: movl %ebp, %esp ; 686-NEXT: popl %ebp -; 686-NEXT: .cfi_def_cfa %esp, 4 ; 686-NEXT: retl entry: %a = alloca i64, align 8 diff --git a/llvm/test/CodeGen/X86/pr32329.ll b/llvm/test/CodeGen/X86/pr32329.ll index 9d1bb90e824..f6bdade24c6 100644 --- a/llvm/test/CodeGen/X86/pr32329.ll 
+++ b/llvm/test/CodeGen/X86/pr32329.ll @@ -57,13 +57,9 @@ define void @foo() local_unnamed_addr { ; X86-NEXT: imull %eax, %ebx ; X86-NEXT: movb %bl, var_218 ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 16 ; X86-NEXT: popl %edi -; X86-NEXT: .cfi_def_cfa_offset 12 ; X86-NEXT: popl %ebx -; X86-NEXT: .cfi_def_cfa_offset 8 ; X86-NEXT: popl %ebp -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: foo: diff --git a/llvm/test/CodeGen/X86/pr32345.ll b/llvm/test/CodeGen/X86/pr32345.ll index 2bdeca20731..f6802887e9e 100644 --- a/llvm/test/CodeGen/X86/pr32345.ll +++ b/llvm/test/CodeGen/X86/pr32345.ll @@ -84,7 +84,6 @@ define void @foo() { ; 6860-NEXT: popl %edi ; 6860-NEXT: popl %ebx ; 6860-NEXT: popl %ebp -; 6860-NEXT: .cfi_def_cfa %esp, 4 ; 6860-NEXT: retl ; ; X64-LABEL: foo: @@ -128,7 +127,6 @@ define void @foo() { ; 686-NEXT: movb %dl, (%eax) ; 686-NEXT: movl %ebp, %esp ; 686-NEXT: popl %ebp -; 686-NEXT: .cfi_def_cfa %esp, 4 ; 686-NEXT: retl bb: %tmp = alloca i64, align 8 diff --git a/llvm/test/CodeGen/X86/pr32451.ll b/llvm/test/CodeGen/X86/pr32451.ll index 5b7d1373d34..67c0cb39f8c 100644 --- a/llvm/test/CodeGen/X86/pr32451.ll +++ b/llvm/test/CodeGen/X86/pr32451.ll @@ -30,9 +30,7 @@ define i8** @japi1_convert_690(i8**, i8***, i32) { ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload ; CHECK-NEXT: movl %eax, (%ecx) ; CHECK-NEXT: addl $16, %esp -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: popl %ebx -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl top: %3 = alloca i8*** diff --git a/llvm/test/CodeGen/X86/pr34088.ll b/llvm/test/CodeGen/X86/pr34088.ll index 4d85722057f..2049c5507c6 100644 --- a/llvm/test/CodeGen/X86/pr34088.ll +++ b/llvm/test/CodeGen/X86/pr34088.ll @@ -27,7 +27,6 @@ define i32 @pr34088() local_unnamed_addr { ; CHECK-NEXT: movsd %xmm0, {{[0-9]+}}(%esp) ; CHECK-NEXT: movl %ebp, %esp ; CHECK-NEXT: popl %ebp -; CHECK-NEXT: .cfi_def_cfa %esp, 4 ; CHECK-NEXT: retl entry: %foo = alloca %struct.Foo, align 4 diff --git a/llvm/test/CodeGen/X86/pr34653.ll b/llvm/test/CodeGen/X86/pr34653.ll index 129dbcacc95..4b16ffd33d5 100644 --- a/llvm/test/CodeGen/X86/pr34653.ll +++ b/llvm/test/CodeGen/X86/pr34653.ll @@ -199,7 +199,6 @@ define void @pr34653() { ; CHECK-NEXT: vmovsd %xmm7, {{[0-9]+}}(%rsp) # 8-byte Spill ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp -; CHECK-NEXT: .cfi_def_cfa %rsp, 8 ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq entry: diff --git a/llvm/test/CodeGen/X86/pr9743.ll b/llvm/test/CodeGen/X86/pr9743.ll index ac3d4575510..73b3c7f835c 100644 --- a/llvm/test/CodeGen/X86/pr9743.ll +++ b/llvm/test/CodeGen/X86/pr9743.ll @@ -11,5 +11,4 @@ define void @f() { ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: popq %rbp -; CHECK-NEXT: .cfi_def_cfa %rsp, 8 ; CHECK-NEXT: ret diff --git a/llvm/test/CodeGen/X86/push-cfi-debug.ll b/llvm/test/CodeGen/X86/push-cfi-debug.ll index 01fa12e87d0..7f438e306e4 100644 --- a/llvm/test/CodeGen/X86/push-cfi-debug.ll +++ b/llvm/test/CodeGen/X86/push-cfi-debug.ll @@ -23,10 +23,8 @@ declare x86_stdcallcc void @stdfoo(i32, i32) #0 ; CHECK: .cfi_adjust_cfa_offset 4 ; CHECK: calll stdfoo ; CHECK: .cfi_adjust_cfa_offset -8 -; CHECK: addl $8, %esp +; CHECK: addl $20, %esp ; CHECK: .cfi_adjust_cfa_offset -8 -; CHECK: addl $12, %esp -; CHECK: .cfi_def_cfa_offset 4 define void @test1() #0 !dbg !4 { entry: tail call void @foo(i32 1, i32 2) #1, !dbg !10 diff --git a/llvm/test/CodeGen/X86/push-cfi-obj.ll b/llvm/test/CodeGen/X86/push-cfi-obj.ll index 2c9ec334027..33291ec3318 100644 --- 
a/llvm/test/CodeGen/X86/push-cfi-obj.ll +++ b/llvm/test/CodeGen/X86/push-cfi-obj.ll @@ -12,7 +12,7 @@ ; LINUX-NEXT: ] ; LINUX-NEXT: Address: 0x0 ; LINUX-NEXT: Offset: 0x68 -; LINUX-NEXT: Size: 72 +; LINUX-NEXT: Size: 64 ; LINUX-NEXT: Link: 0 ; LINUX-NEXT: Info: 0 ; LINUX-NEXT: AddressAlignment: 4 @@ -22,9 +22,8 @@ ; LINUX-NEXT: SectionData ( ; LINUX-NEXT: 0000: 1C000000 00000000 017A504C 5200017C |.........zPLR..|| ; LINUX-NEXT: 0010: 08070000 00000000 1B0C0404 88010000 |................| -; LINUX-NEXT: 0020: 24000000 24000000 00000000 1D000000 |$...$...........| +; LINUX-NEXT: 0020: 1C000000 24000000 00000000 1D000000 |....$...........| ; LINUX-NEXT: 0030: 04000000 00410E08 8502420D 05432E10 |.....A....B..C..| -; LINUX-NEXT: 0040: 540C0404 410C0508 |T...A...| ; LINUX-NEXT: ) declare i32 @__gxx_personality_v0(...) @@ -36,7 +35,7 @@ entry: to label %continue unwind label %cleanup continue: ret void -cleanup: +cleanup: landingpad { i8*, i32 } cleanup ret void diff --git a/llvm/test/CodeGen/X86/push-cfi.ll b/llvm/test/CodeGen/X86/push-cfi.ll index 44f8bf857c4..91e579a8391 100644 --- a/llvm/test/CodeGen/X86/push-cfi.ll +++ b/llvm/test/CodeGen/X86/push-cfi.ll @@ -74,9 +74,8 @@ cleanup: ; LINUX-NEXT: pushl $1 ; LINUX-NEXT: .cfi_adjust_cfa_offset 4 ; LINUX-NEXT: call -; LINUX-NEXT: addl $16, %esp +; LINUX-NEXT: addl $28, %esp ; LINUX: .cfi_adjust_cfa_offset -16 -; LINUX: addl $12, %esp ; DARWIN-NOT: .cfi_escape ; DARWIN-NOT: pushl define void @test2_nofp() #0 personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) { diff --git a/llvm/test/CodeGen/X86/return-ext.ll b/llvm/test/CodeGen/X86/return-ext.ll index c66e518943a..ef160f43b4a 100644 --- a/llvm/test/CodeGen/X86/return-ext.ll +++ b/llvm/test/CodeGen/X86/return-ext.ll @@ -106,7 +106,6 @@ entry: ; CHECK: call ; CHECK-NEXT: movzbl ; CHECK-NEXT: {{pop|add}} -; CHECK-NEXT: .cfi_def_cfa_offset {{4|8}} ; CHECK-NEXT: ret } @@ -121,7 +120,6 @@ entry: ; CHECK: call ; CHECK-NEXT: movzbl ; CHECK-NEXT: {{pop|add}} -; CHECK-NEXT: .cfi_def_cfa_offset {{4|8}} ; CHECK-NEXT: ret } @@ -136,6 +134,5 @@ entry: ; CHECK: call ; CHECK-NEXT: movzwl ; CHECK-NEXT: {{pop|add}} -; CHECK-NEXT: .cfi_def_cfa_offset {{4|8}} ; CHECK-NEXT: ret } diff --git a/llvm/test/CodeGen/X86/rtm.ll b/llvm/test/CodeGen/X86/rtm.ll index a1feeb5999b..bd2d3e544bd 100644 --- a/llvm/test/CodeGen/X86/rtm.ll +++ b/llvm/test/CodeGen/X86/rtm.ll @@ -75,7 +75,6 @@ define void @f2(i32 %x) nounwind uwtable { ; X64-NEXT: xabort $1 ; X64-NEXT: callq f1 ; X64-NEXT: popq %rax -; X64-NEXT: .cfi_def_cfa_offset 8 ; X64-NEXT: retq entry: %x.addr = alloca i32, align 4 diff --git a/llvm/test/CodeGen/X86/select-mmx.ll b/llvm/test/CodeGen/X86/select-mmx.ll index 7ad8b6f1b9c..795990e3c32 100644 --- a/llvm/test/CodeGen/X86/select-mmx.ll +++ b/llvm/test/CodeGen/X86/select-mmx.ll @@ -48,7 +48,6 @@ define i64 @test47(i64 %arg) { ; I32-NEXT: movl {{[0-9]+}}(%esp), %edx ; I32-NEXT: movl %ebp, %esp ; I32-NEXT: popl %ebp -; I32-NEXT: .cfi_def_cfa %esp, 4 ; I32-NEXT: retl %cond = icmp eq i64 %arg, 0 %slct = select i1 %cond, x86_mmx bitcast (i64 7 to x86_mmx), x86_mmx bitcast (i64 0 to x86_mmx) @@ -101,7 +100,6 @@ define i64 @test49(i64 %arg, i64 %x, i64 %y) { ; I32-NEXT: movl {{[0-9]+}}(%esp), %edx ; I32-NEXT: movl %ebp, %esp ; I32-NEXT: popl %ebp -; I32-NEXT: .cfi_def_cfa %esp, 4 ; I32-NEXT: retl %cond = icmp eq i64 %arg, 0 %xmmx = bitcast i64 %x to x86_mmx diff --git a/llvm/test/CodeGen/X86/setcc-lowering.ll b/llvm/test/CodeGen/X86/setcc-lowering.ll index 5ae2cc5f35c..359b8d68063 100644 --- 
a/llvm/test/CodeGen/X86/setcc-lowering.ll +++ b/llvm/test/CodeGen/X86/setcc-lowering.ll @@ -89,7 +89,6 @@ define void @pr26232(i64 %a, <16 x i1> %b) { ; KNL-32-NEXT: jne .LBB1_1 ; KNL-32-NEXT: # BB#2: # %for_exit600 ; KNL-32-NEXT: popl %esi -; KNL-32-NEXT: .cfi_def_cfa_offset 4 ; KNL-32-NEXT: retl allocas: br label %for_test11.preheader diff --git a/llvm/test/CodeGen/X86/shrink_vmul.ll b/llvm/test/CodeGen/X86/shrink_vmul.ll index a2767205fe2..79cf0f2c8f1 100644 --- a/llvm/test/CodeGen/X86/shrink_vmul.ll +++ b/llvm/test/CodeGen/X86/shrink_vmul.ll @@ -31,7 +31,6 @@ define void @mul_2xi8(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3] ; X86-NEXT: movq %xmm1, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_2xi8: @@ -90,7 +89,6 @@ define void @mul_4xi8(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3] ; X86-NEXT: movdqu %xmm1, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_4xi8: @@ -150,7 +148,6 @@ define void @mul_8xi8(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: movdqu %xmm1, 16(%esi,%ecx,4) ; X86-NEXT: movdqu %xmm0, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_8xi8: @@ -223,7 +220,6 @@ define void @mul_16xi8(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: movdqu %xmm4, 16(%esi,%ecx,4) ; X86-NEXT: movdqu %xmm3, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_16xi8: @@ -292,7 +288,6 @@ define void @mul_2xi16(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3] ; X86-NEXT: movq %xmm1, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_2xi16: @@ -347,7 +342,6 @@ define void @mul_4xi16(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3] ; X86-NEXT: movdqu %xmm1, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_4xi16: @@ -405,7 +399,6 @@ define void @mul_8xi16(i8* nocapture readonly %a, i8* nocapture readonly %b, i64 ; X86-NEXT: movdqu %xmm1, 16(%esi,%ecx,4) ; X86-NEXT: movdqu %xmm0, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_8xi16: @@ -476,7 +469,6 @@ define void @mul_16xi16(i8* nocapture readonly %a, i8* nocapture readonly %b, i6 ; X86-NEXT: movdqu %xmm2, 16(%esi,%ecx,4) ; X86-NEXT: movdqu %xmm0, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_16xi16: @@ -549,7 +541,6 @@ define void @mul_2xi8_sext(i8* nocapture readonly %a, i8* nocapture readonly %b, ; X86-NEXT: psrad $16, %xmm0 ; X86-NEXT: movq %xmm0, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_2xi8_sext: @@ -615,7 +606,6 @@ define void @mul_2xi8_sext_zext(i8* nocapture readonly %a, i8* nocapture readonl ; X86-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3] ; X86-NEXT: movq %xmm0, 
(%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_2xi8_sext_zext: @@ -676,7 +666,6 @@ define void @mul_2xi16_sext(i8* nocapture readonly %a, i8* nocapture readonly %b ; X86-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3] ; X86-NEXT: movq %xmm1, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_2xi16_sext: @@ -744,7 +733,6 @@ define void @mul_2xi16_sext_zext(i8* nocapture readonly %a, i8* nocapture readon ; X86-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3] ; X86-NEXT: movq %xmm0, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_2xi16_sext_zext: @@ -825,7 +813,6 @@ define void @mul_16xi16_sext(i8* nocapture readonly %a, i8* nocapture readonly % ; X86-NEXT: movdqu %xmm2, 16(%esi,%ecx,4) ; X86-NEXT: movdqu %xmm0, (%esi,%ecx,4) ; X86-NEXT: popl %esi -; X86-NEXT: .cfi_def_cfa_offset 4 ; X86-NEXT: retl ; ; X64-LABEL: mul_16xi16_sext: diff --git a/llvm/test/CodeGen/X86/statepoint-call-lowering.ll b/llvm/test/CodeGen/X86/statepoint-call-lowering.ll index d80c87b99b6..bd2dd53b654 100644 --- a/llvm/test/CodeGen/X86/statepoint-call-lowering.ll +++ b/llvm/test/CodeGen/X86/statepoint-call-lowering.ll @@ -83,7 +83,6 @@ define i1 @test_relocate(i32 addrspace(1)* %a) gc "statepoint-example" { ; CHECK: callq return_i1 ; CHECK-NEXT: .Ltmp5: ; CHECK-NEXT: popq %rcx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq entry: %safepoint_token = tail call token (i64, i32, i1 ()*, i32, i32, ...) @llvm.experimental.gc.statepoint.p0f_i1f(i64 0, i32 0, i1 ()* @return_i1, i32 0, i32 0, i32 0, i32 0, i32 addrspace(1)* %a) diff --git a/llvm/test/CodeGen/X86/statepoint-gctransition-call-lowering.ll b/llvm/test/CodeGen/X86/statepoint-gctransition-call-lowering.ll index 90f2002e2d4..b88ca03805f 100644 --- a/llvm/test/CodeGen/X86/statepoint-gctransition-call-lowering.ll +++ b/llvm/test/CodeGen/X86/statepoint-gctransition-call-lowering.ll @@ -69,7 +69,6 @@ define i1 @test_relocate(i32 addrspace(1)* %a) gc "statepoint-example" { ; CHECK: callq return_i1 ; CHECK-NEXT: .Ltmp4: ; CHECK-NEXT: popq %rcx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq entry: %safepoint_token = tail call token (i64, i32, i1 ()*, i32, i32, ...) 
@llvm.experimental.gc.statepoint.p0f_i1f(i64 0, i32 0, i1 ()* @return_i1, i32 0, i32 1, i32 0, i32 0, i32 addrspace(1)* %a) diff --git a/llvm/test/CodeGen/X86/statepoint-invoke.ll b/llvm/test/CodeGen/X86/statepoint-invoke.ll index 5aa902546c1..784b932addc 100644 --- a/llvm/test/CodeGen/X86/statepoint-invoke.ll +++ b/llvm/test/CodeGen/X86/statepoint-invoke.ll @@ -142,7 +142,6 @@ normal_return: ; CHECK-LABEL: %normal_return ; CHECK: xorl %eax, %eax ; CHECK-NEXT: popq - ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %null.relocated = call coldcc i64 addrspace(1)* @llvm.experimental.gc.relocate.p1i64(token %sp1, i32 13, i32 13) %undef.relocated = call coldcc i64 addrspace(1)* @llvm.experimental.gc.relocate.p1i64(token %sp1, i32 14, i32 14) @@ -170,7 +169,6 @@ entry: normal_return: ; CHECK: leaq ; CHECK-NEXT: popq - ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %aa.rel = call coldcc i32 addrspace(1)* @llvm.experimental.gc.relocate.p1i32(token %sp, i32 13, i32 13) %aa.converted = bitcast i32 addrspace(1)* %aa.rel to i64 addrspace(1)* @@ -179,7 +177,6 @@ normal_return: exceptional_return: ; CHECK: movl $15 ; CHECK-NEXT: popq - ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq %landing_pad = landingpad token cleanup diff --git a/llvm/test/CodeGen/X86/throws-cfi-fp.ll b/llvm/test/CodeGen/X86/throws-cfi-fp.ll deleted file mode 100644 index bacd965054c..00000000000 --- a/llvm/test/CodeGen/X86/throws-cfi-fp.ll +++ /dev/null @@ -1,98 +0,0 @@ -; RUN: llc %s -o - | FileCheck %s - -; ModuleID = 'throws-cfi-fp.cpp' -source_filename = "throws-cfi-fp.cpp" -target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128" -target triple = "x86_64-unknown-linux-gnu" - -$__clang_call_terminate = comdat any - -@_ZL11ShouldThrow = internal unnamed_addr global i1 false, align 1 -@_ZTIi = external constant i8* -@str = private unnamed_addr constant [20 x i8] c"Threw an exception!\00" - -; Function Attrs: uwtable -define void @_Z6throwsv() #0 personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) { - -; CHECK-LABEL: _Z6throwsv: -; CHECK: popq %rbp -; CHECK-NEXT: .cfi_def_cfa %rsp, 8 -; CHECK-NEXT: retq -; CHECK-NEXT: .LBB0_1: -; CHECK-NEXT: .cfi_def_cfa %rbp, 16 - -entry: - %.b5 = load i1, i1* @_ZL11ShouldThrow, align 1 - br i1 %.b5, label %if.then, label %try.cont - -if.then: ; preds = %entry - %exception = tail call i8* @__cxa_allocate_exception(i64 4) - %0 = bitcast i8* %exception to i32* - store i32 1, i32* %0, align 16 - invoke void @__cxa_throw(i8* %exception, i8* bitcast (i8** @_ZTIi to i8*), i8* null) - to label %unreachable unwind label %lpad - -lpad: ; preds = %if.then - %1 = landingpad { i8*, i32 } - catch i8* null - %2 = extractvalue { i8*, i32 } %1, 0 - %3 = tail call i8* @__cxa_begin_catch(i8* %2) - %puts = tail call i32 @puts(i8* getelementptr inbounds ([20 x i8], [20 x i8]* @str, i64 0, i64 0)) - invoke void @__cxa_rethrow() - to label %unreachable unwind label %lpad1 - -lpad1: ; preds = %lpad - %4 = landingpad { i8*, i32 } - cleanup - invoke void @__cxa_end_catch() - to label %eh.resume unwind label %terminate.lpad - -try.cont: ; preds = %entry - ret void - -eh.resume: ; preds = %lpad1 - resume { i8*, i32 } %4 - -terminate.lpad: ; preds = %lpad1 - %5 = landingpad { i8*, i32 } - catch i8* null - %6 = extractvalue { i8*, i32 } %5, 0 - tail call void @__clang_call_terminate(i8* %6) - unreachable - -unreachable: ; preds = %lpad, %if.then - unreachable -} - -declare i8* @__cxa_allocate_exception(i64) - -declare void @__cxa_throw(i8*, i8*, i8*) - -declare i32 
@__gxx_personality_v0(...) - -declare i8* @__cxa_begin_catch(i8*) - -declare void @__cxa_rethrow() - -declare void @__cxa_end_catch() - -; Function Attrs: noinline noreturn nounwind -declare void @__clang_call_terminate(i8*) - -declare void @_ZSt9terminatev() - -; Function Attrs: nounwind -declare i32 @puts(i8* nocapture readonly) - -attributes #0 = { "no-frame-pointer-elim"="true" } - -!llvm.dbg.cu = !{!2} -!llvm.module.flags = !{!8, !9, !10} - -!2 = distinct !DICompileUnit(language: DW_LANG_C_plus_plus, file: !3, producer: "clang version 6.0.0 (https://github.com/llvm-mirror/clang.git 316ebefb7fff8ad324a08a694347500b6cd7c95f) (https://github.com/llvm-mirror/llvm.git dcae9be81fc17cdfbe989402354d3c8ecd0a2c79)", isOptimized: true, runtimeVersion: 0, emissionKind: FullDebug, enums: !4, globals: !5) -!3 = !DIFile(filename: "throws-cfi-fp.cpp", directory: "epilogue-dwarf/test") -!4 = !{} -!5 = !{} -!8 = !{i32 2, !"Dwarf Version", i32 4} -!9 = !{i32 2, !"Debug Info Version", i32 3} -!10 = !{i32 1, !"wchar_size", i32 4} diff --git a/llvm/test/CodeGen/X86/throws-cfi-no-fp.ll b/llvm/test/CodeGen/X86/throws-cfi-no-fp.ll deleted file mode 100644 index 1483e6b8483..00000000000 --- a/llvm/test/CodeGen/X86/throws-cfi-no-fp.ll +++ /dev/null @@ -1,97 +0,0 @@ -; RUN: llc %s -o - | FileCheck %s - -; ModuleID = 'throws-cfi-no-fp.cpp' -source_filename = "throws-cfi-no-fp.cpp" -target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128" -target triple = "x86_64-unknown-linux-gnu" - -$__clang_call_terminate = comdat any - -@_ZL11ShouldThrow = internal unnamed_addr global i1 false, align 1 -@_ZTIi = external constant i8* -@str = private unnamed_addr constant [20 x i8] c"Threw an exception!\00" - -; Function Attrs: uwtable -define void @_Z6throwsv() personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) { - -; CHECK-LABEL: _Z6throwsv: -; CHECK: popq %rbx -; CHECK-NEXT: .cfi_def_cfa_offset 8 -; CHECK-NEXT: retq -; CHECK-NEXT: .LBB0_1: -; CHECK-NEXT: .cfi_def_cfa_offset 16 - -entry: - %.b5 = load i1, i1* @_ZL11ShouldThrow, align 1 - br i1 %.b5, label %if.then, label %try.cont - -if.then: ; preds = %entry - %exception = tail call i8* @__cxa_allocate_exception(i64 4) - %0 = bitcast i8* %exception to i32* - store i32 1, i32* %0, align 16 - invoke void @__cxa_throw(i8* %exception, i8* bitcast (i8** @_ZTIi to i8*), i8* null) - to label %unreachable unwind label %lpad - -lpad: ; preds = %if.then - %1 = landingpad { i8*, i32 } - catch i8* null - %2 = extractvalue { i8*, i32 } %1, 0 - %3 = tail call i8* @__cxa_begin_catch(i8* %2) - %puts = tail call i32 @puts(i8* getelementptr inbounds ([20 x i8], [20 x i8]* @str, i64 0, i64 0)) - invoke void @__cxa_rethrow() #4 - to label %unreachable unwind label %lpad1 - -lpad1: ; preds = %lpad - %4 = landingpad { i8*, i32 } - cleanup - invoke void @__cxa_end_catch() - to label %eh.resume unwind label %terminate.lpad - -try.cont: ; preds = %entry - ret void - -eh.resume: ; preds = %lpad1 - resume { i8*, i32 } %4 - -terminate.lpad: ; preds = %lpad1 - %5 = landingpad { i8*, i32 } - catch i8* null - %6 = extractvalue { i8*, i32 } %5, 0 - tail call void @__clang_call_terminate(i8* %6) - unreachable - -unreachable: ; preds = %lpad, %if.then - unreachable -} - -declare i8* @__cxa_allocate_exception(i64) - -declare void @__cxa_throw(i8*, i8*, i8*) - -declare i32 @__gxx_personality_v0(...) 
- -declare i8* @__cxa_begin_catch(i8*) - -declare void @__cxa_rethrow() - -declare void @__cxa_end_catch() - -; Function Attrs: noinline noreturn nounwind -declare void @__clang_call_terminate(i8*) - -declare void @_ZSt9terminatev() - - -; Function Attrs: nounwind -declare i32 @puts(i8* nocapture readonly) - -!llvm.dbg.cu = !{!2} -!llvm.module.flags = !{!8, !9, !10} - -!2 = distinct !DICompileUnit(language: DW_LANG_C_plus_plus, file: !3, producer: "clang version 6.0.0 (https://github.com/llvm-mirror/clang.git 316ebefb7fff8ad324a08a694347500b6cd7c95f) (https://github.com/llvm-mirror/llvm.git dcae9be81fc17cdfbe989402354d3c8ecd0a2c79)", isOptimized: true, runtimeVersion: 0, emissionKind: FullDebug, enums: !4, globals: !5) -!3 = !DIFile(filename: "throws-cfi-no-fp.cpp", directory: "epilogue-dwarf/test") -!4 = !{} -!5 = !{} -!8 = !{i32 2, !"Dwarf Version", i32 4} -!9 = !{i32 2, !"Debug Info Version", i32 3} -!10 = !{i32 1, !"wchar_size", i32 4} diff --git a/llvm/test/CodeGen/X86/vector-sext.ll b/llvm/test/CodeGen/X86/vector-sext.ll index 25377f26799..cd4b237735f 100644 --- a/llvm/test/CodeGen/X86/vector-sext.ll +++ b/llvm/test/CodeGen/X86/vector-sext.ll @@ -3333,17 +3333,11 @@ define <16 x i16> @load_sext_16i1_to_16i16(<16 x i1> *%ptr) { ; AVX1-NEXT: vpinsrw $7, %ebp, %xmm1, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: popq %rbx -; AVX1-NEXT: .cfi_def_cfa_offset 48 ; AVX1-NEXT: popq %r12 -; AVX1-NEXT: .cfi_def_cfa_offset 40 ; AVX1-NEXT: popq %r13 -; AVX1-NEXT: .cfi_def_cfa_offset 32 ; AVX1-NEXT: popq %r14 -; AVX1-NEXT: .cfi_def_cfa_offset 24 ; AVX1-NEXT: popq %r15 -; AVX1-NEXT: .cfi_def_cfa_offset 16 ; AVX1-NEXT: popq %rbp -; AVX1-NEXT: .cfi_def_cfa_offset 8 ; AVX1-NEXT: retq ; ; AVX2-LABEL: load_sext_16i1_to_16i16: @@ -3430,17 +3424,11 @@ define <16 x i16> @load_sext_16i1_to_16i16(<16 x i1> *%ptr) { ; AVX2-NEXT: vpinsrw $7, %ebp, %xmm1, %xmm1 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 ; AVX2-NEXT: popq %rbx -; AVX2-NEXT: .cfi_def_cfa_offset 48 ; AVX2-NEXT: popq %r12 -; AVX2-NEXT: .cfi_def_cfa_offset 40 ; AVX2-NEXT: popq %r13 -; AVX2-NEXT: .cfi_def_cfa_offset 32 ; AVX2-NEXT: popq %r14 -; AVX2-NEXT: .cfi_def_cfa_offset 24 ; AVX2-NEXT: popq %r15 -; AVX2-NEXT: .cfi_def_cfa_offset 16 ; AVX2-NEXT: popq %rbp -; AVX2-NEXT: .cfi_def_cfa_offset 8 ; AVX2-NEXT: retq ; ; AVX512F-LABEL: load_sext_16i1_to_16i16: @@ -4836,7 +4824,6 @@ define i32 @sext_2i8_to_i32(<16 x i8> %A) nounwind uwtable readnone ssp { ; X32-SSE41-NEXT: pmovsxbw %xmm0, %xmm0 ; X32-SSE41-NEXT: movd %xmm0, %eax ; X32-SSE41-NEXT: popl %ecx -; X32-SSE41-NEXT: .cfi_def_cfa_offset 4 ; X32-SSE41-NEXT: retl entry: %Shuf = shufflevector <16 x i8> %A, <16 x i8> undef, <2 x i32> <i32 0, i32 1> diff --git a/llvm/test/CodeGen/X86/vector-shuffle-avx512.ll b/llvm/test/CodeGen/X86/vector-shuffle-avx512.ll index b107b60cd6d..efbe5586747 100644 --- a/llvm/test/CodeGen/X86/vector-shuffle-avx512.ll +++ b/llvm/test/CodeGen/X86/vector-shuffle-avx512.ll @@ -619,7 +619,6 @@ define <64 x i8> @test_mm512_mask_blend_epi8(<64 x i8> %A, <64 x i8> %W){ ; KNL32-NEXT: vpblendvb %ymm3, 8(%ebp), %ymm1, %ymm1 ; KNL32-NEXT: movl %ebp, %esp ; KNL32-NEXT: popl %ebp -; KNL32-NEXT: .cfi_def_cfa %esp, 4 ; KNL32-NEXT: retl entry: %0 = shufflevector <64 x i8> %A, <64 x i8> %W, <64 x i32> <i32 64, i32 1, i32 66, i32 3, i32 68, i32 5, i32 70, i32 7, i32 72, i32 9, i32 74, i32 11, i32 76, i32 13, i32 78, i32 15, i32 80, i32 17, i32 82, i32 19, i32 84, i32 21, i32 86, i32 23, i32 88, i32 25, i32 90, i32 27, i32 92, i32 29, i32 94, i32 31, i32 96, i32 33, 
i32 98, i32 35, i32 100, i32 37, i32 102, i32 39, i32 104, i32 41, i32 106, i32 43, i32 108, i32 45, i32 110, i32 47, i32 112, i32 49, i32 114, i32 51, i32 116, i32 53, i32 118, i32 55, i32 120, i32 57, i32 122, i32 59, i32 124, i32 61, i32 126, i32 63> @@ -660,7 +659,6 @@ define <32 x i16> @test_mm512_mask_blend_epi16(<32 x i16> %A, <32 x i16> %W){ ; KNL32-NEXT: vpblendw {{.*#+}} ymm1 = mem[0],ymm1[1],mem[2],ymm1[3],mem[4],ymm1[5],mem[6],ymm1[7],mem[8],ymm1[9],mem[10],ymm1[11],mem[12],ymm1[13],mem[14],ymm1[15] ; KNL32-NEXT: movl %ebp, %esp ; KNL32-NEXT: popl %ebp -; KNL32-NEXT: .cfi_def_cfa %esp, 4 ; KNL32-NEXT: retl entry: %0 = shufflevector <32 x i16> %A, <32 x i16> %W, <32 x i32> <i32 32, i32 1, i32 34, i32 3, i32 36, i32 5, i32 38, i32 7, i32 40, i32 9, i32 42, i32 11, i32 44, i32 13, i32 46, i32 15, i32 48, i32 17, i32 50, i32 19, i32 52, i32 21, i32 54, i32 23, i32 56, i32 25, i32 58, i32 27, i32 60, i32 29, i32 62, i32 31> diff --git a/llvm/test/CodeGen/X86/vector-shuffle-v1.ll b/llvm/test/CodeGen/X86/vector-shuffle-v1.ll index 0e690347a54..8d057290085 100644 --- a/llvm/test/CodeGen/X86/vector-shuffle-v1.ll +++ b/llvm/test/CodeGen/X86/vector-shuffle-v1.ll @@ -630,7 +630,6 @@ define i64 @shuf64i1_zero(i64 %a) { ; AVX512F-NEXT: orq %rcx, %rax ; AVX512F-NEXT: movq %rbp, %rsp ; AVX512F-NEXT: popq %rbp -; AVX512F-NEXT: .cfi_def_cfa %rsp, 8 ; AVX512F-NEXT: vzeroupper ; AVX512F-NEXT: retq ; @@ -663,7 +662,6 @@ define i64 @shuf64i1_zero(i64 %a) { ; AVX512VL-NEXT: orq %rcx, %rax ; AVX512VL-NEXT: movq %rbp, %rsp ; AVX512VL-NEXT: popq %rbp -; AVX512VL-NEXT: .cfi_def_cfa %rsp, 8 ; AVX512VL-NEXT: vzeroupper ; AVX512VL-NEXT: retq ; diff --git a/llvm/test/CodeGen/X86/wide-integer-cmp.ll b/llvm/test/CodeGen/X86/wide-integer-cmp.ll index 9bd53c6fbd3..97460b36a74 100644 --- a/llvm/test/CodeGen/X86/wide-integer-cmp.ll +++ b/llvm/test/CodeGen/X86/wide-integer-cmp.ll @@ -105,13 +105,10 @@ define i32 @test_wide(i128 %a, i128 %b) { ; CHECK-NEXT: # BB#1: # %bb1 ; CHECK-NEXT: movl $1, %eax ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl ; CHECK-NEXT: .LBB4_2: # %bb2 -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: movl $2, %eax ; CHECK-NEXT: popl %esi -; CHECK-NEXT: .cfi_def_cfa_offset 4 ; CHECK-NEXT: retl entry: %cmp = icmp slt i128 %a, %b diff --git a/llvm/test/CodeGen/X86/x86-framelowering-trap.ll b/llvm/test/CodeGen/X86/x86-framelowering-trap.ll index 89f4528fb06..f1590abcae8 100644 --- a/llvm/test/CodeGen/X86/x86-framelowering-trap.ll +++ b/llvm/test/CodeGen/X86/x86-framelowering-trap.ll @@ -6,7 +6,6 @@ target triple = "x86_64-unknown-linux-gnu" ; CHECK: pushq ; CHECK: ud2 ; CHECK-NEXT: popq -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq define void @bar() { entry: diff --git a/llvm/test/CodeGen/X86/x86-interleaved-access.ll b/llvm/test/CodeGen/X86/x86-interleaved-access.ll index bc6a6ea205c..acad9f771fc 100644 --- a/llvm/test/CodeGen/X86/x86-interleaved-access.ll +++ b/llvm/test/CodeGen/X86/x86-interleaved-access.ll @@ -1816,7 +1816,6 @@ define void @interleaved_store_vf64_i8_stride4(<64 x i8> %a, <64 x i8> %b, <64 x ; AVX1-NEXT: vmovaps %ymm9, 64(%rdi) ; AVX1-NEXT: vmovaps %ymm8, (%rdi) ; AVX1-NEXT: addq $24, %rsp -; AVX1-NEXT: .cfi_def_cfa_offset 8 ; AVX1-NEXT: vzeroupper ; AVX1-NEXT: retq ; diff --git a/llvm/test/CodeGen/X86/x86-no_caller_saved_registers-preserve.ll b/llvm/test/CodeGen/X86/x86-no_caller_saved_registers-preserve.ll index 929dafbfc21..763d764698d 100644 --- a/llvm/test/CodeGen/X86/x86-no_caller_saved_registers-preserve.ll +++ 
b/llvm/test/CodeGen/X86/x86-no_caller_saved_registers-preserve.ll @@ -20,7 +20,6 @@ define x86_64_sysvcc i32 @bar(i32 %a0, i32 %a1, float %b0) #0 { ; CHECK-NEXT: movl $4, %eax ; CHECK-NEXT: movaps -{{[0-9]+}}(%rsp), %xmm1 # 16-byte Reload ; CHECK-NEXT: popq %rdx -; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: retq call void asm sideeffect "", "~{rax},~{rdx},~{xmm1},~{rdi},~{rsi},~{xmm0}"() ret i32 4