; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-linux | FileCheck %s -check-prefix=CHECK
; RUN: llc < %s -mtriple=i686-linux --disable-fixup-SFB | FileCheck %s --check-prefix=DISABLED
; RUN: llc < %s -mtriple=i686-linux -mattr +sse4.1 | FileCheck %s -check-prefix=CHECK-AVX2
; RUN: llc < %s -mtriple=i686-linux -mattr=+avx512f,+avx512bw,+avx512vl,+avx512dq | FileCheck %s -check-prefix=CHECK-AVX512

%struct.S = type { i32, i32, i32, i32 }

; Function Attrs: nounwind uwtable
define void @test_conditional_block(%struct.S* nocapture %s1, %struct.S* nocapture %s2, i32 %x, %struct.S* nocapture %s3, %struct.S* nocapture readonly %s4) local_unnamed_addr #0 {
; CHECK-LABEL: test_conditional_block:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: pushl %edi
; CHECK-NEXT: .cfi_def_cfa_offset 8
; CHECK-NEXT: pushl %esi
; CHECK-NEXT: .cfi_def_cfa_offset 12
; CHECK-NEXT: .cfi_offset %esi, -12
; CHECK-NEXT: .cfi_offset %edi, -8
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT: cmpl $18, %edi
; CHECK-NEXT: jl .LBB0_2
; CHECK-NEXT: # %bb.1: # %if.then
; CHECK-NEXT: movl %edi, 4(%ecx)
; CHECK-NEXT: .LBB0_2: # %if.end
; CHECK-NEXT: movups (%esi), %xmm0
; CHECK-NEXT: movups %xmm0, (%edx)
; CHECK-NEXT: movl (%ecx), %edx
; CHECK-NEXT: movl %edx, (%eax)
; CHECK-NEXT: movl 4(%ecx), %edx
; CHECK-NEXT: movl %edx, 4(%eax)
; CHECK-NEXT: movl 8(%ecx), %edx
; CHECK-NEXT: movl %edx, 8(%eax)
; CHECK-NEXT: movl 12(%ecx), %ecx
; CHECK-NEXT: movl %ecx, 12(%eax)
; CHECK-NEXT: popl %esi
; CHECK-NEXT: popl %edi
; CHECK-NEXT: retl
;
; DISABLED-LABEL: test_conditional_block:
; DISABLED: # %bb.0: # %entry
; DISABLED-NEXT: pushl %edi
; DISABLED-NEXT: .cfi_def_cfa_offset 8
; DISABLED-NEXT: pushl %esi
; DISABLED-NEXT: .cfi_def_cfa_offset 12
; DISABLED-NEXT: .cfi_offset %esi, -12
; DISABLED-NEXT: .cfi_offset %edi, -8
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi
; DISABLED-NEXT: cmpl $18, %edi
; DISABLED-NEXT: jl .LBB0_2
; DISABLED-NEXT: # %bb.1: # %if.then
; DISABLED-NEXT: movl %edi, 4(%esi)
; DISABLED-NEXT: .LBB0_2: # %if.end
; DISABLED-NEXT: movups (%edx), %xmm0
; DISABLED-NEXT: movups %xmm0, (%ecx)
; DISABLED-NEXT: movups (%esi), %xmm0
; DISABLED-NEXT: movups %xmm0, (%eax)
; DISABLED-NEXT: popl %esi
; DISABLED-NEXT: popl %edi
; DISABLED-NEXT: retl
;
; CHECK-AVX2-LABEL: test_conditional_block:
; CHECK-AVX2: # %bb.0: # %entry
; CHECK-AVX2-NEXT: pushl %edi
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8
; CHECK-AVX2-NEXT: pushl %esi
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12
; CHECK-AVX2-NEXT: .cfi_offset %esi, -12
; CHECK-AVX2-NEXT: .cfi_offset %edi, -8
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX2-NEXT: cmpl $18, %edi
; CHECK-AVX2-NEXT: jl .LBB0_2
; CHECK-AVX2-NEXT: # %bb.1: # %if.then
; CHECK-AVX2-NEXT: movl %edi, 4(%ecx)
; CHECK-AVX2-NEXT: .LBB0_2: # %if.end
; CHECK-AVX2-NEXT: movups (%esi), %xmm0
; CHECK-AVX2-NEXT: movups %xmm0, (%edx)
; CHECK-AVX2-NEXT: movl (%ecx), %edx
; CHECK-AVX2-NEXT: movl %edx, (%eax)
; CHECK-AVX2-NEXT: movl 4(%ecx), %edx
; CHECK-AVX2-NEXT: movl %edx, 4(%eax)
; CHECK-AVX2-NEXT: movl 8(%ecx), %edx
; CHECK-AVX2-NEXT: movl %edx, 8(%eax)
; CHECK-AVX2-NEXT: movl 12(%ecx), %ecx
; CHECK-AVX2-NEXT: movl %ecx, 12(%eax)
; CHECK-AVX2-NEXT: popl %esi
; CHECK-AVX2-NEXT: popl %edi
; CHECK-AVX2-NEXT: retl
;
; CHECK-AVX512-LABEL: test_conditional_block:
; CHECK-AVX512: # %bb.0: # %entry
; CHECK-AVX512-NEXT: pushl %edi
; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8
; CHECK-AVX512-NEXT: pushl %esi
; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12
; CHECK-AVX512-NEXT: .cfi_offset %esi, -12
; CHECK-AVX512-NEXT: .cfi_offset %edi, -8
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX512-NEXT: cmpl $18, %edi
; CHECK-AVX512-NEXT: jl .LBB0_2
; CHECK-AVX512-NEXT: # %bb.1: # %if.then
; CHECK-AVX512-NEXT: movl %edi, 4(%ecx)
; CHECK-AVX512-NEXT: .LBB0_2: # %if.end
; CHECK-AVX512-NEXT: vmovups (%esi), %xmm0
; CHECK-AVX512-NEXT: vmovups %xmm0, (%edx)
; CHECK-AVX512-NEXT: movl (%ecx), %edx
; CHECK-AVX512-NEXT: movl %edx, (%eax)
; CHECK-AVX512-NEXT: movl 4(%ecx), %edx
; CHECK-AVX512-NEXT: movl %edx, 4(%eax)
; CHECK-AVX512-NEXT: movl 8(%ecx), %edx
; CHECK-AVX512-NEXT: movl %edx, 8(%eax)
; CHECK-AVX512-NEXT: movl 12(%ecx), %ecx
; CHECK-AVX512-NEXT: movl %ecx, 12(%eax)
; CHECK-AVX512-NEXT: popl %esi
; CHECK-AVX512-NEXT: popl %edi
; CHECK-AVX512-NEXT: retl
entry:
  %cmp = icmp sgt i32 %x, 17
  br i1 %cmp, label %if.then, label %if.end

if.then: ; preds = %entry
  %b = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 1
  store i32 %x, i32* %b, align 4
  br label %if.end

if.end: ; preds = %if.then, %entry
  %0 = bitcast %struct.S* %s3 to i8*
  %1 = bitcast %struct.S* %s4 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 4, i1 false)
  %2 = bitcast %struct.S* %s2 to i8*
  %3 = bitcast %struct.S* %s1 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 4, i1 false)
  ret void
}

; Function Attrs: nounwind uwtable
define void @test_imm_store(%struct.S* nocapture %s1, %struct.S* nocapture %s2, i32 %x, %struct.S* nocapture %s3) local_unnamed_addr #0 {
; CHECK-LABEL: test_imm_store:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-NEXT: movl $0, (%edx)
; CHECK-NEXT: movl $1, (%ecx)
; CHECK-NEXT: movl (%edx), %ecx
; CHECK-NEXT: movl %ecx, (%eax)
; CHECK-NEXT: movl 4(%edx), %ecx
; CHECK-NEXT: movl %ecx, 4(%eax)
; CHECK-NEXT: movl 8(%edx), %ecx
; CHECK-NEXT: movl %ecx, 8(%eax)
; CHECK-NEXT: movl 12(%edx), %ecx
; CHECK-NEXT: movl %ecx, 12(%eax)
; CHECK-NEXT: retl
;
; DISABLED-LABEL: test_imm_store:
; DISABLED: # %bb.0: # %entry
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT: movl $0, (%edx)
; DISABLED-NEXT: movl $1, (%ecx)
; DISABLED-NEXT: movups (%edx), %xmm0
; DISABLED-NEXT: movups %xmm0, (%eax)
; DISABLED-NEXT: retl
;
; CHECK-AVX2-LABEL: test_imm_store:
; CHECK-AVX2: # %bb.0: # %entry
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX2-NEXT: movl $0, (%edx)
; CHECK-AVX2-NEXT: movl $1, (%ecx)
; CHECK-AVX2-NEXT: movl (%edx), %ecx
; CHECK-AVX2-NEXT: movl %ecx, (%eax)
; CHECK-AVX2-NEXT: movl 4(%edx), %ecx
; CHECK-AVX2-NEXT: movl %ecx, 4(%eax)
; CHECK-AVX2-NEXT: movl 8(%edx), %ecx
; CHECK-AVX2-NEXT: movl %ecx, 8(%eax)
; CHECK-AVX2-NEXT: movl 12(%edx), %ecx
; CHECK-AVX2-NEXT: movl %ecx, 12(%eax)
; CHECK-AVX2-NEXT: retl
;
; CHECK-AVX512-LABEL: test_imm_store:
; CHECK-AVX512: # %bb.0: # %entry
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX512-NEXT: movl $0, (%edx)
; CHECK-AVX512-NEXT: movl $1, (%ecx)
; CHECK-AVX512-NEXT: movl (%edx), %ecx
; CHECK-AVX512-NEXT: movl %ecx, (%eax)
; CHECK-AVX512-NEXT: movl 4(%edx), %ecx
; CHECK-AVX512-NEXT: movl %ecx, 4(%eax)
; CHECK-AVX512-NEXT: movl 8(%edx), %ecx
; CHECK-AVX512-NEXT: movl %ecx, 8(%eax)
; CHECK-AVX512-NEXT: movl 12(%edx), %ecx
; CHECK-AVX512-NEXT: movl %ecx, 12(%eax)
; CHECK-AVX512-NEXT: retl
entry:
  %a = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 0
  store i32 0, i32* %a, align 4
  %a1 = getelementptr inbounds %struct.S, %struct.S* %s3, i64 0, i32 0
  store i32 1, i32* %a1, align 4
  %0 = bitcast %struct.S* %s2 to i8*
  %1 = bitcast %struct.S* %s1 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 4, i1 false)
  ret void
}

; Function Attrs: nounwind uwtable
define void @test_nondirect_br(%struct.S* nocapture %s1, %struct.S* nocapture %s2, i32 %x, %struct.S* nocapture %s3, %struct.S* nocapture readonly %s4, i32 %x2) local_unnamed_addr #0 {
; CHECK-LABEL: test_nondirect_br:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: pushl %edi
; CHECK-NEXT: .cfi_def_cfa_offset 8
; CHECK-NEXT: pushl %esi
; CHECK-NEXT: .cfi_def_cfa_offset 12
; CHECK-NEXT: .cfi_offset %esi, -12
; CHECK-NEXT: .cfi_offset %edi, -8
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT: cmpl $18, %ecx
; CHECK-NEXT: jl .LBB2_2
; CHECK-NEXT: # %bb.1: # %if.then
; CHECK-NEXT: movl %ecx, 4(%eax)
; CHECK-NEXT: .LBB2_2: # %if.end
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT: cmpl $14, %edx
; CHECK-NEXT: jl .LBB2_4
; CHECK-NEXT: # %bb.3: # %if.then2
; CHECK-NEXT: movl %edx, 12(%eax)
; CHECK-NEXT: .LBB2_4: # %if.end3
; CHECK-NEXT: movups (%edi), %xmm0
; CHECK-NEXT: movups %xmm0, (%esi)
; CHECK-NEXT: movl (%eax), %edx
; CHECK-NEXT: movl %edx, (%ecx)
; CHECK-NEXT: movl 4(%eax), %edx
; CHECK-NEXT: movl %edx, 4(%ecx)
; CHECK-NEXT: movl 8(%eax), %edx
; CHECK-NEXT: movl %edx, 8(%ecx)
; CHECK-NEXT: movl 12(%eax), %eax
; CHECK-NEXT: movl %eax, 12(%ecx)
; CHECK-NEXT: popl %esi
; CHECK-NEXT: popl %edi
; CHECK-NEXT: retl
;
; DISABLED-LABEL: test_nondirect_br:
; DISABLED: # %bb.0: # %entry
; DISABLED-NEXT: pushl %edi
; DISABLED-NEXT: .cfi_def_cfa_offset 8
; DISABLED-NEXT: pushl %esi
; DISABLED-NEXT: .cfi_def_cfa_offset 12
; DISABLED-NEXT: .cfi_offset %esi, -12
; DISABLED-NEXT: .cfi_offset %edi, -8
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax
; DISABLED-NEXT: cmpl $18, %edx
; DISABLED-NEXT: jl .LBB2_2
; DISABLED-NEXT: # %bb.1: # %if.then
; DISABLED-NEXT: movl %edx, 4(%eax)
; DISABLED-NEXT: .LBB2_2: # %if.end
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT: cmpl $14, %ecx
; DISABLED-NEXT: jl .LBB2_4
; DISABLED-NEXT: # %bb.3: # %if.then2
; DISABLED-NEXT: movl %ecx, 12(%eax)
; DISABLED-NEXT: .LBB2_4: # %if.end3
; DISABLED-NEXT: movups (%edi), %xmm0
; DISABLED-NEXT: movups %xmm0, (%esi)
; DISABLED-NEXT: movups (%eax), %xmm0
; DISABLED-NEXT: movups %xmm0, (%edx)
; DISABLED-NEXT: popl %esi
; DISABLED-NEXT: popl %edi
; DISABLED-NEXT: retl
;
; CHECK-AVX2-LABEL: test_nondirect_br:
; CHECK-AVX2: # %bb.0: # %entry
; CHECK-AVX2-NEXT: pushl %edi
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8
; CHECK-AVX2-NEXT: pushl %esi
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12
; CHECK-AVX2-NEXT: .cfi_offset %esi, -12
; CHECK-AVX2-NEXT: .cfi_offset %edi, -8
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX2-NEXT: cmpl $18, %ecx
; CHECK-AVX2-NEXT: jl .LBB2_2
; CHECK-AVX2-NEXT: # %bb.1: # %if.then
; CHECK-AVX2-NEXT: movl %ecx, 4(%eax)
; CHECK-AVX2-NEXT: .LBB2_2: # %if.end
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX2-NEXT: cmpl $14, %edx
; CHECK-AVX2-NEXT: jl .LBB2_4
; CHECK-AVX2-NEXT: # %bb.3: # %if.then2
; CHECK-AVX2-NEXT: movl %edx, 12(%eax)
; CHECK-AVX2-NEXT: .LBB2_4: # %if.end3
; CHECK-AVX2-NEXT: movups (%edi), %xmm0
; CHECK-AVX2-NEXT: movups %xmm0, (%esi)
; CHECK-AVX2-NEXT: movl (%eax), %edx
; CHECK-AVX2-NEXT: movl %edx, (%ecx)
; CHECK-AVX2-NEXT: movl 4(%eax), %edx
; CHECK-AVX2-NEXT: movl %edx, 4(%ecx)
; CHECK-AVX2-NEXT: movl 8(%eax), %edx
; CHECK-AVX2-NEXT: movl %edx, 8(%ecx)
; CHECK-AVX2-NEXT: movl 12(%eax), %eax
; CHECK-AVX2-NEXT: movl %eax, 12(%ecx)
; CHECK-AVX2-NEXT: popl %esi
; CHECK-AVX2-NEXT: popl %edi
; CHECK-AVX2-NEXT: retl
;
; CHECK-AVX512-LABEL: test_nondirect_br:
; CHECK-AVX512: # %bb.0: # %entry
; CHECK-AVX512-NEXT: pushl %edi
; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8
; CHECK-AVX512-NEXT: pushl %esi
; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12
; CHECK-AVX512-NEXT: .cfi_offset %esi, -12
; CHECK-AVX512-NEXT: .cfi_offset %edi, -8
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX512-NEXT: cmpl $18, %ecx
; CHECK-AVX512-NEXT: jl .LBB2_2
; CHECK-AVX512-NEXT: # %bb.1: # %if.then
; CHECK-AVX512-NEXT: movl %ecx, 4(%eax)
; CHECK-AVX512-NEXT: .LBB2_2: # %if.end
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX512-NEXT: cmpl $14, %edx
; CHECK-AVX512-NEXT: jl .LBB2_4
; CHECK-AVX512-NEXT: # %bb.3: # %if.then2
; CHECK-AVX512-NEXT: movl %edx, 12(%eax)
; CHECK-AVX512-NEXT: .LBB2_4: # %if.end3
; CHECK-AVX512-NEXT: vmovups (%edi), %xmm0
; CHECK-AVX512-NEXT: vmovups %xmm0, (%esi)
; CHECK-AVX512-NEXT: movl (%eax), %edx
; CHECK-AVX512-NEXT: movl %edx, (%ecx)
; CHECK-AVX512-NEXT: movl 4(%eax), %edx
; CHECK-AVX512-NEXT: movl %edx, 4(%ecx)
; CHECK-AVX512-NEXT: movl 8(%eax), %edx
; CHECK-AVX512-NEXT: movl %edx, 8(%ecx)
; CHECK-AVX512-NEXT: movl 12(%eax), %eax
; CHECK-AVX512-NEXT: movl %eax, 12(%ecx)
; CHECK-AVX512-NEXT: popl %esi
; CHECK-AVX512-NEXT: popl %edi
; CHECK-AVX512-NEXT: retl
entry:
  %cmp = icmp sgt i32 %x, 17
  br i1 %cmp, label %if.then, label %if.end

if.then: ; preds = %entry
  %b = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 1
  store i32 %x, i32* %b, align 4
  br label %if.end

if.end: ; preds = %if.then, %entry
  %cmp1 = icmp sgt i32 %x2, 13
  br i1 %cmp1, label %if.then2, label %if.end3

if.then2: ; preds = %if.end
  %d = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 3
  store i32 %x2, i32* %d, align 4
  br label %if.end3

if.end3: ; preds = %if.then2, %if.end
  %0 = bitcast %struct.S* %s3 to i8*
  %1 = bitcast %struct.S* %s4 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 4, i1 false)
  %2 = bitcast %struct.S* %s2 to i8*
  %3 = bitcast %struct.S* %s1 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 4, i1 false)
  ret void
}

; Function Attrs: nounwind uwtable
define void @test_2preds_block(%struct.S* nocapture %s1, %struct.S* nocapture %s2, i32 %x, %struct.S* nocapture %s3, %struct.S* nocapture readonly %s4, i32 %x2) local_unnamed_addr #0 {
; CHECK-LABEL: test_2preds_block:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: pushl %ebx
; CHECK-NEXT: .cfi_def_cfa_offset 8
; CHECK-NEXT: pushl %edi
; CHECK-NEXT: .cfi_def_cfa_offset 12
; CHECK-NEXT: pushl %esi
; CHECK-NEXT: .cfi_def_cfa_offset 16
; CHECK-NEXT: .cfi_offset %esi, -16
; CHECK-NEXT: .cfi_offset %edi, -12
; CHECK-NEXT: .cfi_offset %ebx, -8
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ebx
; CHECK-NEXT: movl %ebx, 12(%ecx)
; CHECK-NEXT: cmpl $18, %edi
; CHECK-NEXT: jl .LBB3_2
; CHECK-NEXT: # %bb.1: # %if.then
; CHECK-NEXT: movl %edi, 4(%ecx)
; CHECK-NEXT: .LBB3_2: # %if.end
; CHECK-NEXT: movups (%esi), %xmm0
; CHECK-NEXT: movups %xmm0, (%edx)
; CHECK-NEXT: movl (%ecx), %edx
; CHECK-NEXT: movl %edx, (%eax)
; CHECK-NEXT: movl 4(%ecx), %edx
; CHECK-NEXT: movl %edx, 4(%eax)
; CHECK-NEXT: movl 8(%ecx), %edx
; CHECK-NEXT: movl %edx, 8(%eax)
; CHECK-NEXT: movl 12(%ecx), %ecx
; CHECK-NEXT: movl %ecx, 12(%eax)
; CHECK-NEXT: popl %esi
; CHECK-NEXT: popl %edi
; CHECK-NEXT: popl %ebx
; CHECK-NEXT: retl
;
; DISABLED-LABEL: test_2preds_block:
; DISABLED: # %bb.0: # %entry
; DISABLED-NEXT: pushl %ebx
; DISABLED-NEXT: .cfi_def_cfa_offset 8
; DISABLED-NEXT: pushl %edi
; DISABLED-NEXT: .cfi_def_cfa_offset 12
; DISABLED-NEXT: pushl %esi
; DISABLED-NEXT: .cfi_def_cfa_offset 16
; DISABLED-NEXT: .cfi_offset %esi, -16
; DISABLED-NEXT: .cfi_offset %edi, -12
; DISABLED-NEXT: .cfi_offset %ebx, -8
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi
; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ebx
; DISABLED-NEXT: movl %ebx, 12(%esi)
; DISABLED-NEXT: cmpl $18, %edi
; DISABLED-NEXT: jl .LBB3_2
; DISABLED-NEXT: # %bb.1: # %if.then
; DISABLED-NEXT: movl %edi, 4(%esi)
; DISABLED-NEXT: .LBB3_2: # %if.end
; DISABLED-NEXT: movups (%edx), %xmm0
; DISABLED-NEXT: movups %xmm0, (%ecx)
; DISABLED-NEXT: movups (%esi), %xmm0
; DISABLED-NEXT: movups %xmm0, (%eax)
; DISABLED-NEXT: popl %esi
; DISABLED-NEXT: popl %edi
; DISABLED-NEXT: popl %ebx
; DISABLED-NEXT: retl
;
; CHECK-AVX2-LABEL: test_2preds_block:
; CHECK-AVX2: # %bb.0: # %entry
; CHECK-AVX2-NEXT: pushl %ebx
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8
; CHECK-AVX2-NEXT: pushl %edi
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12
; CHECK-AVX2-NEXT: pushl %esi
; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 16
; CHECK-AVX2-NEXT: .cfi_offset %esi, -16
; CHECK-AVX2-NEXT: .cfi_offset %edi, -12
; CHECK-AVX2-NEXT: .cfi_offset %ebx, -8
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi
; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ;
CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-AVX2-NEXT: movl %ebx, 12(%ecx) ; CHECK-AVX2-NEXT: cmpl $18, %edi ; CHECK-AVX2-NEXT: jl .LBB3_2 ; CHECK-AVX2-NEXT: # %bb.1: # %if.then ; CHECK-AVX2-NEXT: movl %edi, 4(%ecx) ; CHECK-AVX2-NEXT: .LBB3_2: # %if.end ; CHECK-AVX2-NEXT: movups (%esi), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, (%edx) ; CHECK-AVX2-NEXT: movl (%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, (%eax) ; CHECK-AVX2-NEXT: movl 4(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 4(%eax) ; CHECK-AVX2-NEXT: movl 8(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 8(%eax) ; CHECK-AVX2-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX2-NEXT: popl %esi ; CHECK-AVX2-NEXT: popl %edi ; CHECK-AVX2-NEXT: popl %ebx ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_2preds_block: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %ebx ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: pushl %edi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX512-NEXT: pushl %esi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 16 ; CHECK-AVX512-NEXT: .cfi_offset %esi, -16 ; CHECK-AVX512-NEXT: .cfi_offset %edi, -12 ; CHECK-AVX512-NEXT: .cfi_offset %ebx, -8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-AVX512-NEXT: movl %ebx, 12(%ecx) ; CHECK-AVX512-NEXT: cmpl $18, %edi ; CHECK-AVX512-NEXT: jl .LBB3_2 ; CHECK-AVX512-NEXT: # %bb.1: # %if.then ; CHECK-AVX512-NEXT: movl %edi, 4(%ecx) ; CHECK-AVX512-NEXT: .LBB3_2: # %if.end ; CHECK-AVX512-NEXT: vmovups (%esi), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, (%edx) ; CHECK-AVX512-NEXT: movl (%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, (%eax) ; CHECK-AVX512-NEXT: movl 4(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 4(%eax) ; CHECK-AVX512-NEXT: movl 8(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 8(%eax) ; CHECK-AVX512-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX512-NEXT: popl %esi ; CHECK-AVX512-NEXT: popl %edi ; CHECK-AVX512-NEXT: popl %ebx ; CHECK-AVX512-NEXT: retl entry: %d = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 3 store i32 %x2, i32* %d, align 4 %cmp = icmp sgt i32 %x, 17 br i1 %cmp, label %if.then, label %if.end if.then: ; preds = %entry %b = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 1 store i32 %x, i32* %b, align 4 br label %if.end if.end: ; preds = %if.then, %entry %0 = bitcast %struct.S* %s3 to i8* %1 = bitcast %struct.S* %s4 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 4, i1 false) %2 = bitcast %struct.S* %s2 to i8* %3 = bitcast %struct.S* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 4, i1 false) ret void } %struct.S2 = type { i64, i64 } ; Function Attrs: nounwind uwtable define void @test_type64(%struct.S2* nocapture %s1, %struct.S2* nocapture %s2, i32 %x, %struct.S2* nocapture %s3, %struct.S2* nocapture readonly %s4) local_unnamed_addr #0 { ; CHECK-LABEL: test_type64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %edi ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: pushl %esi ; CHECK-NEXT: .cfi_def_cfa_offset 12 ; CHECK-NEXT: .cfi_offset %esi, -12 ; CHECK-NEXT: .cfi_offset %edi, -8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-NEXT: movl 
{{[0-9]+}}(%esp), %edi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: cmpl $18, %edi ; CHECK-NEXT: jl .LBB4_2 ; CHECK-NEXT: # %bb.1: # %if.then ; CHECK-NEXT: movl %edi, 8(%ecx) ; CHECK-NEXT: sarl $31, %edi ; CHECK-NEXT: movl %edi, 12(%ecx) ; CHECK-NEXT: .LBB4_2: # %if.end ; CHECK-NEXT: movups (%esi), %xmm0 ; CHECK-NEXT: movups %xmm0, (%edx) ; CHECK-NEXT: movl (%ecx), %edx ; CHECK-NEXT: movl %edx, (%eax) ; CHECK-NEXT: movl 4(%ecx), %edx ; CHECK-NEXT: movl %edx, 4(%eax) ; CHECK-NEXT: movl 8(%ecx), %edx ; CHECK-NEXT: movl %edx, 8(%eax) ; CHECK-NEXT: movl 12(%ecx), %ecx ; CHECK-NEXT: movl %ecx, 12(%eax) ; CHECK-NEXT: popl %esi ; CHECK-NEXT: popl %edi ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_type64: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %edi ; DISABLED-NEXT: .cfi_def_cfa_offset 8 ; DISABLED-NEXT: pushl %esi ; DISABLED-NEXT: .cfi_def_cfa_offset 12 ; DISABLED-NEXT: .cfi_offset %esi, -12 ; DISABLED-NEXT: .cfi_offset %edi, -8 ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi ; DISABLED-NEXT: cmpl $18, %edi ; DISABLED-NEXT: jl .LBB4_2 ; DISABLED-NEXT: # %bb.1: # %if.then ; DISABLED-NEXT: movl %edi, 8(%esi) ; DISABLED-NEXT: sarl $31, %edi ; DISABLED-NEXT: movl %edi, 12(%esi) ; DISABLED-NEXT: .LBB4_2: # %if.end ; DISABLED-NEXT: movups (%edx), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%ecx) ; DISABLED-NEXT: movups (%esi), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%eax) ; DISABLED-NEXT: popl %esi ; DISABLED-NEXT: popl %edi ; DISABLED-NEXT: retl ; ; CHECK-AVX2-LABEL: test_type64: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: pushl %edi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX2-NEXT: pushl %esi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX2-NEXT: .cfi_offset %esi, -12 ; CHECK-AVX2-NEXT: .cfi_offset %edi, -8 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: cmpl $18, %edi ; CHECK-AVX2-NEXT: jl .LBB4_2 ; CHECK-AVX2-NEXT: # %bb.1: # %if.then ; CHECK-AVX2-NEXT: movl %edi, 8(%ecx) ; CHECK-AVX2-NEXT: sarl $31, %edi ; CHECK-AVX2-NEXT: movl %edi, 12(%ecx) ; CHECK-AVX2-NEXT: .LBB4_2: # %if.end ; CHECK-AVX2-NEXT: movups (%esi), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, (%edx) ; CHECK-AVX2-NEXT: movl (%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, (%eax) ; CHECK-AVX2-NEXT: movl 4(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 4(%eax) ; CHECK-AVX2-NEXT: movl 8(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 8(%eax) ; CHECK-AVX2-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX2-NEXT: popl %esi ; CHECK-AVX2-NEXT: popl %edi ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_type64: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %edi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: pushl %esi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX512-NEXT: .cfi_offset %esi, -12 ; CHECK-AVX512-NEXT: .cfi_offset %edi, -8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: cmpl $18, %edi ; CHECK-AVX512-NEXT: jl .LBB4_2 
; CHECK-AVX512-NEXT: # %bb.1: # %if.then ; CHECK-AVX512-NEXT: movl %edi, 8(%ecx) ; CHECK-AVX512-NEXT: sarl $31, %edi ; CHECK-AVX512-NEXT: movl %edi, 12(%ecx) ; CHECK-AVX512-NEXT: .LBB4_2: # %if.end ; CHECK-AVX512-NEXT: vmovups (%esi), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, (%edx) ; CHECK-AVX512-NEXT: movl (%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, (%eax) ; CHECK-AVX512-NEXT: movl 4(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 4(%eax) ; CHECK-AVX512-NEXT: movl 8(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 8(%eax) ; CHECK-AVX512-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX512-NEXT: popl %esi ; CHECK-AVX512-NEXT: popl %edi ; CHECK-AVX512-NEXT: retl entry: %cmp = icmp sgt i32 %x, 17 br i1 %cmp, label %if.then, label %if.end if.then: ; preds = %entry %conv = sext i32 %x to i64 %b = getelementptr inbounds %struct.S2, %struct.S2* %s1, i64 0, i32 1 store i64 %conv, i64* %b, align 8 br label %if.end if.end: ; preds = %if.then, %entry %0 = bitcast %struct.S2* %s3 to i8* %1 = bitcast %struct.S2* %s4 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 8, i1 false) %2 = bitcast %struct.S2* %s2 to i8* %3 = bitcast %struct.S2* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 8, i1 false) ret void } %struct.S3 = type { i64, i8, i8, i16, i32 } ; Function Attrs: noinline nounwind uwtable define void @test_mixed_type(%struct.S3* nocapture %s1, %struct.S3* nocapture %s2, i32 %x, %struct.S3* nocapture readnone %s3, %struct.S3* nocapture readnone %s4) local_unnamed_addr #0 { ; CHECK-LABEL: test_mixed_type: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %esi ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: .cfi_offset %esi, -8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: cmpl $18, %edx ; CHECK-NEXT: jl .LBB5_2 ; CHECK-NEXT: # %bb.1: # %if.then ; CHECK-NEXT: movl %edx, %esi ; CHECK-NEXT: sarl $31, %esi ; CHECK-NEXT: movl %edx, (%ecx) ; CHECK-NEXT: movl %esi, 4(%ecx) ; CHECK-NEXT: movb %dl, 8(%ecx) ; CHECK-NEXT: .LBB5_2: # %if.end ; CHECK-NEXT: movl (%ecx), %edx ; CHECK-NEXT: movl %edx, (%eax) ; CHECK-NEXT: movl 4(%ecx), %edx ; CHECK-NEXT: movl %edx, 4(%eax) ; CHECK-NEXT: movb 8(%ecx), %dl ; CHECK-NEXT: movb %dl, 8(%eax) ; CHECK-NEXT: movl 9(%ecx), %edx ; CHECK-NEXT: movl %edx, 9(%eax) ; CHECK-NEXT: movzwl 13(%ecx), %edx ; CHECK-NEXT: movw %dx, 13(%eax) ; CHECK-NEXT: movb 15(%ecx), %cl ; CHECK-NEXT: movb %cl, 15(%eax) ; CHECK-NEXT: popl %esi ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_mixed_type: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %esi ; DISABLED-NEXT: .cfi_def_cfa_offset 8 ; DISABLED-NEXT: .cfi_offset %esi, -8 ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx ; DISABLED-NEXT: cmpl $18, %edx ; DISABLED-NEXT: jl .LBB5_2 ; DISABLED-NEXT: # %bb.1: # %if.then ; DISABLED-NEXT: movl %edx, %esi ; DISABLED-NEXT: sarl $31, %esi ; DISABLED-NEXT: movl %edx, (%ecx) ; DISABLED-NEXT: movl %esi, 4(%ecx) ; DISABLED-NEXT: movb %dl, 8(%ecx) ; DISABLED-NEXT: .LBB5_2: # %if.end ; DISABLED-NEXT: movups (%ecx), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%eax) ; DISABLED-NEXT: popl %esi ; DISABLED-NEXT: retl ; ; CHECK-AVX2-LABEL: test_mixed_type: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: pushl %esi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX2-NEXT: .cfi_offset %esi, -8 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX2-NEXT: 
movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: cmpl $18, %edx ; CHECK-AVX2-NEXT: jl .LBB5_2 ; CHECK-AVX2-NEXT: # %bb.1: # %if.then ; CHECK-AVX2-NEXT: movl %edx, %esi ; CHECK-AVX2-NEXT: sarl $31, %esi ; CHECK-AVX2-NEXT: movl %edx, (%ecx) ; CHECK-AVX2-NEXT: movl %esi, 4(%ecx) ; CHECK-AVX2-NEXT: movb %dl, 8(%ecx) ; CHECK-AVX2-NEXT: .LBB5_2: # %if.end ; CHECK-AVX2-NEXT: movl (%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, (%eax) ; CHECK-AVX2-NEXT: movl 4(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 4(%eax) ; CHECK-AVX2-NEXT: movb 8(%ecx), %dl ; CHECK-AVX2-NEXT: movb %dl, 8(%eax) ; CHECK-AVX2-NEXT: movl 9(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 9(%eax) ; CHECK-AVX2-NEXT: movzwl 13(%ecx), %edx ; CHECK-AVX2-NEXT: movw %dx, 13(%eax) ; CHECK-AVX2-NEXT: movb 15(%ecx), %cl ; CHECK-AVX2-NEXT: movb %cl, 15(%eax) ; CHECK-AVX2-NEXT: popl %esi ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_mixed_type: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %esi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: .cfi_offset %esi, -8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: cmpl $18, %edx ; CHECK-AVX512-NEXT: jl .LBB5_2 ; CHECK-AVX512-NEXT: # %bb.1: # %if.then ; CHECK-AVX512-NEXT: movl %edx, %esi ; CHECK-AVX512-NEXT: sarl $31, %esi ; CHECK-AVX512-NEXT: movl %edx, (%ecx) ; CHECK-AVX512-NEXT: movl %esi, 4(%ecx) ; CHECK-AVX512-NEXT: movb %dl, 8(%ecx) ; CHECK-AVX512-NEXT: .LBB5_2: # %if.end ; CHECK-AVX512-NEXT: movl (%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, (%eax) ; CHECK-AVX512-NEXT: movl 4(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 4(%eax) ; CHECK-AVX512-NEXT: movb 8(%ecx), %dl ; CHECK-AVX512-NEXT: movb %dl, 8(%eax) ; CHECK-AVX512-NEXT: movl 9(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 9(%eax) ; CHECK-AVX512-NEXT: movzwl 13(%ecx), %edx ; CHECK-AVX512-NEXT: movw %dx, 13(%eax) ; CHECK-AVX512-NEXT: movb 15(%ecx), %cl ; CHECK-AVX512-NEXT: movb %cl, 15(%eax) ; CHECK-AVX512-NEXT: popl %esi ; CHECK-AVX512-NEXT: retl entry: %cmp = icmp sgt i32 %x, 17 br i1 %cmp, label %if.then, label %if.end if.then: ; preds = %entry %conv = sext i32 %x to i64 %a = getelementptr inbounds %struct.S3, %struct.S3* %s1, i64 0, i32 0 store i64 %conv, i64* %a, align 8 %conv1 = trunc i32 %x to i8 %b = getelementptr inbounds %struct.S3, %struct.S3* %s1, i64 0, i32 1 store i8 %conv1, i8* %b, align 8 br label %if.end if.end: ; preds = %if.then, %entry %0 = bitcast %struct.S3* %s2 to i8* %1 = bitcast %struct.S3* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 8, i1 false) ret void } %struct.S4 = type { i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32 } ; Function Attrs: nounwind uwtable define void @test_multiple_blocks(%struct.S4* nocapture %s1, %struct.S4* nocapture %s2) local_unnamed_addr #0 { ; CHECK-LABEL: test_multiple_blocks: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: movl $0, 4(%ecx) ; CHECK-NEXT: movl $0, 36(%ecx) ; CHECK-NEXT: movups 16(%ecx), %xmm0 ; CHECK-NEXT: movups %xmm0, 16(%eax) ; CHECK-NEXT: movl 32(%ecx), %edx ; CHECK-NEXT: movl %edx, 32(%eax) ; CHECK-NEXT: movl 36(%ecx), %edx ; CHECK-NEXT: movl %edx, 36(%eax) ; CHECK-NEXT: movl 40(%ecx), %edx ; CHECK-NEXT: movl %edx, 40(%eax) ; CHECK-NEXT: movl 44(%ecx), %edx ; CHECK-NEXT: movl %edx, 44(%eax) ; CHECK-NEXT: movl (%ecx), %edx ; CHECK-NEXT: movl %edx, (%eax) ; 
CHECK-NEXT: movl 4(%ecx), %edx ; CHECK-NEXT: movl %edx, 4(%eax) ; CHECK-NEXT: movl 8(%ecx), %edx ; CHECK-NEXT: movl %edx, 8(%eax) ; CHECK-NEXT: movl 12(%ecx), %ecx ; CHECK-NEXT: movl %ecx, 12(%eax) ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_multiple_blocks: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx ; DISABLED-NEXT: movl $0, 4(%ecx) ; DISABLED-NEXT: movl $0, 36(%ecx) ; DISABLED-NEXT: movups 16(%ecx), %xmm0 ; DISABLED-NEXT: movups %xmm0, 16(%eax) ; DISABLED-NEXT: movups 32(%ecx), %xmm0 ; DISABLED-NEXT: movups %xmm0, 32(%eax) ; DISABLED-NEXT: movups (%ecx), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%eax) ; DISABLED-NEXT: retl ; ; CHECK-AVX2-LABEL: test_multiple_blocks: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl $0, 4(%ecx) ; CHECK-AVX2-NEXT: movl $0, 36(%ecx) ; CHECK-AVX2-NEXT: movups 16(%ecx), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, 16(%eax) ; CHECK-AVX2-NEXT: movl 32(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 32(%eax) ; CHECK-AVX2-NEXT: movl 36(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 36(%eax) ; CHECK-AVX2-NEXT: movl 40(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 40(%eax) ; CHECK-AVX2-NEXT: movl 44(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 44(%eax) ; CHECK-AVX2-NEXT: movl (%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, (%eax) ; CHECK-AVX2-NEXT: movl 4(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 4(%eax) ; CHECK-AVX2-NEXT: movl 8(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 8(%eax) ; CHECK-AVX2-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_multiple_blocks: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl $0, 4(%ecx) ; CHECK-AVX512-NEXT: movl $0, 36(%ecx) ; CHECK-AVX512-NEXT: vmovups 16(%ecx), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, 16(%eax) ; CHECK-AVX512-NEXT: movl 32(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 32(%eax) ; CHECK-AVX512-NEXT: movl 36(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 36(%eax) ; CHECK-AVX512-NEXT: movl 40(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 40(%eax) ; CHECK-AVX512-NEXT: movl 44(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 44(%eax) ; CHECK-AVX512-NEXT: movl (%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, (%eax) ; CHECK-AVX512-NEXT: movl 4(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 4(%eax) ; CHECK-AVX512-NEXT: vmovups 8(%ecx), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, 8(%eax) ; CHECK-AVX512-NEXT: movl 24(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 24(%eax) ; CHECK-AVX512-NEXT: movl 28(%ecx), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 28(%eax) ; CHECK-AVX512-NEXT: retl entry: %b = getelementptr inbounds %struct.S4, %struct.S4* %s1, i64 0, i32 1 store i32 0, i32* %b, align 4 %b3 = getelementptr inbounds %struct.S4, %struct.S4* %s1, i64 0, i32 9 store i32 0, i32* %b3, align 4 %0 = bitcast %struct.S4* %s2 to i8* %1 = bitcast %struct.S4* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 48, i32 4, i1 false) ret void } %struct.S5 = type { i16, i16, i16, i16, i16, i16, i16, i16 } ; Function Attrs: nounwind uwtable define void @test_type16(%struct.S5* nocapture %s1, %struct.S5* nocapture %s2, i32 %x, %struct.S5* nocapture %s3, %struct.S5* nocapture readonly %s4) local_unnamed_addr #0 { ; CHECK-LABEL: test_type16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %edi ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; 
CHECK-NEXT: pushl %esi ; CHECK-NEXT: .cfi_def_cfa_offset 12 ; CHECK-NEXT: .cfi_offset %esi, -12 ; CHECK-NEXT: .cfi_offset %edi, -8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: cmpl $18, %edi ; CHECK-NEXT: jl .LBB7_2 ; CHECK-NEXT: # %bb.1: # %if.then ; CHECK-NEXT: movw %di, 2(%ecx) ; CHECK-NEXT: .LBB7_2: # %if.end ; CHECK-NEXT: movups (%esi), %xmm0 ; CHECK-NEXT: movups %xmm0, (%edx) ; CHECK-NEXT: movzwl (%ecx), %edx ; CHECK-NEXT: movw %dx, (%eax) ; CHECK-NEXT: movzwl 2(%ecx), %edx ; CHECK-NEXT: movw %dx, 2(%eax) ; CHECK-NEXT: movl 4(%ecx), %edx ; CHECK-NEXT: movl %edx, 4(%eax) ; CHECK-NEXT: movl 8(%ecx), %edx ; CHECK-NEXT: movl %edx, 8(%eax) ; CHECK-NEXT: movl 12(%ecx), %ecx ; CHECK-NEXT: movl %ecx, 12(%eax) ; CHECK-NEXT: popl %esi ; CHECK-NEXT: popl %edi ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_type16: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %edi ; DISABLED-NEXT: .cfi_def_cfa_offset 8 ; DISABLED-NEXT: pushl %esi ; DISABLED-NEXT: .cfi_def_cfa_offset 12 ; DISABLED-NEXT: .cfi_offset %esi, -12 ; DISABLED-NEXT: .cfi_offset %edi, -8 ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi ; DISABLED-NEXT: cmpl $18, %edi ; DISABLED-NEXT: jl .LBB7_2 ; DISABLED-NEXT: # %bb.1: # %if.then ; DISABLED-NEXT: movw %di, 2(%esi) ; DISABLED-NEXT: .LBB7_2: # %if.end ; DISABLED-NEXT: movups (%edx), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%ecx) ; DISABLED-NEXT: movups (%esi), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%eax) ; DISABLED-NEXT: popl %esi ; DISABLED-NEXT: popl %edi ; DISABLED-NEXT: retl ; ; CHECK-AVX2-LABEL: test_type16: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: pushl %edi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX2-NEXT: pushl %esi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX2-NEXT: .cfi_offset %esi, -12 ; CHECK-AVX2-NEXT: .cfi_offset %edi, -8 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: cmpl $18, %edi ; CHECK-AVX2-NEXT: jl .LBB7_2 ; CHECK-AVX2-NEXT: # %bb.1: # %if.then ; CHECK-AVX2-NEXT: movw %di, 2(%ecx) ; CHECK-AVX2-NEXT: .LBB7_2: # %if.end ; CHECK-AVX2-NEXT: movups (%esi), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, (%edx) ; CHECK-AVX2-NEXT: movzwl (%ecx), %edx ; CHECK-AVX2-NEXT: movw %dx, (%eax) ; CHECK-AVX2-NEXT: movzwl 2(%ecx), %edx ; CHECK-AVX2-NEXT: movw %dx, 2(%eax) ; CHECK-AVX2-NEXT: movl 4(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 4(%eax) ; CHECK-AVX2-NEXT: movl 8(%ecx), %edx ; CHECK-AVX2-NEXT: movl %edx, 8(%eax) ; CHECK-AVX2-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX2-NEXT: popl %esi ; CHECK-AVX2-NEXT: popl %edi ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_type16: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %edi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: pushl %esi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX512-NEXT: .cfi_offset %esi, -12 ; CHECK-AVX512-NEXT: .cfi_offset %edi, -8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-AVX512-NEXT: movl 
{{[0-9]+}}(%esp), %edi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: cmpl $18, %edi ; CHECK-AVX512-NEXT: jl .LBB7_2 ; CHECK-AVX512-NEXT: # %bb.1: # %if.then ; CHECK-AVX512-NEXT: movw %di, 2(%ecx) ; CHECK-AVX512-NEXT: .LBB7_2: # %if.end ; CHECK-AVX512-NEXT: vmovups (%esi), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, (%edx) ; CHECK-AVX512-NEXT: movzwl (%ecx), %edx ; CHECK-AVX512-NEXT: movw %dx, (%eax) ; CHECK-AVX512-NEXT: movzwl 2(%ecx), %edx ; CHECK-AVX512-NEXT: movw %dx, 2(%eax) ; CHECK-AVX512-NEXT: movl 4(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 4(%eax) ; CHECK-AVX512-NEXT: movl 8(%ecx), %edx ; CHECK-AVX512-NEXT: movl %edx, 8(%eax) ; CHECK-AVX512-NEXT: movl 12(%ecx), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 12(%eax) ; CHECK-AVX512-NEXT: popl %esi ; CHECK-AVX512-NEXT: popl %edi ; CHECK-AVX512-NEXT: retl entry: %cmp = icmp sgt i32 %x, 17 br i1 %cmp, label %if.then, label %if.end if.then: ; preds = %entry %conv = trunc i32 %x to i16 %b = getelementptr inbounds %struct.S5, %struct.S5* %s1, i64 0, i32 1 store i16 %conv, i16* %b, align 2 br label %if.end if.end: ; preds = %if.then, %entry %0 = bitcast %struct.S5* %s3 to i8* %1 = bitcast %struct.S5* %s4 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 2, i1 false) %2 = bitcast %struct.S5* %s2 to i8* %3 = bitcast %struct.S5* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 2, i1 false) ret void } %struct.S6 = type { [4 x i32], i32, i32, i32, i32 } ; Function Attrs: nounwind uwtable define void @test_stack(%struct.S6* noalias nocapture sret %agg.result, %struct.S6* byval nocapture readnone align 8 %s1, %struct.S6* byval nocapture align 8 %s2, i32 %x) local_unnamed_addr #0 { ; CHECK-LABEL: test_stack: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %eax ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl %eax, {{[0-9]+}}(%esp) ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movups {{[0-9]+}}(%esp), %xmm0 ; CHECK-NEXT: movups %xmm0, (%eax) ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: movl %ecx, 16(%eax) ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: movl %ecx, 20(%eax) ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: movl %ecx, 24(%eax) ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: movl %ecx, 28(%eax) ; CHECK-NEXT: popl %ecx ; CHECK-NEXT: retl $4 ; ; DISABLED-LABEL: test_stack: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %eax ; DISABLED-NEXT: .cfi_def_cfa_offset 8 ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl %eax, {{[0-9]+}}(%esp) ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movups {{[0-9]+}}(%esp), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%eax) ; DISABLED-NEXT: movups {{[0-9]+}}(%esp), %xmm0 ; DISABLED-NEXT: movups %xmm0, 16(%eax) ; DISABLED-NEXT: popl %ecx ; DISABLED-NEXT: retl $4 ; ; CHECK-AVX2-LABEL: test_stack: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: pushl %eax ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl %eax, {{[0-9]+}}(%esp) ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movups {{[0-9]+}}(%esp), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, (%eax) ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 16(%eax) ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 20(%eax) ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 
24(%eax) ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 28(%eax) ; CHECK-AVX2-NEXT: popl %ecx ; CHECK-AVX2-NEXT: retl $4 ; ; CHECK-AVX512-LABEL: test_stack: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %eax ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl %eax, {{[0-9]+}}(%esp) ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 16(%eax) ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 20(%eax) ; CHECK-AVX512-NEXT: vmovups {{[0-9]+}}(%esp), %xmm0 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 24(%eax) ; CHECK-AVX512-NEXT: vmovups %xmm0, (%eax) ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 28(%eax) ; CHECK-AVX512-NEXT: popl %ecx ; CHECK-AVX512-NEXT: retl $4 entry: %s6.sroa.0.0..sroa_cast1 = bitcast %struct.S6* %s2 to i8* %s6.sroa.3.0..sroa_idx4 = getelementptr inbounds %struct.S6, %struct.S6* %s2, i64 0, i32 3 store i32 %x, i32* %s6.sroa.3.0..sroa_idx4, align 8 %0 = bitcast %struct.S6* %agg.result to i8* call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* nonnull %s6.sroa.0.0..sroa_cast1, i64 32, i32 4, i1 false) ret void } ; Function Attrs: nounwind uwtable define void @test_limit_all(%struct.S* %s1, %struct.S* nocapture %s2, i32 %x, %struct.S* nocapture %s3, %struct.S* nocapture readonly %s4, i32 %x2) local_unnamed_addr #0 { ; CHECK-LABEL: test_limit_all: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %ebp ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: pushl %ebx ; CHECK-NEXT: .cfi_def_cfa_offset 12 ; CHECK-NEXT: pushl %edi ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: pushl %esi ; CHECK-NEXT: .cfi_def_cfa_offset 20 ; CHECK-NEXT: subl $12, %esp ; CHECK-NEXT: .cfi_def_cfa_offset 32 ; CHECK-NEXT: .cfi_offset %esi, -20 ; CHECK-NEXT: .cfi_offset %edi, -16 ; CHECK-NEXT: .cfi_offset %ebx, -12 ; CHECK-NEXT: .cfi_offset %ebp, -8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ebp ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl %eax, 12(%ebp) ; CHECK-NEXT: movl %ebp, (%esp) ; CHECK-NEXT: calll bar ; CHECK-NEXT: cmpl $18, %esi ; CHECK-NEXT: jl .LBB9_2 ; CHECK-NEXT: # %bb.1: # %if.then ; CHECK-NEXT: movl %esi, 4(%ebp) ; CHECK-NEXT: movl %ebp, (%esp) ; CHECK-NEXT: calll bar ; CHECK-NEXT: .LBB9_2: # %if.end ; CHECK-NEXT: movups (%ebx), %xmm0 ; CHECK-NEXT: movups %xmm0, (%edi) ; CHECK-NEXT: movups (%ebp), %xmm0 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movups %xmm0, (%eax) ; CHECK-NEXT: addl $12, %esp ; CHECK-NEXT: popl %esi ; CHECK-NEXT: popl %edi ; CHECK-NEXT: popl %ebx ; CHECK-NEXT: popl %ebp ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_limit_all: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %ebp ; DISABLED-NEXT: .cfi_def_cfa_offset 8 ; DISABLED-NEXT: pushl %ebx ; DISABLED-NEXT: .cfi_def_cfa_offset 12 ; DISABLED-NEXT: pushl %edi ; DISABLED-NEXT: .cfi_def_cfa_offset 16 ; DISABLED-NEXT: pushl %esi ; DISABLED-NEXT: .cfi_def_cfa_offset 20 ; DISABLED-NEXT: subl $12, %esp ; DISABLED-NEXT: .cfi_def_cfa_offset 32 ; DISABLED-NEXT: .cfi_offset %esi, -20 ; DISABLED-NEXT: .cfi_offset %edi, -16 ; DISABLED-NEXT: .cfi_offset %ebx, -12 ; DISABLED-NEXT: .cfi_offset %ebp, -8 ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ebx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi ; 
DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ebp ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl %eax, 12(%ebp) ; DISABLED-NEXT: movl %ebp, (%esp) ; DISABLED-NEXT: calll bar ; DISABLED-NEXT: cmpl $18, %esi ; DISABLED-NEXT: jl .LBB9_2 ; DISABLED-NEXT: # %bb.1: # %if.then ; DISABLED-NEXT: movl %esi, 4(%ebp) ; DISABLED-NEXT: movl %ebp, (%esp) ; DISABLED-NEXT: calll bar ; DISABLED-NEXT: .LBB9_2: # %if.end ; DISABLED-NEXT: movups (%ebx), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%edi) ; DISABLED-NEXT: movups (%ebp), %xmm0 ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movups %xmm0, (%eax) ; DISABLED-NEXT: addl $12, %esp ; DISABLED-NEXT: popl %esi ; DISABLED-NEXT: popl %edi ; DISABLED-NEXT: popl %ebx ; DISABLED-NEXT: popl %ebp ; DISABLED-NEXT: retl ; ; CHECK-AVX2-LABEL: test_limit_all: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: pushl %ebp ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX2-NEXT: pushl %ebx ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX2-NEXT: pushl %edi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 16 ; CHECK-AVX2-NEXT: pushl %esi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 20 ; CHECK-AVX2-NEXT: subl $12, %esp ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 32 ; CHECK-AVX2-NEXT: .cfi_offset %esi, -20 ; CHECK-AVX2-NEXT: .cfi_offset %edi, -16 ; CHECK-AVX2-NEXT: .cfi_offset %ebx, -12 ; CHECK-AVX2-NEXT: .cfi_offset %ebp, -8 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ebp ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl %eax, 12(%ebp) ; CHECK-AVX2-NEXT: movl %ebp, (%esp) ; CHECK-AVX2-NEXT: calll bar ; CHECK-AVX2-NEXT: cmpl $18, %esi ; CHECK-AVX2-NEXT: jl .LBB9_2 ; CHECK-AVX2-NEXT: # %bb.1: # %if.then ; CHECK-AVX2-NEXT: movl %esi, 4(%ebp) ; CHECK-AVX2-NEXT: movl %ebp, (%esp) ; CHECK-AVX2-NEXT: calll bar ; CHECK-AVX2-NEXT: .LBB9_2: # %if.end ; CHECK-AVX2-NEXT: movups (%ebx), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, (%edi) ; CHECK-AVX2-NEXT: movups (%ebp), %xmm0 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movups %xmm0, (%eax) ; CHECK-AVX2-NEXT: addl $12, %esp ; CHECK-AVX2-NEXT: popl %esi ; CHECK-AVX2-NEXT: popl %edi ; CHECK-AVX2-NEXT: popl %ebx ; CHECK-AVX2-NEXT: popl %ebp ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_limit_all: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %ebp ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: pushl %ebx ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX512-NEXT: pushl %edi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 16 ; CHECK-AVX512-NEXT: pushl %esi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 20 ; CHECK-AVX512-NEXT: subl $12, %esp ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 32 ; CHECK-AVX512-NEXT: .cfi_offset %esi, -20 ; CHECK-AVX512-NEXT: .cfi_offset %edi, -16 ; CHECK-AVX512-NEXT: .cfi_offset %ebx, -12 ; CHECK-AVX512-NEXT: .cfi_offset %ebp, -8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ebp ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl %eax, 12(%ebp) ; CHECK-AVX512-NEXT: movl %ebp, (%esp) ; CHECK-AVX512-NEXT: calll bar ; CHECK-AVX512-NEXT: cmpl $18, %esi ; CHECK-AVX512-NEXT: jl .LBB9_2 ; CHECK-AVX512-NEXT: # %bb.1: # %if.then ; CHECK-AVX512-NEXT: movl %esi, 4(%ebp) ; CHECK-AVX512-NEXT: movl %ebp, 
(%esp) ; CHECK-AVX512-NEXT: calll bar ; CHECK-AVX512-NEXT: .LBB9_2: # %if.end ; CHECK-AVX512-NEXT: vmovups (%ebx), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, (%edi) ; CHECK-AVX512-NEXT: vmovups (%ebp), %xmm0 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: vmovups %xmm0, (%eax) ; CHECK-AVX512-NEXT: addl $12, %esp ; CHECK-AVX512-NEXT: popl %esi ; CHECK-AVX512-NEXT: popl %edi ; CHECK-AVX512-NEXT: popl %ebx ; CHECK-AVX512-NEXT: popl %ebp ; CHECK-AVX512-NEXT: retl entry: %d = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 3 store i32 %x2, i32* %d, align 4 tail call void @bar(%struct.S* %s1) #3 %cmp = icmp sgt i32 %x, 17 br i1 %cmp, label %if.then, label %if.end if.then: ; preds = %entry %b = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 1 store i32 %x, i32* %b, align 4 tail call void @bar(%struct.S* nonnull %s1) #3 br label %if.end if.end: ; preds = %if.then, %entry %0 = bitcast %struct.S* %s3 to i8* %1 = bitcast %struct.S* %s4 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 4, i1 false) %2 = bitcast %struct.S* %s2 to i8* %3 = bitcast %struct.S* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 4, i1 false) ret void } ; Function Attrs: nounwind uwtable define void @test_limit_one_pred(%struct.S* %s1, %struct.S* nocapture %s2, i32 %x, %struct.S* nocapture %s3, %struct.S* nocapture readonly %s4, i32 %x2) local_unnamed_addr #0 { ; CHECK-LABEL: test_limit_one_pred: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %ebp ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: pushl %ebx ; CHECK-NEXT: .cfi_def_cfa_offset 12 ; CHECK-NEXT: pushl %edi ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: pushl %esi ; CHECK-NEXT: .cfi_def_cfa_offset 20 ; CHECK-NEXT: subl $12, %esp ; CHECK-NEXT: .cfi_def_cfa_offset 32 ; CHECK-NEXT: .cfi_offset %esi, -20 ; CHECK-NEXT: .cfi_offset %edi, -16 ; CHECK-NEXT: .cfi_offset %ebx, -12 ; CHECK-NEXT: .cfi_offset %ebp, -8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ebp ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: movl %ecx, 12(%edi) ; CHECK-NEXT: cmpl $18, %eax ; CHECK-NEXT: jl .LBB10_2 ; CHECK-NEXT: # %bb.1: # %if.then ; CHECK-NEXT: movl %eax, 4(%edi) ; CHECK-NEXT: movl %edi, (%esp) ; CHECK-NEXT: calll bar ; CHECK-NEXT: .LBB10_2: # %if.end ; CHECK-NEXT: movups (%ebp), %xmm0 ; CHECK-NEXT: movups %xmm0, (%ebx) ; CHECK-NEXT: movl (%edi), %eax ; CHECK-NEXT: movl %eax, (%esi) ; CHECK-NEXT: movl 4(%edi), %eax ; CHECK-NEXT: movl %eax, 4(%esi) ; CHECK-NEXT: movl 8(%edi), %eax ; CHECK-NEXT: movl %eax, 8(%esi) ; CHECK-NEXT: movl 12(%edi), %eax ; CHECK-NEXT: movl %eax, 12(%esi) ; CHECK-NEXT: addl $12, %esp ; CHECK-NEXT: popl %esi ; CHECK-NEXT: popl %edi ; CHECK-NEXT: popl %ebx ; CHECK-NEXT: popl %ebp ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_limit_one_pred: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %ebp ; DISABLED-NEXT: .cfi_def_cfa_offset 8 ; DISABLED-NEXT: pushl %ebx ; DISABLED-NEXT: .cfi_def_cfa_offset 12 ; DISABLED-NEXT: pushl %edi ; DISABLED-NEXT: .cfi_def_cfa_offset 16 ; DISABLED-NEXT: pushl %esi ; DISABLED-NEXT: .cfi_def_cfa_offset 20 ; DISABLED-NEXT: subl $12, %esp ; DISABLED-NEXT: .cfi_def_cfa_offset 32 ; DISABLED-NEXT: .cfi_offset %esi, -20 ; DISABLED-NEXT: .cfi_offset %edi, -16 ; DISABLED-NEXT: .cfi_offset %ebx, -12 ; DISABLED-NEXT: .cfi_offset %ebp, -8 ; DISABLED-NEXT: movl 
{{[0-9]+}}(%esp), %ebx ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %edi ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %eax ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %esi ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ebp ; DISABLED-NEXT: movl {{[0-9]+}}(%esp), %ecx ; DISABLED-NEXT: movl %ecx, 12(%ebp) ; DISABLED-NEXT: cmpl $18, %eax ; DISABLED-NEXT: jl .LBB10_2 ; DISABLED-NEXT: # %bb.1: # %if.then ; DISABLED-NEXT: movl %eax, 4(%ebp) ; DISABLED-NEXT: movl %ebp, (%esp) ; DISABLED-NEXT: calll bar ; DISABLED-NEXT: .LBB10_2: # %if.end ; DISABLED-NEXT: movups (%ebx), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%edi) ; DISABLED-NEXT: movups (%ebp), %xmm0 ; DISABLED-NEXT: movups %xmm0, (%esi) ; DISABLED-NEXT: addl $12, %esp ; DISABLED-NEXT: popl %esi ; DISABLED-NEXT: popl %edi ; DISABLED-NEXT: popl %ebx ; DISABLED-NEXT: popl %ebp ; DISABLED-NEXT: retl ; ; CHECK-AVX2-LABEL: test_limit_one_pred: ; CHECK-AVX2: # %bb.0: # %entry ; CHECK-AVX2-NEXT: pushl %ebp ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX2-NEXT: pushl %ebx ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX2-NEXT: pushl %edi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 16 ; CHECK-AVX2-NEXT: pushl %esi ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 20 ; CHECK-AVX2-NEXT: subl $12, %esp ; CHECK-AVX2-NEXT: .cfi_def_cfa_offset 32 ; CHECK-AVX2-NEXT: .cfi_offset %esi, -20 ; CHECK-AVX2-NEXT: .cfi_offset %edi, -16 ; CHECK-AVX2-NEXT: .cfi_offset %ebx, -12 ; CHECK-AVX2-NEXT: .cfi_offset %ebp, -8 ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ebp ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX2-NEXT: movl %ecx, 12(%edi) ; CHECK-AVX2-NEXT: cmpl $18, %eax ; CHECK-AVX2-NEXT: jl .LBB10_2 ; CHECK-AVX2-NEXT: # %bb.1: # %if.then ; CHECK-AVX2-NEXT: movl %eax, 4(%edi) ; CHECK-AVX2-NEXT: movl %edi, (%esp) ; CHECK-AVX2-NEXT: calll bar ; CHECK-AVX2-NEXT: .LBB10_2: # %if.end ; CHECK-AVX2-NEXT: movups (%ebp), %xmm0 ; CHECK-AVX2-NEXT: movups %xmm0, (%ebx) ; CHECK-AVX2-NEXT: movl (%edi), %eax ; CHECK-AVX2-NEXT: movl %eax, (%esi) ; CHECK-AVX2-NEXT: movl 4(%edi), %eax ; CHECK-AVX2-NEXT: movl %eax, 4(%esi) ; CHECK-AVX2-NEXT: movl 8(%edi), %eax ; CHECK-AVX2-NEXT: movl %eax, 8(%esi) ; CHECK-AVX2-NEXT: movl 12(%edi), %eax ; CHECK-AVX2-NEXT: movl %eax, 12(%esi) ; CHECK-AVX2-NEXT: addl $12, %esp ; CHECK-AVX2-NEXT: popl %esi ; CHECK-AVX2-NEXT: popl %edi ; CHECK-AVX2-NEXT: popl %ebx ; CHECK-AVX2-NEXT: popl %ebp ; CHECK-AVX2-NEXT: retl ; ; CHECK-AVX512-LABEL: test_limit_one_pred: ; CHECK-AVX512: # %bb.0: # %entry ; CHECK-AVX512-NEXT: pushl %ebp ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 8 ; CHECK-AVX512-NEXT: pushl %ebx ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 12 ; CHECK-AVX512-NEXT: pushl %edi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 16 ; CHECK-AVX512-NEXT: pushl %esi ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 20 ; CHECK-AVX512-NEXT: subl $12, %esp ; CHECK-AVX512-NEXT: .cfi_def_cfa_offset 32 ; CHECK-AVX512-NEXT: .cfi_offset %esi, -20 ; CHECK-AVX512-NEXT: .cfi_offset %edi, -16 ; CHECK-AVX512-NEXT: .cfi_offset %ebx, -12 ; CHECK-AVX512-NEXT: .cfi_offset %ebp, -8 ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ebp ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ebx ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %edi ; CHECK-AVX512-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-AVX512-NEXT: movl %ecx, 12(%edi) ; CHECK-AVX512-NEXT: cmpl 
$18, %eax ; CHECK-AVX512-NEXT: jl .LBB10_2 ; CHECK-AVX512-NEXT: # %bb.1: # %if.then ; CHECK-AVX512-NEXT: movl %eax, 4(%edi) ; CHECK-AVX512-NEXT: movl %edi, (%esp) ; CHECK-AVX512-NEXT: calll bar ; CHECK-AVX512-NEXT: .LBB10_2: # %if.end ; CHECK-AVX512-NEXT: vmovups (%ebp), %xmm0 ; CHECK-AVX512-NEXT: vmovups %xmm0, (%ebx) ; CHECK-AVX512-NEXT: movl (%edi), %eax ; CHECK-AVX512-NEXT: movl %eax, (%esi) ; CHECK-AVX512-NEXT: movl 4(%edi), %eax ; CHECK-AVX512-NEXT: movl %eax, 4(%esi) ; CHECK-AVX512-NEXT: movl 8(%edi), %eax ; CHECK-AVX512-NEXT: movl %eax, 8(%esi) ; CHECK-AVX512-NEXT: movl 12(%edi), %eax ; CHECK-AVX512-NEXT: movl %eax, 12(%esi) ; CHECK-AVX512-NEXT: addl $12, %esp ; CHECK-AVX512-NEXT: popl %esi ; CHECK-AVX512-NEXT: popl %edi ; CHECK-AVX512-NEXT: popl %ebx ; CHECK-AVX512-NEXT: popl %ebp ; CHECK-AVX512-NEXT: retl entry: %d = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 3 store i32 %x2, i32* %d, align 4 %cmp = icmp sgt i32 %x, 17 br i1 %cmp, label %if.then, label %if.end if.then: ; preds = %entry %b = getelementptr inbounds %struct.S, %struct.S* %s1, i64 0, i32 1 store i32 %x, i32* %b, align 4 tail call void @bar(%struct.S* nonnull %s1) #3 br label %if.end if.end: ; preds = %if.then, %entry %0 = bitcast %struct.S* %s3 to i8* %1 = bitcast %struct.S* %s4 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 16, i32 4, i1 false) %2 = bitcast %struct.S* %s2 to i8* %3 = bitcast %struct.S* %s1 to i8* tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 16, i32 4, i1 false) ret void } declare void @bar(%struct.S*) local_unnamed_addr #1 ; Function Attrs: argmemonly nounwind declare void @llvm.memcpy.p0i8.p0i8.i64(i8* nocapture writeonly, i8* nocapture readonly, i64, i32, i1) #1 attributes #0 = { nounwind uwtable "target-cpu"="x86-64" } %struct.S7 = type { float, float, float , float, float, float, float, float } ; Function Attrs: nounwind uwtable define void @test_conditional_block_float(%struct.S7* nocapture %s1, %struct.S7* nocapture %s2, i32 %x, %struct.S7* nocapture %s3, %struct.S7* nocapture readonly %s4, float %y) local_unnamed_addr #0 { ; CHECK-LABEL: test_conditional_block_float: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: pushl %ebx ; CHECK-NEXT: .cfi_def_cfa_offset 8 ; CHECK-NEXT: pushl %edi ; CHECK-NEXT: .cfi_def_cfa_offset 12 ; CHECK-NEXT: pushl %esi ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %esi, -16 ; CHECK-NEXT: .cfi_offset %edi, -12 ; CHECK-NEXT: .cfi_offset %ebx, -8 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %esi ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx ; CHECK-NEXT: cmpl $18, {{[0-9]+}}(%esp) ; CHECK-NEXT: jl .LBB11_2 ; CHECK-NEXT: # %bb.1: # %if.then ; CHECK-NEXT: movl $1065353216, 4(%ecx) # imm = 0x3F800000 ; CHECK-NEXT: .LBB11_2: # %if.end ; CHECK-NEXT: movups (%esi), %xmm0 ; CHECK-NEXT: movups 16(%esi), %xmm1 ; CHECK-NEXT: movups %xmm1, 16(%edx) ; CHECK-NEXT: movups %xmm0, (%edx) ; CHECK-NEXT: movl (%ecx), %edx ; CHECK-NEXT: movl 4(%ecx), %esi ; CHECK-NEXT: movl 8(%ecx), %edi ; CHECK-NEXT: movl 12(%ecx), %ebx ; CHECK-NEXT: movups 16(%ecx), %xmm0 ; CHECK-NEXT: movups %xmm0, 16(%eax) ; CHECK-NEXT: movl %edx, (%eax) ; CHECK-NEXT: movl %esi, 4(%eax) ; CHECK-NEXT: movl %edi, 8(%eax) ; CHECK-NEXT: movl %ebx, 12(%eax) ; CHECK-NEXT: popl %esi ; CHECK-NEXT: popl %edi ; CHECK-NEXT: popl %ebx ; CHECK-NEXT: retl ; ; DISABLED-LABEL: test_conditional_block_float: ; DISABLED: # %bb.0: # %entry ; DISABLED-NEXT: pushl %esi ; DISABLED-NEXT: 
; Function Attrs: nounwind uwtable
define void @test_conditional_block_float(%struct.S7* nocapture %s1, %struct.S7* nocapture %s2, i32 %x, %struct.S7* nocapture %s3, %struct.S7* nocapture readonly %s4, float %y) local_unnamed_addr #0 {
; CHECK-LABEL: test_conditional_block_float:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    pushl %ebx
; CHECK-NEXT:    .cfi_def_cfa_offset 8
; CHECK-NEXT:    pushl %edi
; CHECK-NEXT:    .cfi_def_cfa_offset 12
; CHECK-NEXT:    pushl %esi
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %esi, -16
; CHECK-NEXT:    .cfi_offset %edi, -12
; CHECK-NEXT:    .cfi_offset %ebx, -8
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %esi
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %edx
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; CHECK-NEXT:    jl .LBB11_2
; CHECK-NEXT:  # %bb.1: # %if.then
; CHECK-NEXT:    movl $1065353216, 4(%ecx) # imm = 0x3F800000
; CHECK-NEXT:  .LBB11_2: # %if.end
; CHECK-NEXT:    movups (%esi), %xmm0
; CHECK-NEXT:    movups 16(%esi), %xmm1
; CHECK-NEXT:    movups %xmm1, 16(%edx)
; CHECK-NEXT:    movups %xmm0, (%edx)
; CHECK-NEXT:    movl (%ecx), %edx
; CHECK-NEXT:    movl 4(%ecx), %esi
; CHECK-NEXT:    movl 8(%ecx), %edi
; CHECK-NEXT:    movl 12(%ecx), %ebx
; CHECK-NEXT:    movups 16(%ecx), %xmm0
; CHECK-NEXT:    movups %xmm0, 16(%eax)
; CHECK-NEXT:    movl %edx, (%eax)
; CHECK-NEXT:    movl %esi, 4(%eax)
; CHECK-NEXT:    movl %edi, 8(%eax)
; CHECK-NEXT:    movl %ebx, 12(%eax)
; CHECK-NEXT:    popl %esi
; CHECK-NEXT:    popl %edi
; CHECK-NEXT:    popl %ebx
; CHECK-NEXT:    retl
;
; DISABLED-LABEL: test_conditional_block_float:
; DISABLED:       # %bb.0: # %entry
; DISABLED-NEXT:    pushl %esi
; DISABLED-NEXT:    .cfi_def_cfa_offset 8
; DISABLED-NEXT:    .cfi_offset %esi, -8
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %esi
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %eax
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; DISABLED-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; DISABLED-NEXT:    jl .LBB11_2
; DISABLED-NEXT:  # %bb.1: # %if.then
; DISABLED-NEXT:    movl $1065353216, 4(%ecx) # imm = 0x3F800000
; DISABLED-NEXT:  .LBB11_2: # %if.end
; DISABLED-NEXT:    movups (%esi), %xmm0
; DISABLED-NEXT:    movups 16(%esi), %xmm1
; DISABLED-NEXT:    movups %xmm1, 16(%edx)
; DISABLED-NEXT:    movups %xmm0, (%edx)
; DISABLED-NEXT:    movups (%ecx), %xmm0
; DISABLED-NEXT:    movups 16(%ecx), %xmm1
; DISABLED-NEXT:    movups %xmm1, 16(%eax)
; DISABLED-NEXT:    movups %xmm0, (%eax)
; DISABLED-NEXT:    popl %esi
; DISABLED-NEXT:    retl
;
; CHECK-AVX2-LABEL: test_conditional_block_float:
; CHECK-AVX2:       # %bb.0: # %entry
; CHECK-AVX2-NEXT:    pushl %ebx
; CHECK-AVX2-NEXT:    .cfi_def_cfa_offset 8
; CHECK-AVX2-NEXT:    pushl %edi
; CHECK-AVX2-NEXT:    .cfi_def_cfa_offset 12
; CHECK-AVX2-NEXT:    pushl %esi
; CHECK-AVX2-NEXT:    .cfi_def_cfa_offset 16
; CHECK-AVX2-NEXT:    .cfi_offset %esi, -16
; CHECK-AVX2-NEXT:    .cfi_offset %edi, -12
; CHECK-AVX2-NEXT:    .cfi_offset %ebx, -8
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX2-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; CHECK-AVX2-NEXT:    jl .LBB11_2
; CHECK-AVX2-NEXT:  # %bb.1: # %if.then
; CHECK-AVX2-NEXT:    movl $1065353216, 4(%ecx) # imm = 0x3F800000
; CHECK-AVX2-NEXT:  .LBB11_2: # %if.end
; CHECK-AVX2-NEXT:    movups (%esi), %xmm0
; CHECK-AVX2-NEXT:    movups 16(%esi), %xmm1
; CHECK-AVX2-NEXT:    movups %xmm1, 16(%edx)
; CHECK-AVX2-NEXT:    movups %xmm0, (%edx)
; CHECK-AVX2-NEXT:    movl (%ecx), %edx
; CHECK-AVX2-NEXT:    movl 4(%ecx), %esi
; CHECK-AVX2-NEXT:    movl 8(%ecx), %edi
; CHECK-AVX2-NEXT:    movl 12(%ecx), %ebx
; CHECK-AVX2-NEXT:    movups 16(%ecx), %xmm0
; CHECK-AVX2-NEXT:    movups %xmm0, 16(%eax)
; CHECK-AVX2-NEXT:    movl %edx, (%eax)
; CHECK-AVX2-NEXT:    movl %esi, 4(%eax)
; CHECK-AVX2-NEXT:    movl %edi, 8(%eax)
; CHECK-AVX2-NEXT:    movl %ebx, 12(%eax)
; CHECK-AVX2-NEXT:    popl %esi
; CHECK-AVX2-NEXT:    popl %edi
; CHECK-AVX2-NEXT:    popl %ebx
; CHECK-AVX2-NEXT:    retl
;
; CHECK-AVX512-LABEL: test_conditional_block_float:
; CHECK-AVX512:       # %bb.0: # %entry
; CHECK-AVX512-NEXT:    pushl %esi
; CHECK-AVX512-NEXT:    .cfi_def_cfa_offset 8
; CHECK-AVX512-NEXT:    .cfi_offset %esi, -8
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX512-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; CHECK-AVX512-NEXT:    jl .LBB11_2
; CHECK-AVX512-NEXT:  # %bb.1: # %if.then
; CHECK-AVX512-NEXT:    movl $1065353216, 4(%ecx) # imm = 0x3F800000
; CHECK-AVX512-NEXT:  .LBB11_2: # %if.end
; CHECK-AVX512-NEXT:    vmovups (%esi), %ymm0
; CHECK-AVX512-NEXT:    vmovups %ymm0, (%edx)
; CHECK-AVX512-NEXT:    movl (%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, (%eax)
; CHECK-AVX512-NEXT:    movl 4(%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, 4(%eax)
; CHECK-AVX512-NEXT:    vmovups 8(%ecx), %xmm0
; CHECK-AVX512-NEXT:    vmovups %xmm0, 8(%eax)
; CHECK-AVX512-NEXT:    movl 24(%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, 24(%eax)
; CHECK-AVX512-NEXT:    movl 28(%ecx), %ecx
; CHECK-AVX512-NEXT:    movl %ecx, 28(%eax)
; CHECK-AVX512-NEXT:    popl %esi
; CHECK-AVX512-NEXT:    vzeroupper
; CHECK-AVX512-NEXT:    retl
entry:
  %cmp = icmp sgt i32 %x, 17
  br i1 %cmp, label %if.then, label %if.end

if.then:                                          ; preds = %entry
  %b = getelementptr inbounds %struct.S7, %struct.S7* %s1, i64 0, i32 1
  store float 1.0, float* %b, align 4
  br label %if.end

if.end:                                           ; preds = %if.then, %entry
  %0 = bitcast %struct.S7* %s3 to i8*
  %1 = bitcast %struct.S7* %s4 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 32, i32 4, i1 false)
  %2 = bitcast %struct.S7* %s2 to i8*
  %3 = bitcast %struct.S7* %s1 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 32, i32 4, i1 false)
  ret void
}

%struct.S8 = type { i64, i64, i64, i64, i64, i64 }
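; test_conditional_block_ymm repeats the pattern for %struct.S8 (six i64
; fields), with an i64 store into the second field guarding a 32-byte
; %s1 -> %s2 copy that the AVX512 run can otherwise do with a single ymm
; move. A rough C equivalent (identifiers are illustrative assumptions, not
; taken from the original source):
;
;   #include <string.h>
;   void test_conditional_block_ymm(struct S8 *s1, struct S8 *s2, int x,
;                                   struct S8 *s3, struct S8 *s4) {
;     if (x > 17)
;       s1->b = 1;            /* 8-byte store overlapping the copy below */
;     memcpy(s3, s4, 32);     /* unrelated copy, stays as wide moves */
;     memcpy(s2, s1, 32);     /* copy broken up around the stored field */
;   }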
; Function Attrs: nounwind uwtable
define void @test_conditional_block_ymm(%struct.S8* nocapture %s1, %struct.S8* nocapture %s2, i32 %x, %struct.S8* nocapture %s3, %struct.S8* nocapture readonly %s4) local_unnamed_addr #0 {
; CHECK-LABEL: test_conditional_block_ymm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    pushl %ebx
; CHECK-NEXT:    .cfi_def_cfa_offset 8
; CHECK-NEXT:    pushl %edi
; CHECK-NEXT:    .cfi_def_cfa_offset 12
; CHECK-NEXT:    pushl %esi
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    .cfi_offset %esi, -16
; CHECK-NEXT:    .cfi_offset %edi, -12
; CHECK-NEXT:    .cfi_offset %ebx, -8
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %esi
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %edx
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; CHECK-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; CHECK-NEXT:    jl .LBB12_2
; CHECK-NEXT:  # %bb.1: # %if.then
; CHECK-NEXT:    movl $0, 12(%ecx)
; CHECK-NEXT:    movl $1, 8(%ecx)
; CHECK-NEXT:  .LBB12_2: # %if.end
; CHECK-NEXT:    movups (%esi), %xmm0
; CHECK-NEXT:    movups 16(%esi), %xmm1
; CHECK-NEXT:    movups %xmm1, 16(%edx)
; CHECK-NEXT:    movups %xmm0, (%edx)
; CHECK-NEXT:    movl (%ecx), %edx
; CHECK-NEXT:    movl 4(%ecx), %esi
; CHECK-NEXT:    movl 8(%ecx), %edi
; CHECK-NEXT:    movl 12(%ecx), %ebx
; CHECK-NEXT:    movups 16(%ecx), %xmm0
; CHECK-NEXT:    movups %xmm0, 16(%eax)
; CHECK-NEXT:    movl %edx, (%eax)
; CHECK-NEXT:    movl %esi, 4(%eax)
; CHECK-NEXT:    movl %edi, 8(%eax)
; CHECK-NEXT:    movl %ebx, 12(%eax)
; CHECK-NEXT:    popl %esi
; CHECK-NEXT:    popl %edi
; CHECK-NEXT:    popl %ebx
; CHECK-NEXT:    retl
;
; DISABLED-LABEL: test_conditional_block_ymm:
; DISABLED:       # %bb.0: # %entry
; DISABLED-NEXT:    pushl %esi
; DISABLED-NEXT:    .cfi_def_cfa_offset 8
; DISABLED-NEXT:    .cfi_offset %esi, -8
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %esi
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %edx
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %eax
; DISABLED-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; DISABLED-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; DISABLED-NEXT:    jl .LBB12_2
; DISABLED-NEXT:  # %bb.1: # %if.then
; DISABLED-NEXT:    movl $0, 12(%ecx)
; DISABLED-NEXT:    movl $1, 8(%ecx)
; DISABLED-NEXT:  .LBB12_2: # %if.end
; DISABLED-NEXT:    movups (%esi), %xmm0
; DISABLED-NEXT:    movups 16(%esi), %xmm1
; DISABLED-NEXT:    movups %xmm1, 16(%edx)
; DISABLED-NEXT:    movups %xmm0, (%edx)
; DISABLED-NEXT:    movups (%ecx), %xmm0
; DISABLED-NEXT:    movups 16(%ecx), %xmm1
; DISABLED-NEXT:    movups %xmm1, 16(%eax)
; DISABLED-NEXT:    movups %xmm0, (%eax)
; DISABLED-NEXT:    popl %esi
; DISABLED-NEXT:    retl
;
; CHECK-AVX2-LABEL: test_conditional_block_ymm:
; CHECK-AVX2:       # %bb.0: # %entry
; CHECK-AVX2-NEXT:    pushl %ebx
; CHECK-AVX2-NEXT:    .cfi_def_cfa_offset 8
; CHECK-AVX2-NEXT:    pushl %edi
; CHECK-AVX2-NEXT:    .cfi_def_cfa_offset 12
; CHECK-AVX2-NEXT:    pushl %esi
; CHECK-AVX2-NEXT:    .cfi_def_cfa_offset 16
; CHECK-AVX2-NEXT:    .cfi_offset %esi, -16
; CHECK-AVX2-NEXT:    .cfi_offset %edi, -12
; CHECK-AVX2-NEXT:    .cfi_offset %ebx, -8
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX2-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; CHECK-AVX2-NEXT:    jl .LBB12_2
; CHECK-AVX2-NEXT:  # %bb.1: # %if.then
; CHECK-AVX2-NEXT:    movl $0, 12(%ecx)
; CHECK-AVX2-NEXT:    movl $1, 8(%ecx)
; CHECK-AVX2-NEXT:  .LBB12_2: # %if.end
; CHECK-AVX2-NEXT:    movups (%esi), %xmm0
; CHECK-AVX2-NEXT:    movups 16(%esi), %xmm1
; CHECK-AVX2-NEXT:    movups %xmm1, 16(%edx)
; CHECK-AVX2-NEXT:    movups %xmm0, (%edx)
; CHECK-AVX2-NEXT:    movl (%ecx), %edx
; CHECK-AVX2-NEXT:    movl 4(%ecx), %esi
; CHECK-AVX2-NEXT:    movl 8(%ecx), %edi
; CHECK-AVX2-NEXT:    movl 12(%ecx), %ebx
; CHECK-AVX2-NEXT:    movups 16(%ecx), %xmm0
; CHECK-AVX2-NEXT:    movups %xmm0, 16(%eax)
; CHECK-AVX2-NEXT:    movl %edx, (%eax)
; CHECK-AVX2-NEXT:    movl %esi, 4(%eax)
; CHECK-AVX2-NEXT:    movl %edi, 8(%eax)
; CHECK-AVX2-NEXT:    movl %ebx, 12(%eax)
; CHECK-AVX2-NEXT:    popl %esi
; CHECK-AVX2-NEXT:    popl %edi
; CHECK-AVX2-NEXT:    popl %ebx
; CHECK-AVX2-NEXT:    retl
;
; CHECK-AVX512-LABEL: test_conditional_block_ymm:
; CHECK-AVX512:       # %bb.0: # %entry
; CHECK-AVX512-NEXT:    pushl %esi
; CHECK-AVX512-NEXT:    .cfi_def_cfa_offset 8
; CHECK-AVX512-NEXT:    .cfi_offset %esi, -8
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %esi
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %edx
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; CHECK-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; CHECK-AVX512-NEXT:    cmpl $18, {{[0-9]+}}(%esp)
; CHECK-AVX512-NEXT:    jl .LBB12_2
; CHECK-AVX512-NEXT:  # %bb.1: # %if.then
; CHECK-AVX512-NEXT:    movl $0, 12(%ecx)
; CHECK-AVX512-NEXT:    movl $1, 8(%ecx)
; CHECK-AVX512-NEXT:  .LBB12_2: # %if.end
; CHECK-AVX512-NEXT:    vmovups (%esi), %ymm0
; CHECK-AVX512-NEXT:    vmovups %ymm0, (%edx)
; CHECK-AVX512-NEXT:    movl (%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, (%eax)
; CHECK-AVX512-NEXT:    movl 4(%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, 4(%eax)
; CHECK-AVX512-NEXT:    movl 8(%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, 8(%eax)
; CHECK-AVX512-NEXT:    movl 12(%ecx), %edx
; CHECK-AVX512-NEXT:    movl %edx, 12(%eax)
; CHECK-AVX512-NEXT:    vmovups 16(%ecx), %xmm0
; CHECK-AVX512-NEXT:    vmovups %xmm0, 16(%eax)
; CHECK-AVX512-NEXT:    popl %esi
; CHECK-AVX512-NEXT:    vzeroupper
; CHECK-AVX512-NEXT:    retl
entry:
  %cmp = icmp sgt i32 %x, 17
  br i1 %cmp, label %if.then, label %if.end

if.then:                                          ; preds = %entry
  %b = getelementptr inbounds %struct.S8, %struct.S8* %s1, i64 0, i32 1
  store i64 1, i64* %b, align 4
  br label %if.end

if.end:                                           ; preds = %if.then, %entry
  %0 = bitcast %struct.S8* %s3 to i8*
  %1 = bitcast %struct.S8* %s4 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %0, i8* %1, i64 32, i32 4, i1 false)
  %2 = bitcast %struct.S8* %s2 to i8*
  %3 = bitcast %struct.S8* %s1 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %2, i8* %3, i64 32, i32 4, i1 false)
  ret void
}