; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f | FileCheck --check-prefix=CHECK --check-prefix=KNL %s ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f,+avx512bw,+avx512vl,+avx512dq | FileCheck --check-prefix=CHECK --check-prefix=SKX --check-prefix=SKX_ONLY %s ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f,+avx512bw,+avx512vl,+avx512dq,+avx512vbmi | FileCheck --check-prefix=CHECK --check-prefix=SKX --check-prefix=SKX_VBMI %s define <16 x float> @test1(<16 x float> %x, float* %br, float %y) nounwind { ; CHECK-LABEL: test1: ; CHECK: ## BB#0: ; CHECK-NEXT: vinsertps {{.*#+}} xmm2 = xmm0[0],mem[0],xmm0[2,3] ; CHECK-NEXT: vinsertf32x4 $0, %xmm2, %zmm0, %zmm2 ; CHECK-NEXT: vextractf32x4 $3, %zmm0, %xmm0 ; CHECK-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,1],xmm1[0],xmm0[3] ; CHECK-NEXT: vinsertf32x4 $3, %xmm0, %zmm2, %zmm0 ; CHECK-NEXT: retq %rrr = load float, float* %br %rrr2 = insertelement <16 x float> %x, float %rrr, i32 1 %rrr3 = insertelement <16 x float> %rrr2, float %y, i32 14 ret <16 x float> %rrr3 } define <8 x double> @test2(<8 x double> %x, double* %br, double %y) nounwind { ; CHECK-LABEL: test2: ; CHECK: ## BB#0: ; CHECK-NEXT: vmovhpd {{.*#+}} xmm2 = xmm0[0],mem[0] ; CHECK-NEXT: vinsertf32x4 $0, %xmm2, %zmm0, %zmm2 ; CHECK-NEXT: vextractf32x4 $3, %zmm0, %xmm0 ; CHECK-NEXT: vmovsd {{.*#+}} xmm0 = xmm1[0],xmm0[1] ; CHECK-NEXT: vinsertf32x4 $3, %xmm0, %zmm2, %zmm0 ; CHECK-NEXT: retq %rrr = load double, double* %br %rrr2 = insertelement <8 x double> %x, double %rrr, i32 1 %rrr3 = insertelement <8 x double> %rrr2, double %y, i32 6 ret <8 x double> %rrr3 } define <16 x float> @test3(<16 x float> %x) nounwind { ; CHECK-LABEL: test3: ; CHECK: ## BB#0: ; CHECK-NEXT: vextractf128 $1, %ymm0, %xmm1 ; CHECK-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0],xmm1[0],xmm0[2,3] ; CHECK-NEXT: vinsertf32x4 $0, %xmm1, %zmm0, %zmm0 ; CHECK-NEXT: retq %eee = extractelement <16 x 
float> %x, i32 4 %rrr2 = insertelement <16 x float> %x, float %eee, i32 1 ret <16 x float> %rrr2 } define <8 x i64> @test4(<8 x i64> %x) nounwind { ; CHECK-LABEL: test4: ; CHECK: ## BB#0: ; CHECK-NEXT: vextracti32x4 $2, %zmm0, %xmm1 ; CHECK-NEXT: vmovq %xmm1, %rax ; CHECK-NEXT: vpinsrq $1, %rax, %xmm0, %xmm1 ; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm0 ; CHECK-NEXT: retq %eee = extractelement <8 x i64> %x, i32 4 %rrr2 = insertelement <8 x i64> %x, i64 %eee, i32 1 ret <8 x i64> %rrr2 } define i32 @test5(<4 x float> %x) nounwind { ; CHECK-LABEL: test5: ; CHECK: ## BB#0: ; CHECK-NEXT: vextractps $3, %xmm0, %eax ; CHECK-NEXT: retq %ef = extractelement <4 x float> %x, i32 3 %ei = bitcast float %ef to i32 ret i32 %ei } define void @test6(<4 x float> %x, float* %out) nounwind { ; CHECK-LABEL: test6: ; CHECK: ## BB#0: ; CHECK-NEXT: vextractps $3, %xmm0, (%rdi) ; CHECK-NEXT: retq %ef = extractelement <4 x float> %x, i32 3 store float %ef, float* %out, align 4 ret void } define float @test7(<16 x float> %x, i32 %ind) nounwind { ; CHECK-LABEL: test7: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $15, %edi ; CHECK-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %e = extractelement <16 x float> %x, i32 %ind ret float %e } define double @test8(<8 x double> %x, i32 %ind) nounwind { ; CHECK-LABEL: test8: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %e = extractelement 
<8 x double> %x, i32 %ind ret double %e } define float @test9(<8 x float> %x, i32 %ind) nounwind { ; CHECK-LABEL: test9: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %e = extractelement <8 x float> %x, i32 %ind ret float %e } define i32 @test10(<16 x i32> %x, i32 %ind) nounwind { ; CHECK-LABEL: test10: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $15, %edi ; CHECK-NEXT: movl (%rsp,%rdi,4), %eax ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %e = extractelement <16 x i32> %x, i32 %ind ret i32 %e } define <16 x i32> @test11(<16 x i32>%a, <16 x i32>%b) { ; KNL-LABEL: test11: ; KNL: ## BB#0: ; KNL-NEXT: vpcmpltud %zmm1, %zmm0, %k0 ; KNL-NEXT: kshiftlw $11, %k0, %k0 ; KNL-NEXT: kshiftrw $15, %k0, %k0 ; KNL-NEXT: kmovw %k0, %eax ; KNL-NEXT: testb $1, %al ; KNL-NEXT: je LBB10_2 ; KNL-NEXT: ## BB#1: ## %A ; KNL-NEXT: vmovdqa64 %zmm1, %zmm0 ; KNL-NEXT: retq ; KNL-NEXT: LBB10_2: ## %B ; KNL-NEXT: vpaddd %zmm0, %zmm1, %zmm0 ; KNL-NEXT: retq ; ; SKX-LABEL: test11: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpltud %zmm1, %zmm0, %k0 ; SKX-NEXT: kshiftlw $11, %k0, %k0 ; SKX-NEXT: kshiftrw $15, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: testb $1, %al ; SKX-NEXT: je LBB10_2 ; SKX-NEXT: ## BB#1: ## %A ; SKX-NEXT: vmovdqa64 %zmm1, %zmm0 ; SKX-NEXT: retq ; SKX-NEXT: LBB10_2: ## %B ; SKX-NEXT: vpaddd %zmm0, %zmm1, %zmm0 ; SKX-NEXT: retq %cmp_res = icmp ult <16 x i32> %a, %b %ia = extractelement <16 x i1> %cmp_res, i32 4 br 
i1 %ia, label %A, label %B A: ret <16 x i32>%b B: %c = add <16 x i32>%b, %a ret <16 x i32>%c } define i64 @test12(<16 x i64>%a, <16 x i64>%b, i64 %a1, i64 %b1) { ; KNL-LABEL: test12: ; KNL: ## BB#0: ; KNL-NEXT: vpcmpgtq %zmm0, %zmm2, %k0 ; KNL-NEXT: kshiftlw $15, %k0, %k0 ; KNL-NEXT: kshiftrw $15, %k0, %k0 ; KNL-NEXT: kmovw %k0, %eax ; KNL-NEXT: testb $1, %al ; KNL-NEXT: cmoveq %rsi, %rdi ; KNL-NEXT: movq %rdi, %rax ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test12: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpgtq %zmm0, %zmm2, %k0 ; SKX-NEXT: kshiftlb $7, %k0, %k0 ; SKX-NEXT: kshiftrb $7, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: testb $1, %al ; SKX-NEXT: cmoveq %rsi, %rdi ; SKX-NEXT: movq %rdi, %rax ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %cmpvector_func.i = icmp slt <16 x i64> %a, %b %extract24vector_func.i = extractelement <16 x i1> %cmpvector_func.i, i32 0 %res = select i1 %extract24vector_func.i, i64 %a1, i64 %b1 ret i64 %res } define i16 @test13(i32 %a, i32 %b) { ; KNL-LABEL: test13: ; KNL: ## BB#0: ; KNL-NEXT: cmpl %esi, %edi ; KNL-NEXT: setb %al ; KNL-NEXT: movw $-4, %cx ; KNL-NEXT: kmovw %ecx, %k0 ; KNL-NEXT: kshiftrw $1, %k0, %k0 ; KNL-NEXT: kshiftlw $1, %k0, %k0 ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: kmovw %eax, %k1 ; KNL-NEXT: korw %k1, %k0, %k0 ; KNL-NEXT: kmovw %k0, %eax ; KNL-NEXT: ## kill: %AX %AX %EAX ; KNL-NEXT: retq ; ; SKX-LABEL: test13: ; SKX: ## BB#0: ; SKX-NEXT: cmpl %esi, %edi ; SKX-NEXT: setb %al ; SKX-NEXT: movw $-4, %cx ; SKX-NEXT: kmovd %ecx, %k0 ; SKX-NEXT: kshiftrw $1, %k0, %k0 ; SKX-NEXT: kshiftlw $1, %k0, %k0 ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: kmovw %eax, %k1 ; SKX-NEXT: korw %k1, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: ## kill: %AX %AX %EAX ; SKX-NEXT: retq %cmp_res = icmp ult i32 %a, %b %maskv = insertelement <16 x i1> , i1 %cmp_res, i32 0 %res = bitcast <16 x i1> %maskv to i16 ret i16 %res } define i64 @test14(<8 x i64>%a, <8 x i64>%b, i64 %a1, i64 %b1) { ; KNL-LABEL: test14: ; KNL: ## BB#0: ; KNL-NEXT: vpcmpgtq 
%zmm0, %zmm1, %k0 ; KNL-NEXT: kshiftlw $11, %k0, %k0 ; KNL-NEXT: kshiftrw $15, %k0, %k0 ; KNL-NEXT: kmovw %k0, %eax ; KNL-NEXT: testb $1, %al ; KNL-NEXT: cmoveq %rsi, %rdi ; KNL-NEXT: movq %rdi, %rax ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test14: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpgtq %zmm0, %zmm1, %k0 ; SKX-NEXT: kshiftlb $3, %k0, %k0 ; SKX-NEXT: kshiftrb $7, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: testb $1, %al ; SKX-NEXT: cmoveq %rsi, %rdi ; SKX-NEXT: movq %rdi, %rax ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %cmpvector_func.i = icmp slt <8 x i64> %a, %b %extract24vector_func.i = extractelement <8 x i1> %cmpvector_func.i, i32 4 %res = select i1 %extract24vector_func.i, i64 %a1, i64 %b1 ret i64 %res } define i16 @test15(i1 *%addr) { ; CHECK-LABEL: test15: ; CHECK: ## BB#0: ; CHECK-NEXT: movb (%rdi), %al ; CHECK-NEXT: xorl %ecx, %ecx ; CHECK-NEXT: testb %al, %al ; CHECK-NEXT: movw $-1, %ax ; CHECK-NEXT: cmovew %cx, %ax ; CHECK-NEXT: retq %x = load i1 , i1 * %addr, align 1 %x1 = insertelement <16 x i1> undef, i1 %x, i32 10 %x2 = bitcast <16 x i1>%x1 to i16 ret i16 %x2 } define i16 @test16(i1 *%addr, i16 %a) { ; KNL-LABEL: test16: ; KNL: ## BB#0: ; KNL-NEXT: movb (%rdi), %al ; KNL-NEXT: kmovw %esi, %k1 ; KNL-NEXT: kmovw %eax, %k2 ; KNL-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k2} {z} ; KNL-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z} ; KNL-NEXT: vmovdqa32 {{.*#+}} zmm2 = [0,1,2,3,4,5,6,7,8,9,16,11,12,13,14,15] ; KNL-NEXT: vpermi2d %zmm0, %zmm1, %zmm2 ; KNL-NEXT: vpslld $31, %zmm2, %zmm0 ; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0 ; KNL-NEXT: kmovw %k0, %eax ; KNL-NEXT: ## kill: %AX %AX %EAX ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test16: ; SKX: ## BB#0: ; SKX-NEXT: movb (%rdi), %al ; SKX-NEXT: kmovd %esi, %k0 ; SKX-NEXT: kmovd %eax, %k1 ; SKX-NEXT: vpmovm2d %k1, %zmm0 ; SKX-NEXT: vpmovm2d %k0, %zmm1 ; SKX-NEXT: vmovdqa32 {{.*#+}} zmm2 = [0,1,2,3,4,5,6,7,8,9,16,11,12,13,14,15] ; SKX-NEXT: vpermi2d %zmm0, %zmm1, %zmm2 ; 
SKX-NEXT: vpmovd2m %zmm2, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: ## kill: %AX %AX %EAX ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %x = load i1 , i1 * %addr, align 128 %a1 = bitcast i16 %a to <16 x i1> %x1 = insertelement <16 x i1> %a1, i1 %x, i32 10 %x2 = bitcast <16 x i1>%x1 to i16 ret i16 %x2 } define i8 @test17(i1 *%addr, i8 %a) { ; KNL-LABEL: test17: ; KNL: ## BB#0: ; KNL-NEXT: movb (%rdi), %al ; KNL-NEXT: kmovw %esi, %k1 ; KNL-NEXT: kmovw %eax, %k2 ; KNL-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k2} {z} ; KNL-NEXT: vpternlogq $255, %zmm1, %zmm1, %zmm1 {%k1} {z} ; KNL-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,2,3,8,5,6,7] ; KNL-NEXT: vpermi2q %zmm0, %zmm1, %zmm2 ; KNL-NEXT: vpsllq $63, %zmm2, %zmm0 ; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0 ; KNL-NEXT: kmovw %k0, %eax ; KNL-NEXT: ## kill: %AL %AL %EAX ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test17: ; SKX: ## BB#0: ; SKX-NEXT: movb (%rdi), %al ; SKX-NEXT: kmovd %esi, %k0 ; SKX-NEXT: kmovd %eax, %k1 ; SKX-NEXT: vpmovm2q %k1, %zmm0 ; SKX-NEXT: vpmovm2q %k0, %zmm1 ; SKX-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,2,3,8,5,6,7] ; SKX-NEXT: vpermi2q %zmm0, %zmm1, %zmm2 ; SKX-NEXT: vpmovq2m %zmm2, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: ## kill: %AL %AL %EAX ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %x = load i1 , i1 * %addr, align 128 %a1 = bitcast i8 %a to <8 x i1> %x1 = insertelement <8 x i1> %a1, i1 %x, i32 4 %x2 = bitcast <8 x i1>%x1 to i8 ret i8 %x2 } define i64 @extract_v8i64(<8 x i64> %x, i64* %dst) { ; CHECK-LABEL: extract_v8i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrq $1, %xmm0, %rax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrq $1, %xmm0, (%rdi) ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <8 x i64> %x, i32 1 %r2 = extractelement <8 x i64> %x, i32 3 store i64 %r2, i64* %dst, align 1 ret i64 %r1 } define i64 @extract_v4i64(<4 x i64> %x, i64* %dst) { ; CHECK-LABEL: extract_v4i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrq $1, %xmm0, %rax ; CHECK-NEXT: vextracti128 $1, 
%ymm0, %xmm0 ; CHECK-NEXT: vpextrq $1, %xmm0, (%rdi) ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <4 x i64> %x, i32 1 %r2 = extractelement <4 x i64> %x, i32 3 store i64 %r2, i64* %dst, align 1 ret i64 %r1 } define i64 @extract_v2i64(<2 x i64> %x, i64* %dst) { ; CHECK-LABEL: extract_v2i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vmovq %xmm0, %rax ; CHECK-NEXT: vpextrq $1, %xmm0, (%rdi) ; CHECK-NEXT: retq %r1 = extractelement <2 x i64> %x, i32 0 %r2 = extractelement <2 x i64> %x, i32 1 store i64 %r2, i64* %dst, align 1 ret i64 %r1 } define i32 @extract_v16i32(<16 x i32> %x, i32* %dst) { ; CHECK-LABEL: extract_v16i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrd $1, %xmm0, %eax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrd $1, %xmm0, (%rdi) ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <16 x i32> %x, i32 1 %r2 = extractelement <16 x i32> %x, i32 5 store i32 %r2, i32* %dst, align 1 ret i32 %r1 } define i32 @extract_v8i32(<8 x i32> %x, i32* %dst) { ; CHECK-LABEL: extract_v8i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrd $1, %xmm0, %eax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrd $1, %xmm0, (%rdi) ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <8 x i32> %x, i32 1 %r2 = extractelement <8 x i32> %x, i32 5 store i32 %r2, i32* %dst, align 1 ret i32 %r1 } define i32 @extract_v4i32(<4 x i32> %x, i32* %dst) { ; CHECK-LABEL: extract_v4i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrd $1, %xmm0, %eax ; CHECK-NEXT: vpextrd $3, %xmm0, (%rdi) ; CHECK-NEXT: retq %r1 = extractelement <4 x i32> %x, i32 1 %r2 = extractelement <4 x i32> %x, i32 3 store i32 %r2, i32* %dst, align 1 ret i32 %r1 } define i16 @extract_v32i16(<32 x i16> %x, i16* %dst) { ; CHECK-LABEL: extract_v32i16: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrw $1, %xmm0, %eax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrw $1, %xmm0, (%rdi) ; CHECK-NEXT: ## kill: %AX %AX %EAX ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = 
extractelement <32 x i16> %x, i32 1 %r2 = extractelement <32 x i16> %x, i32 9 store i16 %r2, i16* %dst, align 1 ret i16 %r1 } define i16 @extract_v16i16(<16 x i16> %x, i16* %dst) { ; CHECK-LABEL: extract_v16i16: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrw $1, %xmm0, %eax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrw $1, %xmm0, (%rdi) ; CHECK-NEXT: ## kill: %AX %AX %EAX ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <16 x i16> %x, i32 1 %r2 = extractelement <16 x i16> %x, i32 9 store i16 %r2, i16* %dst, align 1 ret i16 %r1 } define i16 @extract_v8i16(<8 x i16> %x, i16* %dst) { ; CHECK-LABEL: extract_v8i16: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrw $1, %xmm0, %eax ; CHECK-NEXT: vpextrw $3, %xmm0, (%rdi) ; CHECK-NEXT: ## kill: %AX %AX %EAX ; CHECK-NEXT: retq %r1 = extractelement <8 x i16> %x, i32 1 %r2 = extractelement <8 x i16> %x, i32 3 store i16 %r2, i16* %dst, align 1 ret i16 %r1 } define i8 @extract_v64i8(<64 x i8> %x, i8* %dst) { ; CHECK-LABEL: extract_v64i8: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrb $1, %xmm0, %eax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrb $1, %xmm0, (%rdi) ; CHECK-NEXT: ## kill: %AL %AL %EAX ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <64 x i8> %x, i32 1 %r2 = extractelement <64 x i8> %x, i32 17 store i8 %r2, i8* %dst, align 1 ret i8 %r1 } define i8 @extract_v32i8(<32 x i8> %x, i8* %dst) { ; CHECK-LABEL: extract_v32i8: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrb $1, %xmm0, %eax ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpextrb $1, %xmm0, (%rdi) ; CHECK-NEXT: ## kill: %AL %AL %EAX ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %r1 = extractelement <32 x i8> %x, i32 1 %r2 = extractelement <32 x i8> %x, i32 17 store i8 %r2, i8* %dst, align 1 ret i8 %r1 } define i8 @extract_v16i8(<16 x i8> %x, i8* %dst) { ; CHECK-LABEL: extract_v16i8: ; CHECK: ## BB#0: ; CHECK-NEXT: vpextrb $1, %xmm0, %eax ; CHECK-NEXT: vpextrb $3, %xmm0, (%rdi) ; CHECK-NEXT: ## kill: %AL %AL %EAX 
; CHECK-NEXT: retq %r1 = extractelement <16 x i8> %x, i32 1 %r2 = extractelement <16 x i8> %x, i32 3 store i8 %r2, i8* %dst, align 1 ret i8 %r1 } define <8 x i64> @insert_v8i64(<8 x i64> %x, i64 %y , i64* %ptr) { ; CHECK-LABEL: insert_v8i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrq $1, (%rsi), %xmm0, %xmm1 ; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1 ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpinsrq $1, %rdi, %xmm0, %xmm0 ; CHECK-NEXT: vinserti32x4 $1, %xmm0, %zmm1, %zmm0 ; CHECK-NEXT: retq %val = load i64, i64* %ptr %r1 = insertelement <8 x i64> %x, i64 %val, i32 1 %r2 = insertelement <8 x i64> %r1, i64 %y, i32 3 ret <8 x i64> %r2 } define <4 x i64> @insert_v4i64(<4 x i64> %x, i64 %y , i64* %ptr) { ; CHECK-LABEL: insert_v4i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrq $1, (%rsi), %xmm0, %xmm1 ; CHECK-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm0[4,5,6,7] ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpinsrq $1, %rdi, %xmm0, %xmm0 ; CHECK-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 ; CHECK-NEXT: retq %val = load i64, i64* %ptr %r1 = insertelement <4 x i64> %x, i64 %val, i32 1 %r2 = insertelement <4 x i64> %r1, i64 %y, i32 3 ret <4 x i64> %r2 } define <2 x i64> @insert_v2i64(<2 x i64> %x, i64 %y , i64* %ptr) { ; CHECK-LABEL: insert_v2i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrq $0, %rdi, %xmm0, %xmm0 ; CHECK-NEXT: vpinsrq $1, (%rsi), %xmm0, %xmm0 ; CHECK-NEXT: retq %val = load i64, i64* %ptr %r1 = insertelement <2 x i64> %x, i64 %val, i32 1 %r2 = insertelement <2 x i64> %r1, i64 %y, i32 0 ret <2 x i64> %r2 } define <16 x i32> @insert_v16i32(<16 x i32> %x, i32 %y, i32* %ptr) { ; CHECK-LABEL: insert_v16i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrd $1, (%rsi), %xmm0, %xmm1 ; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1 ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpinsrd $1, %edi, %xmm0, %xmm0 ; CHECK-NEXT: vinserti32x4 $1, %xmm0, %zmm1, %zmm0 ; CHECK-NEXT: retq %val = load i32, i32* %ptr %r1 = insertelement <16 x i32> 
%x, i32 %val, i32 1 %r2 = insertelement <16 x i32> %r1, i32 %y, i32 5 ret <16 x i32> %r2 } define <8 x i32> @insert_v8i32(<8 x i32> %x, i32 %y, i32* %ptr) { ; CHECK-LABEL: insert_v8i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrd $1, (%rsi), %xmm0, %xmm1 ; CHECK-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm0[4,5,6,7] ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpinsrd $1, %edi, %xmm0, %xmm0 ; CHECK-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 ; CHECK-NEXT: retq %val = load i32, i32* %ptr %r1 = insertelement <8 x i32> %x, i32 %val, i32 1 %r2 = insertelement <8 x i32> %r1, i32 %y, i32 5 ret <8 x i32> %r2 } define <4 x i32> @insert_v4i32(<4 x i32> %x, i32 %y, i32* %ptr) { ; CHECK-LABEL: insert_v4i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrd $1, (%rsi), %xmm0, %xmm0 ; CHECK-NEXT: vpinsrd $3, %edi, %xmm0, %xmm0 ; CHECK-NEXT: retq %val = load i32, i32* %ptr %r1 = insertelement <4 x i32> %x, i32 %val, i32 1 %r2 = insertelement <4 x i32> %r1, i32 %y, i32 3 ret <4 x i32> %r2 } define <32 x i16> @insert_v32i16(<32 x i16> %x, i16 %y, i16* %ptr) { ; KNL-LABEL: insert_v32i16: ; KNL: ## BB#0: ; KNL-NEXT: vpinsrw $1, (%rsi), %xmm0, %xmm2 ; KNL-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm0[4,5,6,7] ; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0 ; KNL-NEXT: vpinsrw $1, %edi, %xmm0, %xmm0 ; KNL-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0 ; KNL-NEXT: retq ; ; SKX-LABEL: insert_v32i16: ; SKX: ## BB#0: ; SKX-NEXT: vpinsrw $1, (%rsi), %xmm0, %xmm1 ; SKX-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1 ; SKX-NEXT: vextracti128 $1, %ymm0, %xmm0 ; SKX-NEXT: vpinsrw $1, %edi, %xmm0, %xmm0 ; SKX-NEXT: vinserti32x4 $1, %xmm0, %zmm1, %zmm0 ; SKX-NEXT: retq %val = load i16, i16* %ptr %r1 = insertelement <32 x i16> %x, i16 %val, i32 1 %r2 = insertelement <32 x i16> %r1, i16 %y, i32 9 ret <32 x i16> %r2 } define <16 x i16> @insert_v16i16(<16 x i16> %x, i16 %y, i16* %ptr) { ; CHECK-LABEL: insert_v16i16: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrw $1, (%rsi), %xmm0, %xmm1 ; CHECK-NEXT: vpblendd 
{{.*#+}} ymm1 = ymm1[0,1,2,3],ymm0[4,5,6,7] ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpinsrw $1, %edi, %xmm0, %xmm0 ; CHECK-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 ; CHECK-NEXT: retq %val = load i16, i16* %ptr %r1 = insertelement <16 x i16> %x, i16 %val, i32 1 %r2 = insertelement <16 x i16> %r1, i16 %y, i32 9 ret <16 x i16> %r2 } define <8 x i16> @insert_v8i16(<8 x i16> %x, i16 %y, i16* %ptr) { ; CHECK-LABEL: insert_v8i16: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrw $1, (%rsi), %xmm0, %xmm0 ; CHECK-NEXT: vpinsrw $5, %edi, %xmm0, %xmm0 ; CHECK-NEXT: retq %val = load i16, i16* %ptr %r1 = insertelement <8 x i16> %x, i16 %val, i32 1 %r2 = insertelement <8 x i16> %r1, i16 %y, i32 5 ret <8 x i16> %r2 } define <64 x i8> @insert_v64i8(<64 x i8> %x, i8 %y, i8* %ptr) { ; KNL-LABEL: insert_v64i8: ; KNL: ## BB#0: ; KNL-NEXT: vpinsrb $1, (%rsi), %xmm0, %xmm2 ; KNL-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7] ; KNL-NEXT: vextracti128 $1, %ymm1, %xmm2 ; KNL-NEXT: vpinsrb $2, %edi, %xmm2, %xmm2 ; KNL-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1 ; KNL-NEXT: retq ; ; SKX-LABEL: insert_v64i8: ; SKX: ## BB#0: ; SKX-NEXT: vpinsrb $1, (%rsi), %xmm0, %xmm1 ; SKX-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm1 ; SKX-NEXT: vextracti32x4 $3, %zmm0, %xmm0 ; SKX-NEXT: vpinsrb $2, %edi, %xmm0, %xmm0 ; SKX-NEXT: vinserti32x4 $3, %xmm0, %zmm1, %zmm0 ; SKX-NEXT: retq %val = load i8, i8* %ptr %r1 = insertelement <64 x i8> %x, i8 %val, i32 1 %r2 = insertelement <64 x i8> %r1, i8 %y, i32 50 ret <64 x i8> %r2 } define <32 x i8> @insert_v32i8(<32 x i8> %x, i8 %y, i8* %ptr) { ; CHECK-LABEL: insert_v32i8: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrb $1, (%rsi), %xmm0, %xmm1 ; CHECK-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm0[4,5,6,7] ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm0 ; CHECK-NEXT: vpinsrb $1, %edi, %xmm0, %xmm0 ; CHECK-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0 ; CHECK-NEXT: retq %val = load i8, i8* %ptr %r1 = insertelement <32 x i8> %x, i8 %val, i32 1 %r2 = insertelement 
<32 x i8> %r1, i8 %y, i32 17 ret <32 x i8> %r2 } define <16 x i8> @insert_v16i8(<16 x i8> %x, i8 %y, i8* %ptr) { ; CHECK-LABEL: insert_v16i8: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrb $3, (%rsi), %xmm0, %xmm0 ; CHECK-NEXT: vpinsrb $10, %edi, %xmm0, %xmm0 ; CHECK-NEXT: retq %val = load i8, i8* %ptr %r1 = insertelement <16 x i8> %x, i8 %val, i32 3 %r2 = insertelement <16 x i8> %r1, i8 %y, i32 10 ret <16 x i8> %r2 } define <8 x i64> @test_insert_128_v8i64(<8 x i64> %x, i64 %y) { ; CHECK-LABEL: test_insert_128_v8i64: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrq $1, %rdi, %xmm0, %xmm1 ; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm0 ; CHECK-NEXT: retq %r = insertelement <8 x i64> %x, i64 %y, i32 1 ret <8 x i64> %r } define <16 x i32> @test_insert_128_v16i32(<16 x i32> %x, i32 %y) { ; CHECK-LABEL: test_insert_128_v16i32: ; CHECK: ## BB#0: ; CHECK-NEXT: vpinsrd $1, %edi, %xmm0, %xmm1 ; CHECK-NEXT: vinserti32x4 $0, %xmm1, %zmm0, %zmm0 ; CHECK-NEXT: retq %r = insertelement <16 x i32> %x, i32 %y, i32 1 ret <16 x i32> %r } define <8 x double> @test_insert_128_v8f64(<8 x double> %x, double %y) { ; CHECK-LABEL: test_insert_128_v8f64: ; CHECK: ## BB#0: ; CHECK-NEXT: vmovlhps {{.*#+}} xmm1 = xmm0[0],xmm1[0] ; CHECK-NEXT: vinsertf32x4 $0, %xmm1, %zmm0, %zmm0 ; CHECK-NEXT: retq %r = insertelement <8 x double> %x, double %y, i32 1 ret <8 x double> %r } define <16 x float> @test_insert_128_v16f32(<16 x float> %x, float %y) { ; CHECK-LABEL: test_insert_128_v16f32: ; CHECK: ## BB#0: ; CHECK-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0],xmm1[0],xmm0[2,3] ; CHECK-NEXT: vinsertf32x4 $0, %xmm1, %zmm0, %zmm0 ; CHECK-NEXT: retq %r = insertelement <16 x float> %x, float %y, i32 1 ret <16 x float> %r } define <16 x i16> @test_insert_128_v16i16(<16 x i16> %x, i16 %y) { ; CHECK-LABEL: test_insert_128_v16i16: ; CHECK: ## BB#0: ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm1 ; CHECK-NEXT: vpinsrw $2, %edi, %xmm1, %xmm1 ; CHECK-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 ; CHECK-NEXT: retq %r = insertelement <16 x 
i16> %x, i16 %y, i32 10 ret <16 x i16> %r } define <32 x i8> @test_insert_128_v32i8(<32 x i8> %x, i8 %y) { ; CHECK-LABEL: test_insert_128_v32i8: ; CHECK: ## BB#0: ; CHECK-NEXT: vextracti128 $1, %ymm0, %xmm1 ; CHECK-NEXT: vpinsrb $4, %edi, %xmm1, %xmm1 ; CHECK-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 ; CHECK-NEXT: retq %r = insertelement <32 x i8> %x, i8 %y, i32 20 ret <32 x i8> %r } define i32 @test_insertelement_v32i1(i32 %a, i32 %b, <32 x i32> %x , <32 x i32> %y) { ; KNL-LABEL: test_insertelement_v32i1: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-32, %rsp ; KNL-NEXT: subq $32, %rsp ; KNL-NEXT: xorl %eax, %eax ; KNL-NEXT: cmpl %esi, %edi ; KNL-NEXT: setb %al ; KNL-NEXT: vpcmpltud %zmm3, %zmm1, %k0 ; KNL-NEXT: kshiftlw $14, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: kshiftlw $15, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %edx ; KNL-NEXT: vmovd %edx, %xmm1 ; KNL-NEXT: vpinsrb $1, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $13, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $2, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $12, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $3, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $11, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $4, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $10, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $5, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $9, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $6, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $8, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $7, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $7, 
%k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $8, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $6, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $9, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $5, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $10, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $4, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $11, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $3, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $12, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $2, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $13, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftlw $1, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $14, %ecx, %xmm1, %xmm1 ; KNL-NEXT: kshiftrw $15, %k0, %k0 ; KNL-NEXT: kmovw %k0, %ecx ; KNL-NEXT: vpinsrb $15, %ecx, %xmm1, %xmm1 ; KNL-NEXT: vpcmpltud %zmm2, %zmm0, %k0 ; KNL-NEXT: kshiftlw $14, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: kshiftlw $15, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %edx ; KNL-NEXT: vmovd %edx, %xmm0 ; KNL-NEXT: vpinsrb $1, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $13, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $2, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $12, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $3, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $11, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $4, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $10, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $5, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $9, %k0, %k1 ; KNL-NEXT: kshiftrw $15, 
%k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $6, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $8, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $7, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $7, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $8, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $6, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $9, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $5, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $10, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $4, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $11, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $3, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $12, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $2, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $13, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftlw $1, %k0, %k1 ; KNL-NEXT: kshiftrw $15, %k1, %k1 ; KNL-NEXT: kmovw %k1, %ecx ; KNL-NEXT: vpinsrb $14, %ecx, %xmm0, %xmm0 ; KNL-NEXT: kshiftrw $15, %k0, %k0 ; KNL-NEXT: kmovw %k0, %ecx ; KNL-NEXT: vpinsrb $15, %ecx, %xmm0, %xmm0 ; KNL-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0 ; KNL-NEXT: vpsllw $7, %ymm0, %ymm0 ; KNL-NEXT: vpand {{.*}}(%rip), %ymm0, %ymm0 ; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; KNL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; KNL-NEXT: vextracti128 $1, %ymm0, %xmm1 ; KNL-NEXT: vpmovsxbd %xmm1, %zmm1 ; KNL-NEXT: vpslld $31, %zmm1, %zmm1 ; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0 ; KNL-NEXT: kmovw %k0, {{[0-9]+}}(%rsp) ; KNL-NEXT: vpinsrb $4, %eax, %xmm0, %xmm0 ; KNL-NEXT: vpmovsxbd %xmm0, %zmm0 ; KNL-NEXT: vpslld $31, %zmm0, %zmm0 ; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0 ; KNL-NEXT: kmovw %k0, (%rsp) ; KNL-NEXT: movl (%rsp), %eax ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper 
; KNL-NEXT: retq ; ; SKX-LABEL: test_insertelement_v32i1: ; SKX: ## BB#0: ; SKX-NEXT: cmpl %esi, %edi ; SKX-NEXT: setb %al ; SKX-NEXT: vpcmpltud %zmm2, %zmm0, %k0 ; SKX-NEXT: vpcmpltud %zmm3, %zmm1, %k1 ; SKX-NEXT: kunpckwd %k0, %k1, %k0 ; SKX-NEXT: vpmovm2w %k0, %zmm0 ; SKX-NEXT: kmovd %eax, %k0 ; SKX-NEXT: vpmovm2w %k0, %zmm1 ; SKX-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,2,3,32,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] ; SKX-NEXT: vpermi2w %zmm1, %zmm0, %zmm2 ; SKX-NEXT: vpmovw2m %zmm2, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %cmp_res_i1 = icmp ult i32 %a, %b %cmp_cmp_vec = icmp ult <32 x i32> %x, %y %maskv = insertelement <32 x i1> %cmp_cmp_vec, i1 %cmp_res_i1, i32 4 %res = bitcast <32 x i1> %maskv to i32 ret i32 %res } define i8 @test_iinsertelement_v4i1(i32 %a, i32 %b, <4 x i32> %x , <4 x i32> %y) { ; KNL-LABEL: test_iinsertelement_v4i1: ; KNL: ## BB#0: ; KNL-NEXT: cmpl %esi, %edi ; KNL-NEXT: setb %al ; KNL-NEXT: vpbroadcastd {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648] ; KNL-NEXT: vpxor %xmm2, %xmm0, %xmm0 ; KNL-NEXT: vpxor %xmm2, %xmm1, %xmm1 ; KNL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0 ; KNL-NEXT: vpextrb $4, %xmm0, %ecx ; KNL-NEXT: kmovw %ecx, %k1 ; KNL-NEXT: vpternlogq $255, %zmm1, %zmm1, %zmm1 {%k1} {z} ; KNL-NEXT: vpextrb $0, %xmm0, %ecx ; KNL-NEXT: kmovw %ecx, %k1 ; KNL-NEXT: vpternlogq $255, %zmm2, %zmm2, %zmm2 {%k1} {z} ; KNL-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,8,2,3,4,5,6,7] ; KNL-NEXT: vpermi2q %zmm1, %zmm2, %zmm3 ; KNL-NEXT: vpsllq $63, %zmm3, %zmm1 ; KNL-NEXT: vptestmq %zmm1, %zmm1, %k1 ; KNL-NEXT: vpternlogq $255, %zmm1, %zmm1, %zmm1 {%k1} {z} ; KNL-NEXT: kmovw %eax, %k1 ; KNL-NEXT: vpternlogq $255, %zmm2, %zmm2, %zmm2 {%k1} {z} ; KNL-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,8,3,4,5,6,7] ; KNL-NEXT: vpermi2q %zmm2, %zmm1, %zmm3 ; KNL-NEXT: vpsllq $63, %zmm3, %zmm1 ; KNL-NEXT: vptestmq %zmm1, %zmm1, %k1 ; KNL-NEXT: vpternlogq $255, %zmm1, %zmm1, %zmm1 {%k1} {z} ; KNL-NEXT: 
; KNL-NEXT:    vpextrb $12, %xmm0, %eax
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,1,2,8,4,5,6,7]
; KNL-NEXT:    vpermi2q %zmm0, %zmm1, %zmm2
; KNL-NEXT:    vpsllq $63, %zmm2, %zmm0
; KNL-NEXT:    vptestmq %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: %AL %AL %EAX
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_iinsertelement_v4i1:
; SKX:       ## BB#0:
; SKX-NEXT:    cmpl %esi, %edi
; SKX-NEXT:    setb %al
; SKX-NEXT:    vpcmpltud %xmm1, %xmm0, %k0
; SKX-NEXT:    vpmovm2d %k0, %xmm0
; SKX-NEXT:    kmovd %eax, %k0
; SKX-NEXT:    vpmovm2d %k0, %xmm1
; SKX-NEXT:    vpbroadcastq %xmm1, %xmm1
; SKX-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2],xmm0[3]
; SKX-NEXT:    vpmovd2m %xmm0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: %AL %AL %EAX
; SKX-NEXT:    retq
  %cmp_res_i1 = icmp ult i32 %a, %b
  %cmp_cmp_vec = icmp ult <4 x i32> %x, %y
  %maskv = insertelement <4 x i1> %cmp_cmp_vec, i1 %cmp_res_i1, i32 2
; NOTE(review): the shuffle mask constant below was missing (invalid IR);
; reconstructed as the standard <4 x i1> -> <8 x i1> widening where lanes 4-7
; come from the undef operand, matching the <8 x i1> -> i8 bitcast that follows.
  %res0 = shufflevector <4 x i1> %maskv, <4 x i1> undef , <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %res = bitcast <8 x i1> %res0 to i8
  ret i8 %res
}

define i8 @test_iinsertelement_v2i1(i32 %a, i32 %b, <2 x i64> %x , <2 x i64> %y) {
; KNL-LABEL: test_iinsertelement_v2i1:
; KNL:       ## BB#0:
; KNL-NEXT:    cmpl %esi, %edi
; KNL-NEXT:    setb %al
; KNL-NEXT:    vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; KNL-NEXT:    vpxor %xmm2, %xmm0, %xmm0
; KNL-NEXT:    vpxor %xmm2, %xmm1, %xmm1
; KNL-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm0
; KNL-NEXT:    vpextrb $0, %xmm0, %ecx
; KNL-NEXT:    kmovw %ecx, %k1
; KNL-NEXT:    vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT:    kmovw %eax, %k1
; KNL-NEXT:    vpternlogq $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
; KNL-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,8,2,3,4,5,6,7]
; KNL-NEXT:    vpermi2q %zmm1, %zmm0, %zmm2
; KNL-NEXT:    vpsllq $63, %zmm2, %zmm0
; KNL-NEXT:    vptestmq %zmm0, %zmm0, %k0
; KNL-NEXT:    kmovw %k0, %eax
; KNL-NEXT:    ## kill: %AL %AL %EAX
; KNL-NEXT:    vzeroupper
; KNL-NEXT:    retq
;
; SKX-LABEL: test_iinsertelement_v2i1:
; SKX:       ## BB#0:
; SKX-NEXT:    cmpl %esi, %edi
; SKX-NEXT:    setb %al
; SKX-NEXT:    vpcmpltuq %xmm1, %xmm0, %k0
; SKX-NEXT:    kmovd %eax, %k1
; SKX-NEXT:    kshiftlw $1, %k1, %k1
; SKX-NEXT:    kshiftlw $1, %k0, %k0
; SKX-NEXT:    kshiftrw $1, %k0, %k0
; SKX-NEXT:    korw %k1, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    ## kill: %AL %AL %EAX
; SKX-NEXT:    retq
  %cmp_res_i1 = icmp ult i32 %a, %b
  %cmp_cmp_vec = icmp ult <2 x i64> %x, %y
  %maskv = insertelement <2 x i1> %cmp_cmp_vec, i1 %cmp_res_i1, i32 1
; NOTE(review): the shuffle mask constant below was missing (invalid IR);
; reconstructed so bits 0-1 come from %maskv and the remaining lanes reference
; the undef operand (indices 2-3), i.e. bits 2-7 of the i8 result are undef.
  %res0 = shufflevector <2 x i1> %maskv, <2 x i1> undef , <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 2, i32 2, i32 2, i32 2>
  %res = bitcast <8 x i1> %res0 to i8
  ret i8 %res
}

define zeroext i8 @test_extractelement_v2i1(<2 x i64> %a, <2 x i64> %b) {
; KNL-LABEL: test_extractelement_v2i1:
; KNL:       ## BB#0:
; KNL-NEXT:    vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; KNL-NEXT:    vpxor %xmm2, %xmm1, %xmm1
; KNL-NEXT:    vpxor %xmm2, %xmm0, %xmm0
; KNL-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm0
; KNL-NEXT:    vpextrb $0, %xmm0, %eax
; KNL-NEXT:    andb $1, %al
; KNL-NEXT:    movb $4, %cl
; KNL-NEXT:    subb %al, %cl
; KNL-NEXT:    movzbl %cl, %eax
; KNL-NEXT:    retq
;
; SKX-LABEL: test_extractelement_v2i1:
; SKX:       ## BB#0:
; SKX-NEXT:    vpcmpnleuq %xmm1, %xmm0, %k0
; SKX-NEXT:    kshiftlw $15, %k0, %k0
; SKX-NEXT:    kshiftrw $15, %k0, %k0
; SKX-NEXT:    kmovd %k0, %eax
; SKX-NEXT:    andb $1, %al
; SKX-NEXT:    movb $4, %cl
; SKX-NEXT:    subb %al, %cl
; SKX-NEXT:    movzbl %cl, %eax
; SKX-NEXT:    retq
  %t1 = icmp ugt <2 x i64> %a, %b
  %t2 = extractelement <2 x i1> %t1, i32 0
  %res = select i1 %t2, i8 3, i8 4
  ret i8 %res
}

define zeroext i8 @extractelement_v2i1_alt(<2 x i64> %a, <2 x i64> %b) {
; KNL-LABEL: extractelement_v2i1_alt:
; KNL:       ## BB#0:
; KNL-NEXT:    vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; KNL-NEXT:    vpxor %xmm2, %xmm1, %xmm1
; KNL-NEXT:    vpxor %xmm2, %xmm0, %xmm0
; KNL-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm0
; KNL-NEXT:    vpextrb $0, %xmm0, %eax
; KNL-NEXT:    andb $1, %al
; KNL-NEXT:    movb $4, %cl
; KNL-NEXT:    subb %al, %cl
; KNL-NEXT:
movzbl %cl, %eax ; KNL-NEXT: retq ; ; SKX-LABEL: extractelement_v2i1_alt: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpnleuq %xmm1, %xmm0, %k0 ; SKX-NEXT: kshiftlw $15, %k0, %k0 ; SKX-NEXT: kshiftrw $15, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: andb $1, %al ; SKX-NEXT: movb $4, %cl ; SKX-NEXT: subb %al, %cl ; SKX-NEXT: movzbl %cl, %eax ; SKX-NEXT: retq %t1 = icmp ugt <2 x i64> %a, %b %t2 = extractelement <2 x i1> %t1, i32 0 %sext = sext i1 %t2 to i8 %res = add i8 %sext, 4 ret i8 %res } define zeroext i8 @test_extractelement_v4i1(<4 x i32> %a, <4 x i32> %b) { ; KNL-LABEL: test_extractelement_v4i1: ; KNL: ## BB#0: ; KNL-NEXT: vpbroadcastd {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648] ; KNL-NEXT: vpxor %xmm2, %xmm1, %xmm1 ; KNL-NEXT: vpxor %xmm2, %xmm0, %xmm0 ; KNL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0 ; KNL-NEXT: vpextrd $3, %xmm0, %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_v4i1: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpnleud %xmm1, %xmm0, %k0 ; SKX-NEXT: kshiftlw $12, %k0, %k0 ; SKX-NEXT: kshiftrw $15, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: retq %t1 = icmp ugt <4 x i32> %a, %b %t2 = extractelement <4 x i1> %t1, i32 3 %res = zext i1 %t2 to i8 ret i8 %res } define zeroext i8 @test_extractelement_v32i1(<32 x i8> %a, <32 x i8> %b) { ; KNL-LABEL: test_extractelement_v32i1: ; KNL: ## BB#0: ; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128] ; KNL-NEXT: vpxor %ymm2, %ymm1, %ymm1 ; KNL-NEXT: vpxor %ymm2, %ymm0, %ymm0 ; KNL-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 ; KNL-NEXT: vpextrb $2, %xmm0, %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_v32i1: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpnleub %ymm1, %ymm0, %k0 ; SKX-NEXT: kshiftld $29, %k0, %k0 ; SKX-NEXT: kshiftrd $31, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: andl $1, %eax ; 
SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t1 = icmp ugt <32 x i8> %a, %b %t2 = extractelement <32 x i1> %t1, i32 2 %res = zext i1 %t2 to i8 ret i8 %res } define zeroext i8 @test_extractelement_v64i1(<64 x i8> %a, <64 x i8> %b) { ; KNL-LABEL: test_extractelement_v64i1: ; KNL: ## BB#0: ; KNL-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128] ; KNL-NEXT: vpxor %ymm0, %ymm3, %ymm2 ; KNL-NEXT: vpxor %ymm0, %ymm1, %ymm0 ; KNL-NEXT: vpcmpgtb %ymm2, %ymm0, %ymm0 ; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0 ; KNL-NEXT: vpextrb $15, %xmm0, %eax ; KNL-NEXT: andb $1, %al ; KNL-NEXT: movb $4, %cl ; KNL-NEXT: subb %al, %cl ; KNL-NEXT: movzbl %cl, %eax ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_v64i1: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpnleub %zmm1, %zmm0, %k0 ; SKX-NEXT: kshiftrq $63, %k0, %k0 ; SKX-NEXT: kmovd %k0, %eax ; SKX-NEXT: andb $1, %al ; SKX-NEXT: movb $4, %cl ; SKX-NEXT: subb %al, %cl ; SKX-NEXT: movzbl %cl, %eax ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t1 = icmp ugt <64 x i8> %a, %b %t2 = extractelement <64 x i1> %t1, i32 63 %res = select i1 %t2, i8 3, i8 4 ret i8 %res } define zeroext i8 @extractelement_v64i1_alt(<64 x i8> %a, <64 x i8> %b) { ; KNL-LABEL: extractelement_v64i1_alt: ; KNL: ## BB#0: ; KNL-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128] ; KNL-NEXT: vpxor %ymm0, %ymm3, %ymm2 ; KNL-NEXT: vpxor %ymm0, %ymm1, %ymm0 ; KNL-NEXT: vpcmpgtb %ymm2, %ymm0, %ymm0 ; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0 ; KNL-NEXT: vpextrb $15, %xmm0, %eax ; KNL-NEXT: andb $1, %al ; KNL-NEXT: movb $4, %cl ; KNL-NEXT: subb %al, %cl ; KNL-NEXT: movzbl %cl, %eax ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: extractelement_v64i1_alt: ; SKX: ## BB#0: ; SKX-NEXT: vpcmpnleub %zmm1, %zmm0, %k0 ; SKX-NEXT: kshiftrq $63, %k0, %k0 ; SKX-NEXT: 
kmovd %k0, %eax ; SKX-NEXT: andb $1, %al ; SKX-NEXT: movb $4, %cl ; SKX-NEXT: subb %al, %cl ; SKX-NEXT: movzbl %cl, %eax ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t1 = icmp ugt <64 x i8> %a, %b %t2 = extractelement <64 x i1> %t1, i32 63 %sext = sext i1 %t2 to i8 %res = add i8 %sext, 4 ret i8 %res } define i64 @test_extractelement_variable_v2i64(<2 x i64> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v2i64: ; CHECK: ## BB#0: ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %xmm0, -{{[0-9]+}}(%rsp) ; CHECK-NEXT: andl $1, %edi ; CHECK-NEXT: movq -24(%rsp,%rdi,8), %rax ; CHECK-NEXT: retq %t2 = extractelement <2 x i64> %t1, i32 %index ret i64 %t2 } define i64 @test_extractelement_variable_v4i64(<4 x i64> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v4i64: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $3, %edi ; CHECK-NEXT: movq (%rsp,%rdi,8), %rax ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <4 x i64> %t1, i32 %index ret i64 %t2 } define i64 @test_extractelement_variable_v8i64(<8 x i64> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v8i64: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: movq (%rsp,%rdi,8), %rax ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <8 x i64> 
%t1, i32 %index ret i64 %t2 } define double @test_extractelement_variable_v2f64(<2 x double> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v2f64: ; CHECK: ## BB#0: ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %xmm0, -{{[0-9]+}}(%rsp) ; CHECK-NEXT: andl $1, %edi ; CHECK-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero ; CHECK-NEXT: retq %t2 = extractelement <2 x double> %t1, i32 %index ret double %t2 } define double @test_extractelement_variable_v4f64(<4 x double> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v4f64: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $3, %edi ; CHECK-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <4 x double> %t1, i32 %index ret double %t2 } define double @test_extractelement_variable_v8f64(<8 x double> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v8f64: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <8 x double> %t1, i32 %index ret double %t2 } define i32 @test_extractelement_variable_v4i32(<4 x i32> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v4i32: ; CHECK: ## BB#0: ; CHECK-NEXT: ## kill: %EDI %EDI 
%RDI ; CHECK-NEXT: vmovaps %xmm0, -{{[0-9]+}}(%rsp) ; CHECK-NEXT: andl $3, %edi ; CHECK-NEXT: movl -24(%rsp,%rdi,4), %eax ; CHECK-NEXT: retq %t2 = extractelement <4 x i32> %t1, i32 %index ret i32 %t2 } define i32 @test_extractelement_variable_v8i32(<8 x i32> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v8i32: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: movl (%rsp,%rdi,4), %eax ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <8 x i32> %t1, i32 %index ret i32 %t2 } define i32 @test_extractelement_variable_v16i32(<16 x i32> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v16i32: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $15, %edi ; CHECK-NEXT: movl (%rsp,%rdi,4), %eax ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <16 x i32> %t1, i32 %index ret i32 %t2 } define float @test_extractelement_variable_v4f32(<4 x float> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v4f32: ; CHECK: ## BB#0: ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %xmm0, -{{[0-9]+}}(%rsp) ; CHECK-NEXT: andl $3, %edi ; CHECK-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero ; CHECK-NEXT: retq %t2 = extractelement <4 x float> %t1, i32 %index ret float %t2 } define float 
@test_extractelement_variable_v8f32(<8 x float> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v8f32: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <8 x float> %t1, i32 %index ret float %t2 } define float @test_extractelement_variable_v16f32(<16 x float> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v16f32: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-64, %rsp ; CHECK-NEXT: subq $128, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %zmm0, (%rsp) ; CHECK-NEXT: andl $15, %edi ; CHECK-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <16 x float> %t1, i32 %index ret float %t2 } define i16 @test_extractelement_variable_v8i16(<8 x i16> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v8i16: ; CHECK: ## BB#0: ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %xmm0, -{{[0-9]+}}(%rsp) ; CHECK-NEXT: andl $7, %edi ; CHECK-NEXT: movzwl -24(%rsp,%rdi,2), %eax ; CHECK-NEXT: retq %t2 = extractelement <8 x i16> %t1, i32 %index ret i16 %t2 } define i16 @test_extractelement_variable_v16i16(<16 x i16> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v16i16: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; 
CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $15, %edi ; CHECK-NEXT: movzwl (%rsp,%rdi,2), %eax ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <16 x i16> %t1, i32 %index ret i16 %t2 } define i16 @test_extractelement_variable_v32i16(<32 x i16> %t1, i32 %index) { ; KNL-LABEL: test_extractelement_variable_v32i16: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-64, %rsp ; KNL-NEXT: subq $128, %rsp ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: vmovaps %ymm1, {{[0-9]+}}(%rsp) ; KNL-NEXT: vmovaps %ymm0, (%rsp) ; KNL-NEXT: andl $31, %edi ; KNL-NEXT: movzwl (%rsp,%rdi,2), %eax ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_variable_v32i16: ; SKX: ## BB#0: ; SKX-NEXT: pushq %rbp ; SKX-NEXT: .cfi_def_cfa_offset 16 ; SKX-NEXT: .cfi_offset %rbp, -16 ; SKX-NEXT: movq %rsp, %rbp ; SKX-NEXT: .cfi_def_cfa_register %rbp ; SKX-NEXT: andq $-64, %rsp ; SKX-NEXT: subq $128, %rsp ; SKX-NEXT: ## kill: %EDI %EDI %RDI ; SKX-NEXT: vmovaps %zmm0, (%rsp) ; SKX-NEXT: andl $31, %edi ; SKX-NEXT: movzwl (%rsp,%rdi,2), %eax ; SKX-NEXT: movq %rbp, %rsp ; SKX-NEXT: popq %rbp ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t2 = extractelement <32 x i16> %t1, i32 %index ret i16 %t2 } define i8 @test_extractelement_variable_v16i8(<16 x i8> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v16i8: ; CHECK: ## BB#0: ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %xmm0, -{{[0-9]+}}(%rsp) ; CHECK-NEXT: andl $15, %edi ; CHECK-NEXT: leaq -{{[0-9]+}}(%rsp), %rax ; CHECK-NEXT: movb 
(%rdi,%rax), %al ; CHECK-NEXT: retq %t2 = extractelement <16 x i8> %t1, i32 %index ret i8 %t2 } define i8 @test_extractelement_variable_v32i8(<32 x i8> %t1, i32 %index) { ; CHECK-LABEL: test_extractelement_variable_v32i8: ; CHECK: ## BB#0: ; CHECK-NEXT: pushq %rbp ; CHECK-NEXT: .cfi_def_cfa_offset 16 ; CHECK-NEXT: .cfi_offset %rbp, -16 ; CHECK-NEXT: movq %rsp, %rbp ; CHECK-NEXT: .cfi_def_cfa_register %rbp ; CHECK-NEXT: andq $-32, %rsp ; CHECK-NEXT: subq $64, %rsp ; CHECK-NEXT: ## kill: %EDI %EDI %RDI ; CHECK-NEXT: vmovaps %ymm0, (%rsp) ; CHECK-NEXT: andl $31, %edi ; CHECK-NEXT: movq %rsp, %rax ; CHECK-NEXT: movb (%rdi,%rax), %al ; CHECK-NEXT: movq %rbp, %rsp ; CHECK-NEXT: popq %rbp ; CHECK-NEXT: vzeroupper ; CHECK-NEXT: retq %t2 = extractelement <32 x i8> %t1, i32 %index ret i8 %t2 } define i8 @test_extractelement_variable_v64i8(<64 x i8> %t1, i32 %index) { ; KNL-LABEL: test_extractelement_variable_v64i8: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-64, %rsp ; KNL-NEXT: subq $128, %rsp ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: vmovaps %ymm1, {{[0-9]+}}(%rsp) ; KNL-NEXT: vmovaps %ymm0, (%rsp) ; KNL-NEXT: andl $63, %edi ; KNL-NEXT: movq %rsp, %rax ; KNL-NEXT: movb (%rdi,%rax), %al ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_variable_v64i8: ; SKX: ## BB#0: ; SKX-NEXT: pushq %rbp ; SKX-NEXT: .cfi_def_cfa_offset 16 ; SKX-NEXT: .cfi_offset %rbp, -16 ; SKX-NEXT: movq %rsp, %rbp ; SKX-NEXT: .cfi_def_cfa_register %rbp ; SKX-NEXT: andq $-64, %rsp ; SKX-NEXT: subq $128, %rsp ; SKX-NEXT: ## kill: %EDI %EDI %RDI ; SKX-NEXT: vmovaps %zmm0, (%rsp) ; SKX-NEXT: andl $63, %edi ; SKX-NEXT: movq %rsp, %rax ; SKX-NEXT: movb (%rdi,%rax), %al ; SKX-NEXT: movq %rbp, %rsp ; SKX-NEXT: popq %rbp ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t2 = extractelement <64 
x i8> %t1, i32 %index ret i8 %t2 } define i8 @test_extractelement_variable_v64i8_indexi8(<64 x i8> %t1, i8 %index) { ; KNL-LABEL: test_extractelement_variable_v64i8_indexi8: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-64, %rsp ; KNL-NEXT: subq $128, %rsp ; KNL-NEXT: addb %dil, %dil ; KNL-NEXT: vmovaps %ymm1, {{[0-9]+}}(%rsp) ; KNL-NEXT: vmovaps %ymm0, (%rsp) ; KNL-NEXT: movzbl %dil, %eax ; KNL-NEXT: andl $63, %eax ; KNL-NEXT: movq %rsp, %rcx ; KNL-NEXT: movb (%rax,%rcx), %al ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_variable_v64i8_indexi8: ; SKX: ## BB#0: ; SKX-NEXT: pushq %rbp ; SKX-NEXT: .cfi_def_cfa_offset 16 ; SKX-NEXT: .cfi_offset %rbp, -16 ; SKX-NEXT: movq %rsp, %rbp ; SKX-NEXT: .cfi_def_cfa_register %rbp ; SKX-NEXT: andq $-64, %rsp ; SKX-NEXT: subq $128, %rsp ; SKX-NEXT: addb %dil, %dil ; SKX-NEXT: vmovaps %zmm0, (%rsp) ; SKX-NEXT: movzbl %dil, %eax ; SKX-NEXT: andl $63, %eax ; SKX-NEXT: movq %rsp, %rcx ; SKX-NEXT: movb (%rax,%rcx), %al ; SKX-NEXT: movq %rbp, %rsp ; SKX-NEXT: popq %rbp ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %i = add i8 %index, %index %t2 = extractelement <64 x i8> %t1, i8 %i ret i8 %t2 } define zeroext i8 @test_extractelement_varible_v2i1(<2 x i64> %a, <2 x i64> %b, i32 %index) { ; KNL-LABEL: test_extractelement_varible_v2i1: ; KNL: ## BB#0: ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808] ; KNL-NEXT: vpxor %xmm2, %xmm1, %xmm1 ; KNL-NEXT: vpxor %xmm2, %xmm0, %xmm0 ; KNL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; KNL-NEXT: vmovdqa %xmm0, -{{[0-9]+}}(%rsp) ; KNL-NEXT: andl $1, %edi ; KNL-NEXT: movl -24(%rsp,%rdi,8), %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_varible_v2i1: ; SKX: ## BB#0: ; SKX-NEXT: ## kill: %EDI 
%EDI %RDI ; SKX-NEXT: vpcmpnleuq %xmm1, %xmm0, %k0 ; SKX-NEXT: vpmovm2q %k0, %xmm0 ; SKX-NEXT: vmovdqa %xmm0, -{{[0-9]+}}(%rsp) ; SKX-NEXT: andl $1, %edi ; SKX-NEXT: movzbl -24(%rsp,%rdi,8), %eax ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: retq %t1 = icmp ugt <2 x i64> %a, %b %t2 = extractelement <2 x i1> %t1, i32 %index %res = zext i1 %t2 to i8 ret i8 %res } define zeroext i8 @test_extractelement_varible_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %index) { ; KNL-LABEL: test_extractelement_varible_v4i1: ; KNL: ## BB#0: ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: vpbroadcastd {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648] ; KNL-NEXT: vpxor %xmm2, %xmm1, %xmm1 ; KNL-NEXT: vpxor %xmm2, %xmm0, %xmm0 ; KNL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0 ; KNL-NEXT: vmovdqa %xmm0, -{{[0-9]+}}(%rsp) ; KNL-NEXT: andl $3, %edi ; KNL-NEXT: movl -24(%rsp,%rdi,4), %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_varible_v4i1: ; SKX: ## BB#0: ; SKX-NEXT: ## kill: %EDI %EDI %RDI ; SKX-NEXT: vpcmpnleud %xmm1, %xmm0, %k0 ; SKX-NEXT: vpmovm2d %k0, %xmm0 ; SKX-NEXT: vmovdqa %xmm0, -{{[0-9]+}}(%rsp) ; SKX-NEXT: andl $3, %edi ; SKX-NEXT: movzbl -24(%rsp,%rdi,4), %eax ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: retq %t1 = icmp ugt <4 x i32> %a, %b %t2 = extractelement <4 x i1> %t1, i32 %index %res = zext i1 %t2 to i8 ret i8 %res } define zeroext i8 @test_extractelement_varible_v8i1(<8 x i32> %a, <8 x i32> %b, i32 %index) { ; KNL-LABEL: test_extractelement_varible_v8i1: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-64, %rsp ; KNL-NEXT: subq $128, %rsp ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: ## kill: %YMM1 %YMM1 %ZMM1 ; KNL-NEXT: ## kill: %YMM0 %YMM0 %ZMM0 ; KNL-NEXT: vpcmpnleud %zmm1, %zmm0, %k1 ; KNL-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; KNL-NEXT: vmovdqa64 %zmm0, (%rsp) ; KNL-NEXT: andl $7, 
%edi ; KNL-NEXT: movzbl (%rsp,%rdi,8), %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_varible_v8i1: ; SKX: ## BB#0: ; SKX-NEXT: pushq %rbp ; SKX-NEXT: .cfi_def_cfa_offset 16 ; SKX-NEXT: .cfi_offset %rbp, -16 ; SKX-NEXT: movq %rsp, %rbp ; SKX-NEXT: .cfi_def_cfa_register %rbp ; SKX-NEXT: andq $-64, %rsp ; SKX-NEXT: subq $128, %rsp ; SKX-NEXT: ## kill: %EDI %EDI %RDI ; SKX-NEXT: vpcmpnleud %ymm1, %ymm0, %k0 ; SKX-NEXT: vpmovm2q %k0, %zmm0 ; SKX-NEXT: vmovdqa64 %zmm0, (%rsp) ; SKX-NEXT: andl $7, %edi ; SKX-NEXT: movzbl (%rsp,%rdi,8), %eax ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: movq %rbp, %rsp ; SKX-NEXT: popq %rbp ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t1 = icmp ugt <8 x i32> %a, %b %t2 = extractelement <8 x i1> %t1, i32 %index %res = zext i1 %t2 to i8 ret i8 %res } define zeroext i8 @test_extractelement_varible_v16i1(<16 x i32> %a, <16 x i32> %b, i32 %index) { ; KNL-LABEL: test_extractelement_varible_v16i1: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-64, %rsp ; KNL-NEXT: subq $128, %rsp ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: vpcmpnleud %zmm1, %zmm0, %k1 ; KNL-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; KNL-NEXT: vmovdqa32 %zmm0, (%rsp) ; KNL-NEXT: andl $15, %edi ; KNL-NEXT: movzbl (%rsp,%rdi,4), %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_varible_v16i1: ; SKX: ## BB#0: ; SKX-NEXT: pushq %rbp ; SKX-NEXT: .cfi_def_cfa_offset 16 ; SKX-NEXT: .cfi_offset %rbp, -16 ; SKX-NEXT: movq %rsp, %rbp ; SKX-NEXT: .cfi_def_cfa_register %rbp ; SKX-NEXT: andq $-64, %rsp ; SKX-NEXT: subq $128, %rsp ; SKX-NEXT: ## kill: %EDI %EDI %RDI ; SKX-NEXT: vpcmpnleud %zmm1, %zmm0, %k0 ; SKX-NEXT: vpmovm2d %k0, 
%zmm0 ; SKX-NEXT: vmovdqa32 %zmm0, (%rsp) ; SKX-NEXT: andl $15, %edi ; SKX-NEXT: movzbl (%rsp,%rdi,4), %eax ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: movq %rbp, %rsp ; SKX-NEXT: popq %rbp ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t1 = icmp ugt <16 x i32> %a, %b %t2 = extractelement <16 x i1> %t1, i32 %index %res = zext i1 %t2 to i8 ret i8 %res } define zeroext i8 @test_extractelement_varible_v32i1(<32 x i8> %a, <32 x i8> %b, i32 %index) { ; KNL-LABEL: test_extractelement_varible_v32i1: ; KNL: ## BB#0: ; KNL-NEXT: pushq %rbp ; KNL-NEXT: .cfi_def_cfa_offset 16 ; KNL-NEXT: .cfi_offset %rbp, -16 ; KNL-NEXT: movq %rsp, %rbp ; KNL-NEXT: .cfi_def_cfa_register %rbp ; KNL-NEXT: andq $-32, %rsp ; KNL-NEXT: subq $64, %rsp ; KNL-NEXT: ## kill: %EDI %EDI %RDI ; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128] ; KNL-NEXT: vpxor %ymm2, %ymm1, %ymm1 ; KNL-NEXT: vpxor %ymm2, %ymm0, %ymm0 ; KNL-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 ; KNL-NEXT: vmovdqa %ymm0, (%rsp) ; KNL-NEXT: andl $31, %edi ; KNL-NEXT: movq %rsp, %rax ; KNL-NEXT: movzbl (%rdi,%rax), %eax ; KNL-NEXT: andl $1, %eax ; KNL-NEXT: movq %rbp, %rsp ; KNL-NEXT: popq %rbp ; KNL-NEXT: vzeroupper ; KNL-NEXT: retq ; ; SKX-LABEL: test_extractelement_varible_v32i1: ; SKX: ## BB#0: ; SKX-NEXT: pushq %rbp ; SKX-NEXT: .cfi_def_cfa_offset 16 ; SKX-NEXT: .cfi_offset %rbp, -16 ; SKX-NEXT: movq %rsp, %rbp ; SKX-NEXT: .cfi_def_cfa_register %rbp ; SKX-NEXT: andq $-64, %rsp ; SKX-NEXT: subq $128, %rsp ; SKX-NEXT: ## kill: %EDI %EDI %RDI ; SKX-NEXT: vpcmpnleub %ymm1, %ymm0, %k0 ; SKX-NEXT: vpmovm2w %k0, %zmm0 ; SKX-NEXT: vmovdqa32 %zmm0, (%rsp) ; SKX-NEXT: andl $31, %edi ; SKX-NEXT: movzbl (%rsp,%rdi,2), %eax ; SKX-NEXT: andl $1, %eax ; SKX-NEXT: movq %rbp, %rsp ; SKX-NEXT: popq %rbp ; SKX-NEXT: vzeroupper ; SKX-NEXT: retq %t1 = icmp ugt <32 x i8> %a, %b %t2 = extractelement <32 x i1> %t1, i32 %index %res = zext i1 %t2 to i8 ret i8 
%res
}

define <8 x i64> @insert_double_zero(<2 x i64> %a) nounwind {
; CHECK-LABEL: insert_double_zero:
; CHECK:       ## BB#0:
; CHECK-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vinsertf32x4 $2, %xmm0, %zmm1, %zmm0
; CHECK-NEXT:    retq
; NOTE(review): all three shuffle mask constants below were missing (invalid
; IR). Reconstructed to match the function's own CHECK lines: the final vector
; is zero everywhere except 128-bit lane 2 (i64 elements 4-5), which holds %a.
  %b = shufflevector <2 x i64> %a, <2 x i64> zeroinitializer, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %d = shufflevector <4 x i64> %b, <4 x i64> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %e = shufflevector <8 x i64> %d, <8 x i64> zeroinitializer, <8 x i32> <i32 8, i32 9, i32 10, i32 11, i32 0, i32 1, i32 2, i32 3>
  ret <8 x i64> %e
}