Diffstat (limited to 'llvm/test/CodeGen/X86/build-vector-512.ll')
-rw-r--r--  llvm/test/CodeGen/X86/build-vector-512.ll | 648
1 file changed, 216 insertions(+), 432 deletions(-)
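
The hunks below merge the previously duplicated AVX512F-32/AVX512BW-32 and AVX512F-64/AVX512BW-64 check blocks into shared AVX-32 and AVX-64 prefixes, since the AVX512F and AVX512BW configurations now produce identical code for these build vectors (hence the roughly 2:1 deletion-to-insertion ratio in the diffstat). As a minimal sketch of how shared prefixes like these are typically wired up in an LLVM codegen test's RUN lines (the triples, attributes, and exact prefix spellings here are assumptions, not taken from this diff):

; Hypothetical RUN lines: each FileCheck invocation names the shared prefix
; (AVX-32 or AVX-64) plus a subtarget-specific one, so output common to
; AVX512F and AVX512BW only needs to be checked once.
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX-32,AVX512F-32
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX-32,AVX512BW-32
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX-64,AVX512F-64
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX-64,AVX512BW-64

The CHECK lines themselves are normally regenerated rather than hand-edited, e.g. with llvm/utils/update_llc_test_checks.py llvm/test/CodeGen/X86/build-vector-512.ll.
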
diff --git a/llvm/test/CodeGen/X86/build-vector-512.ll b/llvm/test/CodeGen/X86/build-vector-512.ll
index aba8b13db96..4bc731cbf02 100644
--- a/llvm/test/CodeGen/X86/build-vector-512.ll
+++ b/llvm/test/CodeGen/X86/build-vector-512.ll
@@ -156,159 +156,83 @@ define <16 x i32> @test_buildvector_v16i32(i32 %a0, i32 %a1, i32 %a2, i32 %a3, i
}
define <32 x i16> @test_buildvector_v32i16(i16 %a0, i16 %a1, i16 %a2, i16 %a3, i16 %a4, i16 %a5, i16 %a6, i16 %a7, i16 %a8, i16 %a9, i16 %a10, i16 %a11, i16 %a12, i16 %a13, i16 %a14, i16 %a15, i16 %a16, i16 %a17, i16 %a18, i16 %a19, i16 %a20, i16 %a21, i16 %a22, i16 %a23, i16 %a24, i16 %a25, i16 %a26, i16 %a27, i16 %a28, i16 %a29, i16 %a30, i16 %a31) {
-; AVX512F-32-LABEL: test_buildvector_v32i16:
-; AVX512F-32: # %bb.0:
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512F-32-NEXT: retl
-;
-; AVX512F-64-LABEL: test_buildvector_v32i16:
-; AVX512F-64: # %bb.0:
-; AVX512F-64-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512F-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512F-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
-; AVX512F-64-NEXT: vmovd %edi, %xmm0
-; AVX512F-64-NEXT: vpinsrw $1, %esi, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $2, %edx, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $3, %ecx, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $4, %r8d, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $5, %r9d, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512F-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
-; AVX512F-64-NEXT: retq
-;
-; AVX512BW-32-LABEL: test_buildvector_v32i16:
-; AVX512BW-32: # %bb.0:
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512BW-32-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512BW-32-NEXT: retl
+; AVX-32-LABEL: test_buildvector_v32i16:
+; AVX-32: # %bb.0:
+; AVX-32-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrw $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrw $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrw $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrw $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrw $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrw $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrw $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
+; AVX-32-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX-32-NEXT: retl
;
-; AVX512BW-64-LABEL: test_buildvector_v32i16:
-; AVX512BW-64: # %bb.0:
-; AVX512BW-64-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512BW-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512BW-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512BW-64-NEXT: vmovd %edi, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $1, %esi, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $2, %edx, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $3, %ecx, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $4, %r8d, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $5, %r9d, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512BW-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512BW-64-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512BW-64-NEXT: retq
+; AVX-64-LABEL: test_buildvector_v32i16:
+; AVX-64: # %bb.0:
+; AVX-64-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; AVX-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX-64-NEXT: vmovd %edi, %xmm1
+; AVX-64-NEXT: vpinsrw $1, %esi, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $2, %edx, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $3, %ecx, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $4, %r8d, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $5, %r9d, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
+; AVX-64-NEXT: vpinsrw $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrw $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrw $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrw $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrw $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrw $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrw $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
+; AVX-64-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX-64-NEXT: retq
%ins0 = insertelement <32 x i16> undef, i16 %a0, i32 0
%ins1 = insertelement <32 x i16> %ins0, i16 %a1, i32 1
%ins2 = insertelement <32 x i16> %ins1, i16 %a2, i32 2
@@ -345,287 +269,147 @@ define <32 x i16> @test_buildvector_v32i16(i16 %a0, i16 %a1, i16 %a2, i16 %a3, i
}
define <64 x i8> @test_buildvector_v64i8(i8 %a0, i8 %a1, i8 %a2, i8 %a3, i8 %a4, i8 %a5, i8 %a6, i8 %a7, i8 %a8, i8 %a9, i8 %a10, i8 %a11, i8 %a12, i8 %a13, i8 %a14, i8 %a15, i8 %a16, i8 %a17, i8 %a18, i8 %a19, i8 %a20, i8 %a21, i8 %a22, i8 %a23, i8 %a24, i8 %a25, i8 %a26, i8 %a27, i8 %a28, i8 %a29, i8 %a30, i8 %a31, i8 %a32, i8 %a33, i8 %a34, i8 %a35, i8 %a36, i8 %a37, i8 %a38, i8 %a39, i8 %a40, i8 %a41, i8 %a42, i8 %a43, i8 %a44, i8 %a45, i8 %a46, i8 %a47, i8 %a48, i8 %a49, i8 %a50, i8 %a51, i8 %a52, i8 %a53, i8 %a54, i8 %a55, i8 %a56, i8 %a57, i8 %a58, i8 %a59, i8 %a60, i8 %a61, i8 %a62, i8 %a63) {
-; AVX512F-32-LABEL: test_buildvector_v64i8:
-; AVX512F-32: # %bb.0:
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512F-32-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512F-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512F-32-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512F-32-NEXT: retl
-;
-; AVX512F-64-LABEL: test_buildvector_v64i8:
-; AVX512F-64: # %bb.0:
-; AVX512F-64-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512F-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512F-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512F-64-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
-; AVX512F-64-NEXT: vmovd %edi, %xmm0
-; AVX512F-64-NEXT: vpinsrb $1, %esi, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $2, %edx, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $3, %ecx, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $4, %r8d, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $5, %r9d, %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512F-64-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512F-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512F-64-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
-; AVX512F-64-NEXT: retq
-;
-; AVX512BW-32-LABEL: test_buildvector_v64i8:
-; AVX512BW-32: # %bb.0:
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm0, %xmm0
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
-; AVX512BW-32-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512BW-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm2, %xmm2
-; AVX512BW-32-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512BW-32-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512BW-32-NEXT: retl
+; AVX-32-LABEL: test_buildvector_v64i8:
+; AVX-32: # %bb.0:
+; AVX-32-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm0, %xmm0
+; AVX-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX-32-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
+; AVX-32-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
+; AVX-32-NEXT: vpinsrb $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $8, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $9, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $10, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $11, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $12, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $13, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $14, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vpinsrb $15, {{[0-9]+}}(%esp), %xmm2, %xmm2
+; AVX-32-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
+; AVX-32-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX-32-NEXT: retl
;
-; AVX512BW-64-LABEL: test_buildvector_v64i8:
-; AVX512BW-64: # %bb.0:
-; AVX512BW-64-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
-; AVX512BW-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
-; AVX512BW-64-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
-; AVX512BW-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
-; AVX512BW-64-NEXT: vmovd %edi, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $1, %esi, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $2, %edx, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $3, %ecx, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $4, %r8d, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $5, %r9d, %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
-; AVX512BW-64-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
-; AVX512BW-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm2, %xmm2
-; AVX512BW-64-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512BW-64-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512BW-64-NEXT: retq
+; AVX-64-LABEL: test_buildvector_v64i8:
+; AVX-64: # %bb.0:
+; AVX-64-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
+; AVX-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
+; AVX-64-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
+; AVX-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX-64-NEXT: vmovd %edi, %xmm1
+; AVX-64-NEXT: vpinsrb $1, %esi, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $2, %edx, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $3, %ecx, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $4, %r8d, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $5, %r9d, %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
+; AVX-64-NEXT: vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
+; AVX-64-NEXT: vpinsrb $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $8, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $9, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $10, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $11, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $12, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $13, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $14, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vpinsrb $15, {{[0-9]+}}(%rsp), %xmm2, %xmm2
+; AVX-64-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
+; AVX-64-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX-64-NEXT: retq
%ins0 = insertelement <64 x i8> undef, i8 %a0, i32 0
%ins1 = insertelement <64 x i8> %ins0, i8 %a1, i32 1
%ins2 = insertelement <64 x i8> %ins1, i8 %a2, i32 2
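
The lowering pattern the new shared checks verify is the same in both functions: each 128-bit quarter of the 512-bit result is assembled with vmovd followed by a ladder of vpinsrw (words) or vpinsrb (bytes), two quarters are combined into a 256-bit half with vinserti128, and the two halves are joined into the zmm register with vinserti64x4. A minimal standalone IR reduction of the pattern exercised here, usable with RUN configurations like those sketched after the diffstat (the function name and arguments are illustrative, not part of this test); with AVX512-class targets this would typically lower to just a vmovd plus a single vpinsrw:

define <32 x i16> @buildvector_v32i16_lo2(i16 %a0, i16 %a1) {
  ; Insert the first two scalars into lanes 0 and 1; all remaining
  ; lanes of the 512-bit vector are left undef.
  %ins0 = insertelement <32 x i16> undef, i16 %a0, i32 0
  %ins1 = insertelement <32 x i16> %ins0, i16 %a1, i32 1
  ret <32 x i16> %ins1
}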