Diffstat (limited to 'llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-sext.ll')
-rw-r--r-- | llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-sext.ll | 98 |
1 file changed, 49 insertions, 49 deletions
diff --git a/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-sext.ll b/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-sext.ll
index 988cbe1e6df..92a4ebc8051 100644
--- a/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-sext.ll
+++ b/llvm/test/CodeGen/X86/bitcast-int-to-vector-bool-sext.ll
@@ -11,7 +11,7 @@
define <2 x i64> @ext_i2_2i64(i2 %a0) {
; SSE2-SSSE3-LABEL: ext_i2_2i64:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; SSE2-SSSE3-NEXT: movq %rdi, %xmm0
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,1,0,1]
@@ -23,7 +23,7 @@ define <2 x i64> @ext_i2_2i64(i2 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i2_2i64:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; AVX1-NEXT: vmovq %rdi, %xmm0
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
@@ -33,7 +33,7 @@ define <2 x i64> @ext_i2_2i64(i2 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i2_2i64:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; AVX2-NEXT: vmovq %rdi, %xmm0
; AVX2-NEXT: vpbroadcastq %xmm0, %xmm0
@@ -43,7 +43,7 @@ define <2 x i64> @ext_i2_2i64(i2 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i2_2i64:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: andb $3, %dil
; AVX512-NEXT: movb %dil, -{{[0-9]+}}(%rsp)
; AVX512-NEXT: movzbl -{{[0-9]+}}(%rsp), %eax
@@ -59,7 +59,7 @@ define <2 x i64> @ext_i2_2i64(i2 %a0) {
define <4 x i32> @ext_i4_4i32(i4 %a0) {
; SSE2-SSSE3-LABEL: ext_i4_4i32:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm0
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8]
@@ -68,7 +68,7 @@ define <4 x i32> @ext_i4_4i32(i4 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i4_4i32:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8]
@@ -77,7 +77,7 @@ define <4 x i32> @ext_i4_4i32(i4 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i4_4i32:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpbroadcastd %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8]
@@ -86,7 +86,7 @@ define <4 x i32> @ext_i4_4i32(i4 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i4_4i32:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: andb $15, %dil
; AVX512-NEXT: movb %dil, -{{[0-9]+}}(%rsp)
; AVX512-NEXT: movzbl -{{[0-9]+}}(%rsp), %eax
@@ -103,7 +103,7 @@ define <4 x i32> @ext_i4_4i32(i4 %a0) {
define <8 x i16> @ext_i8_8i16(i8 %a0) {
; SSE2-SSSE3-LABEL: ext_i8_8i16:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm0
; SSE2-SSSE3-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,0,0,0,4,5,6,7]
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
@@ -113,7 +113,7 @@ define <8 x i16> @ext_i8_8i16(i8 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i8_8i16:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,0,0,0,4,5,6,7]
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
@@ -123,7 +123,7 @@ define <8 x i16> @ext_i8_8i16(i8 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i8_8i16:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpbroadcastw %xmm0, %xmm0
; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128]
@@ -132,7 +132,7 @@ define <8 x i16> @ext_i8_8i16(i8 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i8_8i16:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k0
; AVX512-NEXT: vpmovm2w %k0, %xmm0
; AVX512-NEXT: retq
@@ -143,7 +143,7 @@ define <8 x i16> @ext_i8_8i16(i8 %a0) {
define <16 x i8> @ext_i16_16i8(i16 %a0) {
; SSE2-LABEL: ext_i16_16i8:
-; SSE2: # BB#0:
+; SSE2: # %bb.0:
; SSE2-NEXT: movd %edi, %xmm0
; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE2-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,0,1,1,4,5,6,7]
@@ -154,7 +154,7 @@ define <16 x i8> @ext_i16_16i8(i16 %a0) {
; SSE2-NEXT: retq
;
; SSSE3-LABEL: ext_i16_16i8:
-; SSSE3: # BB#0:
+; SSSE3: # %bb.0:
; SSSE3-NEXT: movd %edi, %xmm0
; SSSE3-NEXT: pshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128,1,2,4,8,16,32,64,128]
@@ -163,7 +163,7 @@ define <16 x i8> @ext_i16_16i8(i16 %a0) {
; SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i16_16i8:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
; AVX1-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
@@ -172,7 +172,7 @@ define <16 x i8> @ext_i16_16i8(i16 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i16_16i8:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
; AVX2-NEXT: vpbroadcastq {{.*#+}} xmm1 = [9241421688590303745,9241421688590303745]
@@ -181,7 +181,7 @@ define <16 x i8> @ext_i16_16i8(i16 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i16_16i8:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k0
; AVX512-NEXT: vpmovm2b %k0, %xmm0
; AVX512-NEXT: retq
@@ -196,7 +196,7 @@ define <16 x i8> @ext_i16_16i8(i16 %a0) {
define <4 x i64> @ext_i4_4i64(i4 %a0) {
; SSE2-SSSE3-LABEL: ext_i4_4i64:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; SSE2-SSSE3-NEXT: movq %rdi, %xmm0
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm2 = xmm0[0,1,0,1]
@@ -214,7 +214,7 @@ define <4 x i64> @ext_i4_4i64(i4 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i4_4i64:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; AVX1-NEXT: vmovq %rdi, %xmm0
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
@@ -231,7 +231,7 @@ define <4 x i64> @ext_i4_4i64(i4 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i4_4i64:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; AVX2-NEXT: vmovq %rdi, %xmm0
; AVX2-NEXT: vpbroadcastq %xmm0, %ymm0
@@ -241,7 +241,7 @@ define <4 x i64> @ext_i4_4i64(i4 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i4_4i64:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: andb $15, %dil
; AVX512-NEXT: movb %dil, -{{[0-9]+}}(%rsp)
; AVX512-NEXT: movzbl -{{[0-9]+}}(%rsp), %eax
@@ -256,7 +256,7 @@ define <4 x i64> @ext_i4_4i64(i4 %a0) {
define <8 x i32> @ext_i8_8i32(i8 %a0) {
; SSE2-SSSE3-LABEL: ext_i8_8i32:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm0
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,0,0,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [1,2,4,8]
@@ -269,7 +269,7 @@ define <8 x i32> @ext_i8_8i32(i8 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i8_8i32:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
@@ -285,7 +285,7 @@ define <8 x i32> @ext_i8_8i32(i8 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i8_8i32:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpbroadcastd %xmm0, %ymm0
; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [1,2,4,8,16,32,64,128]
@@ -294,7 +294,7 @@ define <8 x i32> @ext_i8_8i32(i8 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i8_8i32:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k1
; AVX512-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
; AVX512-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
@@ -306,7 +306,7 @@ define <8 x i32> @ext_i8_8i32(i8 %a0) {
define <16 x i16> @ext_i16_16i16(i16 %a0) {
; SSE2-SSSE3-LABEL: ext_i16_16i16:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm0
; SSE2-SSSE3-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,0,0,0,4,5,6,7]
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
@@ -320,7 +320,7 @@ define <16 x i16> @ext_i16_16i16(i16 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i16_16i16:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,0,0,0,4,5,6,7]
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
@@ -337,7 +337,7 @@ define <16 x i16> @ext_i16_16i16(i16 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i16_16i16:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpbroadcastw %xmm0, %ymm0
; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768]
@@ -346,7 +346,7 @@ define <16 x i16> @ext_i16_16i16(i16 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i16_16i16:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k0
; AVX512-NEXT: vpmovm2w %k0, %ymm0
; AVX512-NEXT: retq
@@ -357,7 +357,7 @@ define <16 x i16> @ext_i16_16i16(i16 %a0) {
define <32 x i8> @ext_i32_32i8(i32 %a0) {
; SSE2-SSSE3-LABEL: ext_i32_32i8:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm1
; SSE2-SSSE3-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE2-SSSE3-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[0,0,1,1,4,5,6,7]
@@ -372,7 +372,7 @@ define <32 x i8> @ext_i32_32i8(i32 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i32_32i8:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,1,1,4,5,6,7]
@@ -392,7 +392,7 @@ define <32 x i8> @ext_i32_32i8(i32 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i32_32i8:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,1,1,4,5,6,7]
@@ -406,7 +406,7 @@ define <32 x i8> @ext_i32_32i8(i32 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i32_32i8:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k0
; AVX512-NEXT: vpmovm2b %k0, %ymm0
; AVX512-NEXT: retq
@@ -421,7 +421,7 @@ define <32 x i8> @ext_i32_32i8(i32 %a0) {
define <8 x i64> @ext_i8_8i64(i8 %a0) {
; SSE2-SSSE3-LABEL: ext_i8_8i64:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; SSE2-SSSE3-NEXT: movq %rdi, %xmm0
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,1,0,1]
@@ -451,7 +451,7 @@ define <8 x i64> @ext_i8_8i64(i8 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i8_8i64:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; AVX1-NEXT: vmovq %rdi, %xmm0
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
@@ -475,7 +475,7 @@ define <8 x i64> @ext_i8_8i64(i8 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i8_8i64:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: # kill: %edi<def> %edi<kill> %rdi<def>
; AVX2-NEXT: vmovq %rdi, %xmm0
; AVX2-NEXT: vpbroadcastq %xmm0, %ymm1
@@ -488,7 +488,7 @@ define <8 x i64> @ext_i8_8i64(i8 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i8_8i64:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k1
; AVX512-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512-NEXT: retq
@@ -499,7 +499,7 @@ define <8 x i64> @ext_i8_8i64(i8 %a0) {
define <16 x i32> @ext_i16_16i32(i16 %a0) {
; SSE2-SSSE3-LABEL: ext_i16_16i32:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm0
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm0[0,0,0,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8]
@@ -520,7 +520,7 @@ define <16 x i32> @ext_i16_16i32(i16 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i16_16i32:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm0
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm1
@@ -543,7 +543,7 @@ define <16 x i32> @ext_i16_16i32(i16 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i16_16i32:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpbroadcastd %xmm0, %ymm1
; AVX2-NEXT: vmovdqa {{.*#+}} ymm0 = [1,2,4,8,16,32,64,128]
@@ -555,7 +555,7 @@ define <16 x i32> @ext_i16_16i32(i16 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i16_16i32:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k1
; AVX512-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512-NEXT: retq
@@ -566,7 +566,7 @@ define <16 x i32> @ext_i16_16i32(i16 %a0) {
define <32 x i16> @ext_i32_32i16(i32 %a0) {
; SSE2-SSSE3-LABEL: ext_i32_32i16:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movd %edi, %xmm2
; SSE2-SSSE3-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,0,0,0,4,5,6,7]
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
@@ -587,7 +587,7 @@ define <32 x i16> @ext_i32_32i16(i32 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i32_32i16:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovd %edi, %xmm1
; AVX1-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,0,0,4,5,6,7]
; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
@@ -615,7 +615,7 @@ define <32 x i16> @ext_i32_32i16(i32 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i32_32i16:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovd %edi, %xmm0
; AVX2-NEXT: vpbroadcastw %xmm0, %ymm0
; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768]
@@ -629,7 +629,7 @@ define <32 x i16> @ext_i32_32i16(i32 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i32_32i16:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovd %edi, %k0
; AVX512-NEXT: vpmovm2w %k0, %zmm0
; AVX512-NEXT: retq
@@ -640,7 +640,7 @@ define <32 x i16> @ext_i32_32i16(i32 %a0) {
define <64 x i8> @ext_i64_64i8(i64 %a0) {
; SSE2-SSSE3-LABEL: ext_i64_64i8:
-; SSE2-SSSE3: # BB#0:
+; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movq %rdi, %xmm3
; SSE2-SSSE3-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE2-SSSE3-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[0,0,1,1,4,5,6,7]
@@ -663,7 +663,7 @@ define <64 x i8> @ext_i64_64i8(i64 %a0) {
; SSE2-SSSE3-NEXT: retq
;
; AVX1-LABEL: ext_i64_64i8:
-; AVX1: # BB#0:
+; AVX1: # %bb.0:
; AVX1-NEXT: vmovq %rdi, %xmm0
; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX1-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,1,1,4,5,6,7]
@@ -696,7 +696,7 @@ define <64 x i8> @ext_i64_64i8(i64 %a0) {
; AVX1-NEXT: retq
;
; AVX2-LABEL: ext_i64_64i8:
-; AVX2: # BB#0:
+; AVX2: # %bb.0:
; AVX2-NEXT: vmovq %rdi, %xmm0
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,1,1,4,5,6,7]
@@ -717,7 +717,7 @@ define <64 x i8> @ext_i64_64i8(i64 %a0) {
; AVX2-NEXT: retq
;
; AVX512-LABEL: ext_i64_64i8:
-; AVX512: # BB#0:
+; AVX512: # %bb.0:
; AVX512-NEXT: kmovq %rdi, %k0
; AVX512-NEXT: vpmovm2b %k0, %zmm0
; AVX512-NEXT: retq