; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i386-unknown-unknown -mattr=+sse4a | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=i386-unknown-unknown -mattr=+sse4a,+avx | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4a | FileCheck %s --check-prefix=X64
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4a,+avx | FileCheck %s --check-prefix=X64

; Non-temporal scalar float store via SSE4a MOVNTSS.
define void @test_movntss(i8* %p, <4 x float> %a) nounwind optsize ssp {
; X32-LABEL: test_movntss:
; X32:       # BB#0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movntss %xmm0, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: test_movntss:
; X64:       # BB#0:
; X64-NEXT:    movntss %xmm0, (%rdi)
; X64-NEXT:    retq
  tail call void @llvm.x86.sse4a.movnt.ss(i8* %p, <4 x float> %a) nounwind
  ret void
}

declare void @llvm.x86.sse4a.movnt.ss(i8*, <4 x float>)

; Non-temporal scalar double store via SSE4a MOVNTSD.
define void @test_movntsd(i8* %p, <2 x double> %a) nounwind optsize ssp {
; X32-LABEL: test_movntsd:
; X32:       # BB#0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movntsd %xmm0, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: test_movntsd:
; X64:       # BB#0:
; X64-NEXT:    movntsd %xmm0, (%rdi)
; X64-NEXT:    retq
  tail call void @llvm.x86.sse4a.movnt.sd(i8* %p, <2 x double> %a) nounwind
  ret void
}

declare void @llvm.x86.sse4a.movnt.sd(i8*, <2 x double>)

; Immediate-form bit-field extract (EXTRQ with length/index immediates).
define <2 x i64> @test_extrqi(<2 x i64> %x) nounwind uwtable ssp {
; X32-LABEL: test_extrqi:
; X32:       # BB#0:
; X32-NEXT:    extrq $2, $3, %xmm0
; X32-NEXT:    retl
;
; X64-LABEL: test_extrqi:
; X64:       # BB#0:
; X64-NEXT:    extrq $2, $3, %xmm0
; X64-NEXT:    retq
  %1 = tail call <2 x i64> @llvm.x86.sse4a.extrqi(<2 x i64> %x, i8 3, i8 2)
  ret <2 x i64> %1
}

declare <2 x i64> @llvm.x86.sse4a.extrqi(<2 x i64>, i8, i8) nounwind

; Register-form bit-field extract (EXTRQ with control in %xmm1).
define <2 x i64> @test_extrq(<2 x i64> %x, <2 x i64> %y) nounwind uwtable ssp {
; X32-LABEL: test_extrq:
; X32:       # BB#0:
; X32-NEXT:    extrq %xmm1, %xmm0
; X32-NEXT:    retl
;
; X64-LABEL: test_extrq:
; X64:       # BB#0:
; X64-NEXT:    extrq %xmm1, %xmm0
; X64-NEXT:    retq
  %1 = bitcast <2 x i64> %y to <16 x i8>
  %2 = tail call <2 x i64> @llvm.x86.sse4a.extrq(<2 x i64> %x, <16 x i8> %1) nounwind
  ret <2 x i64> %2
}

declare <2 x i64> @llvm.x86.sse4a.extrq(<2 x i64>, <16 x i8>) nounwind

; Immediate-form bit-field insert (INSERTQ with length/index immediates).
define <2 x i64> @test_insertqi(<2 x i64> %x, <2 x i64> %y) nounwind uwtable ssp {
; X32-LABEL: test_insertqi:
; X32:       # BB#0:
; X32-NEXT:    insertq $6, $5, %xmm1, %xmm0
; X32-NEXT:    retl
;
; X64-LABEL: test_insertqi:
; X64:       # BB#0:
; X64-NEXT:    insertq $6, $5, %xmm1, %xmm0
; X64-NEXT:    retq
  %1 = tail call <2 x i64> @llvm.x86.sse4a.insertqi(<2 x i64> %x, <2 x i64> %y, i8 5, i8 6)
  ret <2 x i64> %1
}

declare <2 x i64> @llvm.x86.sse4a.insertqi(<2 x i64>, <2 x i64>, i8, i8) nounwind

; Register-form bit-field insert (INSERTQ with control in %xmm1).
define <2 x i64> @test_insertq(<2 x i64> %x, <2 x i64> %y) nounwind uwtable ssp {
; X32-LABEL: test_insertq:
; X32:       # BB#0:
; X32-NEXT:    insertq %xmm1, %xmm0
; X32-NEXT:    retl
;
; X64-LABEL: test_insertq:
; X64:       # BB#0:
; X64-NEXT:    insertq %xmm1, %xmm0
; X64-NEXT:    retq
  %1 = tail call <2 x i64> @llvm.x86.sse4a.insertq(<2 x i64> %x, <2 x i64> %y) nounwind
  ret <2 x i64> %1
}

declare <2 x i64> @llvm.x86.sse4a.insertq(<2 x i64>, <2 x i64>) nounwind