Diffstat (limited to 'llvm/test/CodeGen/X86/avx512-logic.ll')
-rw-r--r-- | llvm/test/CodeGen/X86/avx512-logic.ll | 96 |
1 files changed, 32 insertions, 64 deletions
diff --git a/llvm/test/CodeGen/X86/avx512-logic.ll b/llvm/test/CodeGen/X86/avx512-logic.ll
index b54c8a94058..ca603b11fbb 100644
--- a/llvm/test/CodeGen/X86/avx512-logic.ll
+++ b/llvm/test/CodeGen/X86/avx512-logic.ll
@@ -498,16 +498,14 @@ entry:
 define <8 x double> @test_mm512_mask_xor_pd(<8 x double> %__W, i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_mask_xor_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpxorq %zmm2, %zmm1, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpxorq %zmm2, %zmm1, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_xor_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vxorpd %zmm2, %zmm1, %zmm1
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vxorpd %zmm2, %zmm1, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -522,16 +520,14 @@ entry:
 define <8 x double> @test_mm512_maskz_xor_pd(i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_maskz_xor_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpxorq %zmm1, %zmm0, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpxorq %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_xor_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vxorpd %zmm1, %zmm0, %zmm0
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vxorpd %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -546,16 +542,14 @@ entry:
 define <16 x float> @test_mm512_mask_xor_ps(<16 x float> %__W, i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_mask_xor_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpxorq %zmm2, %zmm1, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpxord %zmm2, %zmm1, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_xor_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vxorps %zmm2, %zmm1, %zmm1
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vxorps %zmm2, %zmm1, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -570,16 +564,14 @@ entry:
 define <16 x float> @test_mm512_maskz_xor_ps(i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_maskz_xor_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpxorq %zmm1, %zmm0, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpxord %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_xor_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vxorps %zmm1, %zmm0, %zmm0
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vxorps %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -594,16 +586,14 @@ entry:
 define <8 x double> @test_mm512_mask_or_pd(<8 x double> %__W, i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_mask_or_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vporq %zmm1, %zmm2, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vporq %zmm1, %zmm2, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_or_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vorpd %zmm1, %zmm2, %zmm1
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vorpd %zmm1, %zmm2, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -618,16 +608,14 @@ entry:
 define <8 x double> @test_mm512_maskz_or_pd(i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_maskz_or_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vporq %zmm0, %zmm1, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vporq %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_or_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vorpd %zmm0, %zmm1, %zmm0
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vorpd %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -642,16 +630,14 @@ entry:
 define <16 x float> @test_mm512_mask_or_ps(<16 x float> %__W, i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_mask_or_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vporq %zmm1, %zmm2, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpord %zmm1, %zmm2, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_or_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vorps %zmm1, %zmm2, %zmm1
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vorps %zmm1, %zmm2, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -666,16 +652,14 @@ entry:
 define <16 x float> @test_mm512_maskz_or_ps(i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_maskz_or_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vporq %zmm0, %zmm1, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpord %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_or_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vorps %zmm0, %zmm1, %zmm0
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vorps %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -690,16 +674,14 @@ entry:
 define <8 x double> @test_mm512_mask_and_pd(<8 x double> %__W, i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_mask_and_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandq %zmm1, %zmm2, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpandq %zmm1, %zmm2, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_and_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandpd %zmm1, %zmm2, %zmm1
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vandpd %zmm1, %zmm2, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -714,16 +696,14 @@ entry:
 define <8 x double> @test_mm512_maskz_and_pd(i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_maskz_and_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandq %zmm0, %zmm1, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpandq %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_and_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandpd %zmm0, %zmm1, %zmm0
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vandpd %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -738,16 +718,14 @@ entry:
 define <16 x float> @test_mm512_mask_and_ps(<16 x float> %__W, i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_mask_and_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandq %zmm1, %zmm2, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpandd %zmm1, %zmm2, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_and_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandps %zmm1, %zmm2, %zmm1
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vandps %zmm1, %zmm2, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -762,16 +740,14 @@ entry:
 define <16 x float> @test_mm512_maskz_and_ps(i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_maskz_and_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandq %zmm0, %zmm1, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpandd %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_and_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandps %zmm0, %zmm1, %zmm0
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vandps %zmm0, %zmm1, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -786,16 +762,14 @@ entry:
 define <8 x double> @test_mm512_mask_andnot_pd(<8 x double> %__W, i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_mask_andnot_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandnq %zmm2, %zmm1, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpandnq %zmm2, %zmm1, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_andnot_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandnpd %zmm2, %zmm1, %zmm1
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vblendmpd %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vandnpd %zmm2, %zmm1, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -811,16 +785,14 @@ entry:
 define <8 x double> @test_mm512_maskz_andnot_pd(i8 zeroext %__U, <8 x double> %__A, <8 x double> %__B) {
 ; KNL-LABEL: test_mm512_maskz_andnot_pd:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandnq %zmm1, %zmm0, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpandnq %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_andnot_pd:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandnpd %zmm1, %zmm0, %zmm0
 ; SKX-NEXT:    kmovb %edi, %k1
-; SKX-NEXT:    vmovapd %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vandnpd %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <8 x double> %__A to <8 x i64>
@@ -836,16 +808,14 @@ entry:
 define <16 x float> @test_mm512_mask_andnot_ps(<16 x float> %__W, i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_mask_andnot_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandnq %zmm2, %zmm1, %zmm1
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; KNL-NEXT:    vpandnd %zmm2, %zmm1, %zmm0 {%k1}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_mask_andnot_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandnps %zmm2, %zmm1, %zmm1
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vblendmps %zmm1, %zmm0, %zmm0 {%k1}
+; SKX-NEXT:    vandnps %zmm2, %zmm1, %zmm0 {%k1}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
@@ -861,16 +831,14 @@ entry:
 define <16 x float> @test_mm512_maskz_andnot_ps(i16 zeroext %__U, <16 x float> %__A, <16 x float> %__B) {
 ; KNL-LABEL: test_mm512_maskz_andnot_ps:
 ; KNL:       ## BB#0: ## %entry
-; KNL-NEXT:    vpandnq %zmm1, %zmm0, %zmm0
 ; KNL-NEXT:    kmovw %edi, %k1
-; KNL-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; KNL-NEXT:    vpandnd %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; KNL-NEXT:    retq
 ;
 ; SKX-LABEL: test_mm512_maskz_andnot_ps:
 ; SKX:       ## BB#0: ## %entry
-; SKX-NEXT:    vandnps %zmm1, %zmm0, %zmm0
 ; SKX-NEXT:    kmovw %edi, %k1
-; SKX-NEXT:    vmovaps %zmm0, %zmm0 {%k1} {z}
+; SKX-NEXT:    vandnps %zmm1, %zmm0, %zmm0 {%k1} {z}
 ; SKX-NEXT:    retq
 entry:
   %0 = bitcast <16 x float> %__A to <16 x i32>
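For orientation, below is a minimal, self-contained sketch of the IR shape these tests exercise; the function and value names are invented for illustration and do not appear in avx512-logic.ll. The pattern is a 512-bit bitwise op on bitcast vectors whose result is selected against a passthrough value under an i8/i16 lane mask. The updated CHECK lines above verify that llc now folds the op and the select into one masked instruction (e.g. vpxorq %zmm2, %zmm1, %zmm0 {%k1}) instead of an unmasked op followed by vblendmpd/vblendmps, and likewise folds the zero-masked (maskz) variants instead of emitting a separate vmovapd/vmovaps {%k1} {z}.

; Hypothetical reduction of the masked-xor pattern (not taken from the test file):
define <8 x double> @mask_xor_sketch(<8 x double> %passthru, i8 %mask,
                                     <8 x double> %a, <8 x double> %b) {
entry:
  %a.i = bitcast <8 x double> %a to <8 x i64>
  %b.i = bitcast <8 x double> %b to <8 x i64>
  %xor = xor <8 x i64> %a.i, %b.i                  ; the logic op
  %res.f = bitcast <8 x i64> %xor to <8 x double>
  %m = bitcast i8 %mask to <8 x i1>                ; i8 mask -> per-lane predicate
  %sel = select <8 x i1> %m, <8 x double> %res.f, <8 x double> %passthru
  ret <8 x double> %sel
}

The maskz variants differ only in that the passthrough operand is zeroinitializer, which is why they previously lowered to an extra zero-masked vmovapd/vmovaps.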