diff options
| author | Craig Topper <craig.topper@intel.com> | 2017-08-01 17:18:14 +0000 |
|---|---|---|
| committer | Craig Topper <craig.topper@intel.com> | 2017-08-01 17:18:14 +0000 |
| commit | 2a5bba73255f54338f0d6862bf5bf3a6de6cbd8d (patch) | |
| tree | 55bc68122c7308b3f92994d7e230de58ff87d862 /llvm/test/CodeGen/X86/tbm_patterns.ll | |
| parent | e925caf41659d059457561484267b3c286d2d58a (diff) | |
| download | bcm5719-llvm-2a5bba73255f54338f0d6862bf5bf3a6de6cbd8d.tar.gz bcm5719-llvm-2a5bba73255f54338f0d6862bf5bf3a6de6cbd8d.zip | |
[X86] Use BEXTR/BEXTRI for 64-bit 'and' with a large mask
Summary: The 64-bit 'and' with immediate instruction only supports a 32-bit immediate. So for larger constants we have to load the constant into a register first. If the immediate happens to be a mask we can use the BEXTRI instruction to perform the masking. We already do something similar using the BZHI instruction from the BMI2 instruction set.
Reviewers: RKSimon, spatel
Reviewed By: RKSimon
Subscribers: llvm-commits
Differential Revision: https://reviews.llvm.org/D36129
llvm-svn: 309706
Diffstat (limited to 'llvm/test/CodeGen/X86/tbm_patterns.ll')
| -rw-r--r-- | llvm/test/CodeGen/X86/tbm_patterns.ll | 20 |
1 file changed, 20 insertions, 0 deletions
diff --git a/llvm/test/CodeGen/X86/tbm_patterns.ll b/llvm/test/CodeGen/X86/tbm_patterns.ll
index 5ce6bbd4b49..f110bd538bf 100644
--- a/llvm/test/CodeGen/X86/tbm_patterns.ll
+++ b/llvm/test/CodeGen/X86/tbm_patterns.ll
@@ -253,3 +253,23 @@ define i64 @test_x86_tbm_tzmsk_u64(i64 %a) nounwind {
   ret i64 %t2
 }
+
+define i64 @test_and_large_constant_mask(i64 %x) {
+; CHECK-LABEL: test_and_large_constant_mask:
+; CHECK:       # BB#0: # %entry
+; CHECK-NEXT:    bextr $15872, %rdi, %rax # imm = 0x3E00
+; CHECK-NEXT:    retq
+entry:
+  %and = and i64 %x, 4611686018427387903
+  ret i64 %and
+}
+
+define i64 @test_and_large_constant_mask_load(i64* %x) {
+; CHECK-LABEL: test_and_large_constant_mask_load:
+; CHECK:       # BB#0: # %entry
+; CHECK-NEXT:    bextr $15872, (%rdi), %rax # imm = 0x3E00
+; CHECK-NEXT:    retq
+entry:
+  %x1 = load i64, i64* %x
+  %and = and i64 %x1, 4611686018427387903
+  ret i64 %and
+}

