| author | Dale Johannesen <dalej@apple.com>, 2008-08-20 00:48:50 +0000 |
|---|---|
| committer | Dale Johannesen <dalej@apple.com>, 2008-08-20 00:48:50 +0000 |
| commit | 6f765f392cf11fd61c46086e2ed532e5b4eb45ca (patch) |
| tree | 197f9a4254576d18cd21426ebb2e35a98fe404cd /llvm/lib/Target/X86/X86ISelLowering.cpp |
| parent | 847ebb90b81229b4d271aa88d8de715639ac4482 (diff) |
| download | bcm5719-llvm-6f765f392cf11fd61c46086e2ed532e5b4eb45ca.tar.gz, bcm5719-llvm-6f765f392cf11fd61c46086e2ed532e5b4eb45ca.zip |
Add remaining 64-bit atomic patterns for x86-64.
llvm-svn: 55029
Diffstat (limited to 'llvm/lib/Target/X86/X86ISelLowering.cpp')
| -rw-r--r-- | llvm/lib/Target/X86/X86ISelLowering.cpp | 32 |
1 file changed, 32 insertions, 0 deletions
```diff
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 2fb9a2e651e..c43ce33c8c6 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -6568,6 +6568,38 @@ X86TargetLowering::EmitInstrWithCustomInserter(MachineInstr *MI,
                                                X86::NOT8r, X86::AL,
                                                X86::GR8RegisterClass, true);
   // FIXME: There are no CMOV8 instructions; MIN/MAX need some other way.
+  case X86::ATOMAND64:
+    return EmitAtomicBitwiseWithCustomInserter(MI, BB, X86::AND64rr,
+                                               X86::AND64ri32, X86::MOV64rm,
+                                               X86::LCMPXCHG64, X86::MOV64rr,
+                                               X86::NOT64r, X86::RAX,
+                                               X86::GR64RegisterClass);
+  case X86::ATOMOR64:
+    return EmitAtomicBitwiseWithCustomInserter(MI, BB, X86::OR64rr,
+                                               X86::OR64ri32, X86::MOV64rm,
+                                               X86::LCMPXCHG64, X86::MOV64rr,
+                                               X86::NOT64r, X86::RAX,
+                                               X86::GR64RegisterClass);
+  case X86::ATOMXOR64:
+    return EmitAtomicBitwiseWithCustomInserter(MI, BB, X86::XOR64rr,
+                                               X86::XOR64ri32, X86::MOV64rm,
+                                               X86::LCMPXCHG64, X86::MOV64rr,
+                                               X86::NOT64r, X86::RAX,
+                                               X86::GR64RegisterClass);
+  case X86::ATOMNAND64:
+    return EmitAtomicBitwiseWithCustomInserter(MI, BB, X86::AND64rr,
+                                               X86::AND64ri32, X86::MOV64rm,
+                                               X86::LCMPXCHG64, X86::MOV64rr,
+                                               X86::NOT64r, X86::RAX,
+                                               X86::GR64RegisterClass, true);
+  case X86::ATOMMIN64:
+    return EmitAtomicMinMaxWithCustomInserter(MI, BB, X86::CMOVL64rr);
+  case X86::ATOMMAX64:
+    return EmitAtomicMinMaxWithCustomInserter(MI, BB, X86::CMOVG64rr);
+  case X86::ATOMUMIN64:
+    return EmitAtomicMinMaxWithCustomInserter(MI, BB, X86::CMOVB64rr);
+  case X86::ATOMUMAX64:
+    return EmitAtomicMinMaxWithCustomInserter(MI, BB, X86::CMOVA64rr);
   }
 }
```
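
For the bitwise cases, `EmitAtomicBitwiseWithCustomInserter` expands the pseudo-instruction into a retry loop: load the current value (`MOV64rm`), compute the new value with the given opcode (e.g. `AND64rr`), attempt `LCMPXCHG64`, and branch back if another thread changed the location in between. A minimal sketch of that loop shape, written with `std::atomic` purely for illustration (the helper name `atomic_and64` and the `std::atomic` framing are assumptions, not LLVM code):

```cpp
#include <atomic>
#include <cstdint>
#include <cstdio>

// Hypothetical helper mirroring the expansion of ATOMAND64.
// Returns the old value, as the pseudo-instruction leaves it in RAX.
uint64_t atomic_and64(std::atomic<uint64_t> &mem, uint64_t operand) {
  uint64_t old = mem.load();    // MOV64rm: load the current value
  uint64_t desired;
  do {
    desired = old & operand;    // AND64rr / AND64ri32: compute new value
    // LCMPXCHG64: publish if mem still holds `old`; on failure,
    // compare_exchange_weak reloads `old` and we branch back.
  } while (!mem.compare_exchange_weak(old, desired));
  return old;
}

int main() {
  std::atomic<uint64_t> x{0xFF00FF00FF00FF00ull};
  uint64_t prev = atomic_and64(x, 0x0F0F0F0F0F0F0F0Full);
  std::printf("%llx -> %llx\n",
              (unsigned long long)prev,
              (unsigned long long)x.load());
}
```

The NAND case reuses the AND opcodes and passes `true` for the trailing flag, which inverts the result (`NOT64r`) before the compare-exchange.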

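As the FIXME above notes for the 8-bit case, there is no single x86 instruction for atomic min/max, so `EmitAtomicMinMaxWithCustomInserter` builds the same compare-exchange loop but selects the new value with a compare plus conditional move (`CMOVL64rr` for signed min, `CMOVG64rr` for signed max, `CMOVB64rr`/`CMOVA64rr` for the unsigned variants). A sketch of the signed-min shape, again with `std::atomic` standing in for the machine loop (an assumption for illustration, not taken from LLVM):

```cpp
#include <atomic>
#include <cstdint>

// Hypothetical helper mirroring the expansion of ATOMMIN64.
int64_t atomic_min64(std::atomic<int64_t> &mem, int64_t operand) {
  int64_t old = mem.load();                       // load current value
  int64_t desired;
  do {
    desired = (operand < old) ? operand : old;    // CMP64rr + CMOVL64rr
  } while (!mem.compare_exchange_weak(old, desired)); // LCMPXCHG64 + retry
  return old;                                     // old value, as in RAX
}
```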
