diff options
| author | Matt Arsenault <Matthew.Arsenault@amd.com> | 2019-07-31 00:14:43 +0000 |
|---|---|---|
| committer | Matt Arsenault <Matthew.Arsenault@amd.com> | 2019-07-31 00:14:43 +0000 |
| commit | 52c262484f9e3aa19ef4421b970a2fb0445ca433 (patch) | |
| tree | c2f2328162457703c27864b49a8135f4a6849df9 /llvm/utils | |
| parent | 84e80979b53036d1df4e35ae30439728c1244ba6 (diff) | |
| download | bcm5719-llvm-52c262484f9e3aa19ef4421b970a2fb0445ca433.tar.gz bcm5719-llvm-52c262484f9e3aa19ef4421b970a2fb0445ca433.zip | |
TableGen: Add MinAlignment predicate
AMDGPU uses some custom code predicates for testing alignments.
I'm still having trouble comprehending the behavior of predicate bits
in the PatFrag hierarchy. Any attempt to abstract these properties
unexpectedly fails to apply them.
llvm-svn: 367373
Diffstat (limited to 'llvm/utils')
| -rw-r--r-- | llvm/utils/TableGen/CodeGenDAGPatterns.cpp | 20 | ||||
| -rw-r--r-- | llvm/utils/TableGen/CodeGenDAGPatterns.h | 1 | ||||
| -rw-r--r-- | llvm/utils/TableGen/GlobalISelEmitter.cpp | 46 |
3 files changed, 65 insertions, 2 deletions
diff --git a/llvm/utils/TableGen/CodeGenDAGPatterns.cpp b/llvm/utils/TableGen/CodeGenDAGPatterns.cpp index 75890d2718d..de423655971 100644 --- a/llvm/utils/TableGen/CodeGenDAGPatterns.cpp +++ b/llvm/utils/TableGen/CodeGenDAGPatterns.cpp @@ -883,7 +883,8 @@ std::string TreePredicateFn::getPredCode() const { if (isLoad()) { if (!isUnindexed() && !isNonExtLoad() && !isAnyExtLoad() && !isSignExtLoad() && !isZeroExtLoad() && getMemoryVT() == nullptr && - getScalarMemoryVT() == nullptr) + getScalarMemoryVT() == nullptr && getAddressSpaces() == nullptr && + getMinAlignment() < 1) PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), "IsLoad cannot be used by itself"); } else { @@ -903,7 +904,8 @@ std::string TreePredicateFn::getPredCode() const { if (isStore()) { if (!isUnindexed() && !isTruncStore() && !isNonTruncStore() && - getMemoryVT() == nullptr && getScalarMemoryVT() == nullptr) + getMemoryVT() == nullptr && getScalarMemoryVT() == nullptr && + getAddressSpaces() == nullptr && getMinAlignment() < 1) PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), "IsStore cannot be used by itself"); } else { @@ -977,6 +979,13 @@ std::string TreePredicateFn::getPredCode() const { Code += ")\nreturn false;\n"; } + int64_t MinAlign = getMinAlignment(); + if (MinAlign > 0) { + Code += "if (cast<MemSDNode>(N)->getAlignment() < "; + Code += utostr(MinAlign); + Code += ")\nreturn false;\n"; + } + Record *MemoryVT = getMemoryVT(); if (MemoryVT) @@ -1177,6 +1186,13 @@ ListInit *TreePredicateFn::getAddressSpaces() const { return R->getValueAsListInit("AddressSpaces"); } +int64_t TreePredicateFn::getMinAlignment() const { + Record *R = getOrigPatFragRecord()->getRecord(); + if (R->isValueUnset("MinAlignment")) + return 0; + return R->getValueAsInt("MinAlignment"); +} + Record *TreePredicateFn::getScalarMemoryVT() const { Record *R = getOrigPatFragRecord()->getRecord(); if (R->isValueUnset("ScalarMemoryVT")) diff --git a/llvm/utils/TableGen/CodeGenDAGPatterns.h 
b/llvm/utils/TableGen/CodeGenDAGPatterns.h index d41de68f02f..80fc932a7a5 100644 --- a/llvm/utils/TableGen/CodeGenDAGPatterns.h +++ b/llvm/utils/TableGen/CodeGenDAGPatterns.h @@ -594,6 +594,7 @@ public: Record *getScalarMemoryVT() const; ListInit *getAddressSpaces() const; + int64_t getMinAlignment() const; // If true, indicates that GlobalISel-based C++ code was supplied. bool hasGISelPredicateCode() const; diff --git a/llvm/utils/TableGen/GlobalISelEmitter.cpp b/llvm/utils/TableGen/GlobalISelEmitter.cpp index 65c96069808..7e17dafc832 100644 --- a/llvm/utils/TableGen/GlobalISelEmitter.cpp +++ b/llvm/utils/TableGen/GlobalISelEmitter.cpp @@ -249,6 +249,10 @@ static std::string explainPredicates(const TreePatternNode *N) { OS << ']'; } + int64_t MinAlign = P.getMinAlignment(); + if (MinAlign > 0) + Explanation += " MinAlign=" + utostr(MinAlign); + if (P.isAtomicOrderingMonotonic()) Explanation += " monotonic"; if (P.isAtomicOrderingAcquire()) @@ -329,6 +333,9 @@ static Error isTrivialOperatorNode(const TreePatternNode *N) { const ListInit *AddrSpaces = Predicate.getAddressSpaces(); if (AddrSpaces && !AddrSpaces->empty()) continue; + + if (Predicate.getMinAlignment() > 0) + continue; } if (Predicate.isAtomic() && Predicate.getMemoryVT()) @@ -1052,6 +1059,7 @@ public: IPM_MemoryLLTSize, IPM_MemoryVsLLTSize, IPM_MemoryAddressSpace, + IPM_MemoryAlignment, IPM_GenericPredicate, OPM_SameOperand, OPM_ComplexPattern, @@ -1849,6 +1857,40 @@ public: } }; +class MemoryAlignmentPredicateMatcher : public InstructionPredicateMatcher { +protected: + unsigned MMOIdx; + int MinAlign; + +public: + MemoryAlignmentPredicateMatcher(unsigned InsnVarID, unsigned MMOIdx, + int MinAlign) + : InstructionPredicateMatcher(IPM_MemoryAlignment, InsnVarID), + MMOIdx(MMOIdx), MinAlign(MinAlign) { + assert(MinAlign > 0); + } + + static bool classof(const PredicateMatcher *P) { + return P->getKind() == IPM_MemoryAlignment; + } + + bool isIdentical(const PredicateMatcher &B) const override { + if 
(!InstructionPredicateMatcher::isIdentical(B)) + return false; + auto *Other = cast<MemoryAlignmentPredicateMatcher>(&B); + return MMOIdx == Other->MMOIdx && MinAlign == Other->MinAlign; + } + + void emitPredicateOpcodes(MatchTable &Table, + RuleMatcher &Rule) const override { + Table << MatchTable::Opcode("GIM_CheckMemoryAlignment") + << MatchTable::Comment("MI") << MatchTable::IntValue(InsnVarID) + << MatchTable::Comment("MMO") << MatchTable::IntValue(MMOIdx) + << MatchTable::Comment("MinAlign") << MatchTable::IntValue(MinAlign) + << MatchTable::LineBreak; + } +}; + /// Generates code to check that the size of an MMO is less-than, equal-to, or /// greater than a given LLT. class MemoryVsLLTSizePredicateMatcher : public InstructionPredicateMatcher { @@ -3287,6 +3329,10 @@ Expected<InstructionMatcher &> GlobalISelEmitter::createAndImportSelDAGMatcher( 0, ParsedAddrSpaces); } } + + int64_t MinAlign = Predicate.getMinAlignment(); + if (MinAlign > 0) + InsnMatcher.addPredicate<MemoryAlignmentPredicateMatcher>(0, MinAlign); } // G_LOAD is used for both non-extending and any-extending loads. |

