Diffstat (limited to 'clang')
-rw-r--r-- | clang/include/clang/Tooling/Syntax/Tokens.h    |  5
-rw-r--r-- | clang/lib/Tooling/Syntax/Tokens.cpp            | 16
-rw-r--r-- | clang/unittests/Tooling/Syntax/TokensTest.cpp  | 15
3 files changed, 36 insertions, 0 deletions
diff --git a/clang/include/clang/Tooling/Syntax/Tokens.h b/clang/include/clang/Tooling/Syntax/Tokens.h
index 301432d3888..6f4d0e0c050 100644
--- a/clang/include/clang/Tooling/Syntax/Tokens.h
+++ b/clang/include/clang/Tooling/Syntax/Tokens.h
@@ -175,6 +175,7 @@ public:
   /// All tokens produced by the preprocessor after all macro replacements,
   /// directives, etc. Source locations found in the clang AST will always
   /// point to one of these tokens.
+  /// Tokens are in TU order (per SourceManager::isBeforeInTranslationUnit()).
   /// FIXME: figure out how to handle token splitting, e.g. '>>' can be split
   ///        into two '>' tokens by the parser. However, TokenBuffer currently
   ///        keeps it as a single '>>' token.
@@ -182,6 +183,10 @@ public:
     return ExpandedTokens;
   }
 
+  /// Returns the subrange of expandedTokens() corresponding to the closed
+  /// token range R.
+  llvm::ArrayRef<syntax::Token> expandedTokens(SourceRange R) const;
+
   /// Find the subrange of spelled tokens that produced the corresponding \p
   /// Expanded tokens.
   ///
diff --git a/clang/lib/Tooling/Syntax/Tokens.cpp b/clang/lib/Tooling/Syntax/Tokens.cpp
index a2c3bc137d6..5941507e086 100644
--- a/clang/lib/Tooling/Syntax/Tokens.cpp
+++ b/clang/lib/Tooling/Syntax/Tokens.cpp
@@ -119,6 +119,22 @@ llvm::StringRef FileRange::text(const SourceManager &SM) const {
   return Text.substr(Begin, length());
 }
 
+llvm::ArrayRef<syntax::Token> TokenBuffer::expandedTokens(SourceRange R) const {
+  if (R.isInvalid())
+    return {};
+  const Token *Begin =
+      llvm::partition_point(expandedTokens(), [&](const syntax::Token &T) {
+        return SourceMgr->isBeforeInTranslationUnit(T.location(), R.getBegin());
+      });
+  const Token *End =
+      llvm::partition_point(expandedTokens(), [&](const syntax::Token &T) {
+        return !SourceMgr->isBeforeInTranslationUnit(R.getEnd(), T.location());
+      });
+  if (Begin > End)
+    return {};
+  return {Begin, End};
+}
+
 std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
 TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
   assert(Expanded);
diff --git a/clang/unittests/Tooling/Syntax/TokensTest.cpp b/clang/unittests/Tooling/Syntax/TokensTest.cpp
index 6ffe2c43dd0..2c462d49ee4 100644
--- a/clang/unittests/Tooling/Syntax/TokensTest.cpp
+++ b/clang/unittests/Tooling/Syntax/TokensTest.cpp
@@ -40,6 +40,7 @@
 #include "llvm/Support/raw_ostream.h"
 #include "llvm/Testing/Support/Annotations.h"
 #include "llvm/Testing/Support/SupportHelpers.h"
+#include "gmock/gmock.h"
 #include <cassert>
 #include <cstdlib>
 #include <gmock/gmock.h>
@@ -663,6 +664,20 @@ TEST_F(TokenBufferTest, SpelledByExpanded) {
               ValueIs(SameRange(findSpelled("not_mapped"))));
 }
 
+TEST_F(TokenBufferTest, ExpandedTokensForRange) {
+  recordTokens(R"cpp(
+    #define SIGN(X) X##_washere
+    A SIGN(B) C SIGN(D) E SIGN(F) G
+  )cpp");
+
+  SourceRange R(findExpanded("C").front().location(),
+                findExpanded("F_washere").front().location());
+  // Sanity check: expanded and spelled tokens are stored separately.
+  EXPECT_THAT(Buffer.expandedTokens(R),
+              SameRange(findExpanded("C D_washere E F_washere")));
+  EXPECT_THAT(Buffer.expandedTokens(SourceRange()), testing::IsEmpty());
+}
+
 TEST_F(TokenBufferTest, ExpansionStartingAt) {
   // Object-like macro expansions.
   recordTokens(R"cpp(
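Note: the following is a minimal usage sketch of the new overload, not part of the commit above. It assumes a syntax::TokenBuffer (here named Buffer) obtained from syntax::TokenCollector::consume() after parsing; the helper name tokensOfDecl is hypothetical. Decl::getSourceRange() yields a closed range, which matches the closed-range contract documented for expandedTokens(SourceRange).

// Hypothetical helper (illustration only, not from the commit): collect the
// expanded tokens covered by a declaration's source range.
#include "clang/AST/DeclBase.h"
#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/ADT/ArrayRef.h"

llvm::ArrayRef<clang::syntax::Token>
tokensOfDecl(const clang::syntax::TokenBuffer &Buffer, const clang::Decl &D) {
  // Both endpoints of the declaration's range are included, so the returned
  // ArrayRef covers the first and last token of the declaration as well.
  return Buffer.expandedTokens(D.getSourceRange());
}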