Diffstat (limited to 'clang/lib')
-rw-r--r-- | clang/lib/Frontend/PrintPreprocessedOutput.cpp |  4
-rw-r--r-- | clang/lib/Lex/PPDirectives.cpp                 | 43
-rw-r--r-- | clang/lib/Lex/PPLexerChange.cpp                | 74
-rw-r--r-- | clang/lib/Lex/Preprocessor.cpp                 |  4
-rw-r--r-- | clang/lib/Parse/ParseCXXInlineMethods.cpp      |  6
-rw-r--r-- | clang/lib/Parse/ParseDecl.cpp                  |  5
-rw-r--r-- | clang/lib/Parse/ParseDeclCXX.cpp               | 10
-rw-r--r-- | clang/lib/Parse/ParseInit.cpp                  |  2
-rw-r--r-- | clang/lib/Parse/ParseObjc.cpp                  |  8
-rw-r--r-- | clang/lib/Parse/ParseStmt.cpp                  |  4
-rw-r--r-- | clang/lib/Parse/Parser.cpp                     | 33
11 files changed, 135 insertions, 58 deletions
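In brief: when building a module, entering a header that the module map assigns to a different submodule now pushes an annot_module_begin annotation token (carrying the Module*) onto the token stream, and reaching the end of that header emits a matching annot_module_end; the parser and -E output are taught to tolerate both. As a rough illustration with hypothetical file names, for a module Foo with submodules A and B, where a.h (submodule A) includes b.h (submodule B), the parser conceptually sees:

  ...tokens of a.h...
  annot_module_begin   <- pushed by HandleIncludeDirective; the annotation
                          value is the Module* for Foo.B
  ...tokens of b.h...
  annot_module_end     <- formed by HandleEndOfFile when the lexer for b.h
                          (entered with IsSubmodule = true) reaches EOF
  ...remaining tokens of a.h...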
diff --git a/clang/lib/Frontend/PrintPreprocessedOutput.cpp b/clang/lib/Frontend/PrintPreprocessedOutput.cpp
index 55a66d87f8b..f3393bfe51c 100644
--- a/clang/lib/Frontend/PrintPreprocessedOutput.cpp
+++ b/clang/lib/Frontend/PrintPreprocessedOutput.cpp
@@ -657,7 +657,9 @@ static void PrintPreprocessedTokens(Preprocessor &PP, Token &Tok,
     // -traditional-cpp the lexer keeps /all/ whitespace, including comments.
     SourceLocation StartLoc = Tok.getLocation();
     Callbacks->MoveToLine(StartLoc.getLocWithOffset(Tok.getLength()));
-  } else if (Tok.is(tok::annot_module_include)) {
+  } else if (Tok.is(tok::annot_module_include) ||
+             Tok.is(tok::annot_module_begin) ||
+             Tok.is(tok::annot_module_end)) {
     // PrintPPOutputPPCallbacks::InclusionDirective handles producing
     // appropriate output here. Ignore this token entirely.
     PP.Lex(Tok);
diff --git a/clang/lib/Lex/PPDirectives.cpp b/clang/lib/Lex/PPDirectives.cpp
index 86c508fe9b1..1e3602e84f4 100644
--- a/clang/lib/Lex/PPDirectives.cpp
+++ b/clang/lib/Lex/PPDirectives.cpp
@@ -1389,6 +1389,19 @@ bool Preprocessor::ConcatenateIncludeName(
   return true;
 }
 
+/// \brief Push a token onto the token stream containing an annotation.
+static void EnterAnnotationToken(Preprocessor &PP,
+                                 SourceLocation Begin, SourceLocation End,
+                                 tok::TokenKind Kind, void *AnnotationVal) {
+  Token *Tok = new Token[1];
+  Tok[0].startToken();
+  Tok[0].setKind(Kind);
+  Tok[0].setLocation(Begin);
+  Tok[0].setAnnotationEndLoc(End);
+  Tok[0].setAnnotationValue(AnnotationVal);
+  PP.EnterTokenStream(Tok, 1, true, true);
+}
+
 /// HandleIncludeDirective - The "\#include" tokens have just been read, read
 /// the file to be included from the lexer, then include it! This is a common
 /// routine with functionality shared between \#include, \#include_next and
@@ -1590,7 +1603,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
     // include directive maps to.
     bool BuildingImportedModule
       = Path[0].first->getName() == getLangOpts().CurrentModule;
-    
+
     if (!BuildingImportedModule && getLangOpts().ObjC2) {
       // If we're not building the imported module, warn that we're going
       // to automatically turn this inclusion directive into a module import.
@@ -1639,13 +1652,8 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
       // make the module visible.
      // FIXME: Produce this as the current token directly, rather than
      // allocating a new token for it.
-      Token *Tok = new Token[1];
-      Tok[0].startToken();
-      Tok[0].setKind(tok::annot_module_include);
-      Tok[0].setLocation(HashLoc);
-      Tok[0].setAnnotationEndLoc(End);
-      Tok[0].setAnnotationValue(Imported);
-      EnterTokenStream(Tok, 1, true, true);
+      EnterAnnotationToken(*this, HashLoc, End, tok::annot_module_include,
+                           Imported);
     }
     return;
   }
@@ -1692,8 +1700,23 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
   FileID FID = SourceMgr.createFileID(File, IncludePos, FileCharacter);
   assert(!FID.isInvalid() && "Expected valid file ID");
 
-  // Finally, if all is good, enter the new file!
-  EnterSourceFile(FID, CurDir, FilenameTok.getLocation());
+  // Determine if we're switching to building a new submodule, and which one.
+  ModuleMap::KnownHeader BuildingModule;
+  if (getLangOpts().Modules && !getLangOpts().CurrentModule.empty()) {
+    Module *RequestingModule = getModuleForLocation(FilenameLoc);
+    BuildingModule =
+        HeaderInfo.getModuleMap().findModuleForHeader(File, RequestingModule);
+  }
+
+  // If all is good, enter the new file!
+  EnterSourceFile(FID, CurDir, FilenameTok.getLocation(),
+                  static_cast<bool>(BuildingModule));
+
+  // If we're walking into another part of the same module, let the parser
+  // know that any future declarations are within that other submodule.
+  if (BuildingModule)
+    EnterAnnotationToken(*this, HashLoc, End, tok::annot_module_begin,
+                         BuildingModule.getModule());
 }
 
 /// HandleIncludeNextDirective - Implements \#include_next.
diff --git a/clang/lib/Lex/PPLexerChange.cpp b/clang/lib/Lex/PPLexerChange.cpp
index 1f970a4450d..3b3c5a91920 100644
--- a/clang/lib/Lex/PPLexerChange.cpp
+++ b/clang/lib/Lex/PPLexerChange.cpp
@@ -69,7 +69,7 @@ PreprocessorLexer *Preprocessor::getCurrentFileLexer() const {
 /// EnterSourceFile - Add a source file to the top of the include stack and
 /// start lexing tokens from it instead of the current buffer.
 void Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir,
-                                   SourceLocation Loc) {
+                                   SourceLocation Loc, bool IsSubmodule) {
   assert(!CurTokenLexer && "Cannot #include a file inside a macro!");
   ++NumEnteredSourceFiles;
 
@@ -78,7 +78,7 @@ void Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir,
 
   if (PTH) {
     if (PTHLexer *PL = PTH->CreateLexer(FID)) {
-      EnterSourceFileWithPTH(PL, CurDir);
+      EnterSourceFileWithPTH(PL, CurDir, IsSubmodule);
       return;
     }
   }
@@ -101,14 +101,16 @@ void Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir,
         CodeCompletionFileLoc.getLocWithOffset(CodeCompletionOffset);
   }
 
-  EnterSourceFileWithLexer(new Lexer(FID, InputFile, *this), CurDir);
+  EnterSourceFileWithLexer(new Lexer(FID, InputFile, *this), CurDir,
+                           IsSubmodule);
   return;
 }
 
 /// EnterSourceFileWithLexer - Add a source file to the top of the include stack
 /// and start lexing tokens from it instead of the current buffer.
 void Preprocessor::EnterSourceFileWithLexer(Lexer *TheLexer,
-                                            const DirectoryLookup *CurDir) {
+                                            const DirectoryLookup *CurDir,
+                                            bool IsSubmodule) {
 
   // Add the current lexer to the include stack.
   if (CurPPLexer || CurTokenLexer)
@@ -117,6 +119,7 @@ void Preprocessor::EnterSourceFileWithLexer(Lexer *TheLexer,
   CurLexer.reset(TheLexer);
   CurPPLexer = TheLexer;
   CurDirLookup = CurDir;
+  CurIsSubmodule = IsSubmodule;
   if (CurLexerKind != CLK_LexAfterModuleImport)
     CurLexerKind = CLK_Lexer;
 
@@ -133,7 +136,8 @@ void Preprocessor::EnterSourceFileWithLexer(Lexer *TheLexer,
 /// EnterSourceFileWithPTH - Add a source file to the top of the include stack
 /// and start getting tokens from it using the PTH cache.
 void Preprocessor::EnterSourceFileWithPTH(PTHLexer *PL,
-                                          const DirectoryLookup *CurDir) {
+                                          const DirectoryLookup *CurDir,
+                                          bool IsSubmodule) {
   if (CurPPLexer || CurTokenLexer)
     PushIncludeMacroStack();
 
@@ -141,6 +145,7 @@ void Preprocessor::EnterSourceFileWithPTH(PTHLexer *PL,
   CurDirLookup = CurDir;
   CurPTHLexer.reset(PL);
   CurPPLexer = CurPTHLexer.get();
+  CurIsSubmodule = IsSubmodule;
 
   if (CurLexerKind != CLK_LexAfterModuleImport)
     CurLexerKind = CLK_PTHLexer;
@@ -244,6 +249,29 @@ void Preprocessor::PropagateLineStartLeadingSpaceInfo(Token &Result) {
   //  but it might if they're empty?
 }
 
+/// \brief Determine the location to use as the end of the buffer for a lexer.
+///
+/// If the file ends with a newline, form the EOF token on the newline itself,
+/// rather than "on the line following it", which doesn't exist. This makes
+/// diagnostics relating to the end of file include the last file that the user
+/// actually typed, which is goodness.
+const char *Preprocessor::getCurLexerEndPos() {
+  const char *EndPos = CurLexer->BufferEnd;
+  if (EndPos != CurLexer->BufferStart &&
+      (EndPos[-1] == '\n' || EndPos[-1] == '\r')) {
+    --EndPos;
+
+    // Handle \n\r and \r\n:
+    if (EndPos != CurLexer->BufferStart &&
+        (EndPos[-1] == '\n' || EndPos[-1] == '\r') &&
+        EndPos[-1] != EndPos[0])
+      --EndPos;
+  }
+
+  return EndPos;
+}
+
+
 /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
 /// the current file. This either returns the EOF token or pops a level off
 /// the include stack and keeps going.
@@ -342,7 +370,19 @@ bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
     FileID ExitedFID;
     if (Callbacks && !isEndOfMacro && CurPPLexer)
       ExitedFID = CurPPLexer->getFileID();
-    
+
+    // If this file corresponded to a submodule, notify the parser that we've
+    // left that submodule.
+    bool LeavingSubmodule = CurIsSubmodule && CurLexer;
+    if (LeavingSubmodule) {
+      const char *EndPos = getCurLexerEndPos();
+      Result.startToken();
+      CurLexer->BufferPtr = EndPos;
+      CurLexer->FormTokenWithChars(Result, EndPos, tok::annot_module_end);
+      Result.setAnnotationEndLoc(Result.getLocation());
+      Result.setAnnotationValue(0);
+    }
+
     // We're done with the #included file.
     RemoveTopOfLexerStack();
 
@@ -357,27 +397,13 @@ bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
                              PPCallbacks::ExitFile, FileType, ExitedFID);
     }
 
-    // Client should lex another token.
-    return false;
+    // Client should lex another token unless we generated an EOM.
+    return LeavingSubmodule;
   }
 
-  // If the file ends with a newline, form the EOF token on the newline itself,
-  // rather than "on the line following it", which doesn't exist. This makes
-  // diagnostics relating to the end of file include the last file that the user
-  // actually typed, which is goodness.
+  // If this is the end of the main file, form an EOF token.
   if (CurLexer) {
-    const char *EndPos = CurLexer->BufferEnd;
-    if (EndPos != CurLexer->BufferStart &&
-        (EndPos[-1] == '\n' || EndPos[-1] == '\r')) {
-      --EndPos;
-
-      // Handle \n\r and \r\n:
-      if (EndPos != CurLexer->BufferStart &&
-          (EndPos[-1] == '\n' || EndPos[-1] == '\r') &&
-          EndPos[-1] != EndPos[0])
-        --EndPos;
-    }
-
+    const char *EndPos = getCurLexerEndPos();
     Result.startToken();
     CurLexer->BufferPtr = EndPos;
     CurLexer->FormTokenWithChars(Result, EndPos, tok::eof);
diff --git a/clang/lib/Lex/Preprocessor.cpp b/clang/lib/Lex/Preprocessor.cpp
index b500efee4e6..92aeb5c3e5f 100644
--- a/clang/lib/Lex/Preprocessor.cpp
+++ b/clang/lib/Lex/Preprocessor.cpp
@@ -67,8 +67,8 @@ Preprocessor::Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts,
     CodeComplete(0), CodeCompletionFile(0), CodeCompletionOffset(0),
     LastTokenWasAt(false), ModuleImportExpectsIdentifier(false),
     CodeCompletionReached(0), SkipMainFilePreamble(0, true), CurPPLexer(0),
-    CurDirLookup(0), CurLexerKind(CLK_Lexer), Callbacks(0),
-    MacroArgCache(0), Record(0), MIChainHead(0), MICache(0),
+    CurDirLookup(0), CurLexerKind(CLK_Lexer), CurIsSubmodule(false),
+    Callbacks(0), MacroArgCache(0), Record(0), MIChainHead(0), MICache(0),
     DeserialMIChainHead(0) {
   OwnsHeaderSearch = OwnsHeaders;
 
diff --git a/clang/lib/Parse/ParseCXXInlineMethods.cpp b/clang/lib/Parse/ParseCXXInlineMethods.cpp
index 77923051653..9a69bec1679 100644
--- a/clang/lib/Parse/ParseCXXInlineMethods.cpp
+++ b/clang/lib/Parse/ParseCXXInlineMethods.cpp
@@ -580,6 +580,9 @@ bool Parser::ConsumeAndStoreUntil(tok::TokenKind T1, tok::TokenKind T2,
 
     switch (Tok.getKind()) {
     case tok::eof:
+    case tok::annot_module_begin:
+    case tok::annot_module_end:
+    case tok::annot_module_include:
       // Ran out of tokens.
      return false;
 
@@ -965,6 +968,9 @@ bool Parser::ConsumeAndStoreInitializer(CachedTokens &Toks,
      goto consume_token;
 
    case tok::eof:
+    case tok::annot_module_begin:
+    case tok::annot_module_end:
+    case tok::annot_module_include:
      // Ran out of tokens.
      return false;
 
diff --git a/clang/lib/Parse/ParseDecl.cpp b/clang/lib/Parse/ParseDecl.cpp
index b93979127a5..7185de6fede 100644
--- a/clang/lib/Parse/ParseDecl.cpp
+++ b/clang/lib/Parse/ParseDecl.cpp
@@ -1561,6 +1561,9 @@ void Parser::SkipMalformedDecl() {
       break;
 
     case tok::eof:
+    case tok::annot_module_begin:
+    case tok::annot_module_end:
+    case tok::annot_module_include:
       return;
 
     default:
@@ -3371,7 +3374,7 @@ void Parser::ParseStructUnionBody(SourceLocation RecordLoc,
   SmallVector<Decl *, 32> FieldDecls;
 
   // While we still have something to read, read the declarations in the struct.
-  while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+  while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
     // Each iteration of this loop reads one struct-declaration.
 
     // Check for extraneous top-level semicolon.
diff --git a/clang/lib/Parse/ParseDeclCXX.cpp b/clang/lib/Parse/ParseDeclCXX.cpp
index 382ed043abf..d2d9b220fd7 100644
--- a/clang/lib/Parse/ParseDeclCXX.cpp
+++ b/clang/lib/Parse/ParseDeclCXX.cpp
@@ -195,7 +195,7 @@ void Parser::ParseInnerNamespace(std::vector<SourceLocation>& IdentLoc,
                                  ParsedAttributes& attrs,
                                  BalancedDelimiterTracker &Tracker) {
   if (index == Ident.size()) {
-    while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+    while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
       ParsedAttributesWithRange attrs(AttrFactory);
       MaybeParseCXX11Attributes(attrs);
       MaybeParseMicrosoftAttributes(attrs);
@@ -318,7 +318,7 @@ Decl *Parser::ParseLinkage(ParsingDeclSpec &DS, unsigned Context) {
   BalancedDelimiterTracker T(*this, tok::l_brace);
   T.consumeOpen();
 
-  while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+  while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
     ParsedAttributesWithRange attrs(AttrFactory);
     MaybeParseCXX11Attributes(attrs);
     MaybeParseMicrosoftAttributes(attrs);
@@ -2452,7 +2452,7 @@ ExprResult Parser::ParseCXXMemberInitializer(Decl *D, bool IsFunction,
     // a top-level comma always ends the initializer expression.
     const Token &Next = NextToken();
     if (IsFunction || Next.is(tok::semi) || Next.is(tok::comma) ||
-         Next.is(tok::eof)) {
+        Next.is(tok::eof)) {
       if (IsFunction)
         Diag(ConsumeToken(), diag::err_default_delete_in_multiple_declaration)
           << 1 /* delete */;
@@ -2597,7 +2597,7 @@ void Parser::ParseCXXMemberSpecification(SourceLocation RecordLoc,
 
   if (TagDecl) {
     // While we still have something to read, read the member-declarations.
-    while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+    while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
       // Each iteration of this loop reads one member-declaration.
 
       if (getLangOpts().MicrosoftExt && (Tok.is(tok::kw___if_exists) ||
@@ -3420,7 +3420,7 @@ void Parser::ParseMicrosoftIfExistsClassDeclaration(DeclSpec::TST TagType,
     return;
   }
 
-  while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+  while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
     // __if_exists, __if_not_exists can nest.
     if ((Tok.is(tok::kw___if_exists) || Tok.is(tok::kw___if_not_exists))) {
       ParseMicrosoftIfExistsClassDeclaration((DeclSpec::TST)TagType, CurAS);
diff --git a/clang/lib/Parse/ParseInit.cpp b/clang/lib/Parse/ParseInit.cpp
index 37f74bbcd51..56d8edcdb41 100644
--- a/clang/lib/Parse/ParseInit.cpp
+++ b/clang/lib/Parse/ParseInit.cpp
@@ -512,7 +512,7 @@ bool Parser::ParseMicrosoftIfExistsBraceInitializer(ExprVector &InitExprs,
     return false;
   }
 
-  while (Tok.isNot(tok::eof)) {
+  while (!isEofOrEom()) {
     trailingComma = false;
     // If we know that this cannot be a designation, just parse the nested
     // initializer directly.
diff --git a/clang/lib/Parse/ParseObjc.cpp b/clang/lib/Parse/ParseObjc.cpp
index 86f38cfee37..238474b93a4 100644
--- a/clang/lib/Parse/ParseObjc.cpp
+++ b/clang/lib/Parse/ParseObjc.cpp
@@ -423,7 +423,7 @@ void Parser::ParseObjCInterfaceDeclList(tok::ObjCKeywordKind contextKey,
     }
 
     // If we got to the end of the file, exit the loop.
-    if (Tok.is(tok::eof))
+    if (isEofOrEom())
       break;
 
     // Code completion within an Objective-C interface.
@@ -1289,7 +1289,7 @@ void Parser::ParseObjCClassInstanceVariables(Decl *interfaceDecl,
   BalancedDelimiterTracker T(*this, tok::l_brace);
   T.consumeOpen();
   // While we still have something to read, read the instance variables.
-  while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+  while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
     // Each iteration of this loop reads one objc-instance-variable-decl.
 
     // Check for extraneous top-level semicolon.
@@ -1582,7 +1582,7 @@ Parser::ParseObjCAtImplementationDeclaration(SourceLocation AtLoc) {
 
   {
     ObjCImplParsingDataRAII ObjCImplParsing(*this, ObjCImpDecl);
-    while (!ObjCImplParsing.isFinished() && Tok.isNot(tok::eof)) {
+    while (!ObjCImplParsing.isFinished() && !isEofOrEom()) {
       ParsedAttributesWithRange attrs(AttrFactory);
       MaybeParseCXX11Attributes(attrs);
       MaybeParseMicrosoftAttributes(attrs);
@@ -1612,7 +1612,7 @@ Parser::ParseObjCAtEndDeclaration(SourceRange atEnd) {
 Parser::ObjCImplParsingDataRAII::~ObjCImplParsingDataRAII() {
   if (!Finished) {
     finish(P.Tok.getLocation());
-    if (P.Tok.is(tok::eof)) {
+    if (P.isEofOrEom()) {
       P.Diag(P.Tok, diag::err_objc_missing_end)
           << FixItHint::CreateInsertion(P.Tok.getLocation(), "\n@end\n");
       P.Diag(Dcl->getLocStart(), diag::note_objc_container_start)
diff --git a/clang/lib/Parse/ParseStmt.cpp b/clang/lib/Parse/ParseStmt.cpp
index d1f2138db48..5f939fc354a 100644
--- a/clang/lib/Parse/ParseStmt.cpp
+++ b/clang/lib/Parse/ParseStmt.cpp
@@ -890,7 +890,7 @@ StmtResult Parser::ParseCompoundStatementBody(bool isStmtExpr) {
     Stmts.push_back(R.release());
   }
 
-  while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+  while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
     if (Tok.is(tok::annot_pragma_unused)) {
       HandlePragmaUnused();
       continue;
@@ -2058,7 +2058,7 @@ StmtResult Parser::ParseMicrosoftAsmStatement(SourceLocation AsmLoc) {
   SourceLocation TokLoc = Tok.getLocation();
   do {
     // If we hit EOF, we're done, period.
-    if (Tok.is(tok::eof))
+    if (isEofOrEom())
       break;
 
     if (!InAsmComment && Tok.is(tok::semi)) {
diff --git a/clang/lib/Parse/Parser.cpp b/clang/lib/Parse/Parser.cpp
index 0f18abac75c..cacff4a3eeb 100644
--- a/clang/lib/Parse/Parser.cpp
+++ b/clang/lib/Parse/Parser.cpp
@@ -288,7 +288,7 @@ bool Parser::SkipUntil(ArrayRef<tok::TokenKind> Toks, SkipUntilFlags Flags) {
     if (Toks.size() == 1 && Toks[0] == tok::eof &&
         !HasFlagsSet(Flags, StopAtSemi) &&
         !HasFlagsSet(Flags, StopAtCodeCompletion)) {
-      while (Tok.getKind() != tok::eof)
+      while (Tok.isNot(tok::eof))
        ConsumeAnyToken();
      return true;
    }
@@ -297,7 +297,15 @@ bool Parser::SkipUntil(ArrayRef<tok::TokenKind> Toks, SkipUntilFlags Flags) {
     case tok::eof:
       // Ran out of tokens.
       return false;
-      
+
+    case tok::annot_module_begin:
+    case tok::annot_module_end:
+    case tok::annot_module_include:
+      // Stop before we change submodules. They generally indicate a "good"
+      // place to pick up parsing again (except in the special case where
+      // we're trying to skip to EOF).
+      return false;
+
     case tok::code_completion:
       if (!HasFlagsSet(Flags, StopAtCodeCompletion))
         ConsumeToken();
@@ -574,10 +582,12 @@ namespace {
 bool Parser::ParseTopLevelDecl(DeclGroupPtrTy &Result) {
   DestroyTemplateIdAnnotationsRAIIObj CleanupRAII(TemplateIds);
 
-  // Skip over the EOF token, flagging end of previous input for incremental 
+  // Skip over the EOF token, flagging end of previous input for incremental
   // processing
-  if (PP.isIncrementalProcessingEnabled() && Tok.is(tok::eof))
+  if (PP.isIncrementalProcessingEnabled() && Tok.is(tok::eof)) {
     ConsumeToken();
+    return false;
+  }
 
   Result = DeclGroupPtrTy();
   switch (Tok.getKind()) {
@@ -592,6 +602,12 @@ bool Parser::ParseTopLevelDecl(DeclGroupPtrTy &Result) {
     ConsumeToken();
     return false;
 
+  case tok::annot_module_begin:
+  case tok::annot_module_end:
+    // FIXME: Update visibility based on the submodule we're in.
+    ConsumeToken();
+    return false;
+
   case tok::eof:
     // Late template parsing can begin.
     if (getLangOpts().DelayedTemplateParsing)
@@ -1917,14 +1933,15 @@ void Parser::ParseMicrosoftIfExistsExternalDeclaration() {
   }
 
   // Parse the declarations.
-  while (Tok.isNot(tok::r_brace) && Tok.isNot(tok::eof)) {
+  // FIXME: Support module import within __if_exists?
+  while (Tok.isNot(tok::r_brace) && !isEofOrEom()) {
     ParsedAttributesWithRange attrs(AttrFactory);
     MaybeParseCXX11Attributes(attrs);
     MaybeParseMicrosoftAttributes(attrs);
     DeclGroupPtrTy Result = ParseExternalDeclaration(attrs);
     if (Result && !getCurScope()->getParent())
       Actions.getASTConsumer().HandleTopLevelDecl(Result.get());
-  } 
+  }
   Braces.consumeClose();
 }
 
@@ -1980,8 +1997,8 @@ bool BalancedDelimiterTracker::diagnoseOverflow() {
   P.Diag(P.Tok, diag::err_bracket_depth_exceeded)
     << P.getLangOpts().BracketDepth;
   P.Diag(P.Tok, diag::note_bracket_depth);
-  P.SkipUntil(tok::eof);
-  return true;  
+  P.cutOffParsing();
+  return true;
 }
 
 bool BalancedDelimiterTracker::expectAndConsume(unsigned DiagID,
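Note: the parser-side hunks above call an isEofOrEom() helper that is declared under clang/include (Parser.h) and therefore does not appear in this diffstat, which is limited to clang/lib. Judging from the token kinds that the new SkipUntil and ParseTopLevelDecl cases treat as stopping points, it is presumably a small inline Parser member along these lines (a sketch under that assumption, not the verbatim header change):

  // Sketch (assumption): true at end-of-file or at any module annotation
  // token, so skipping and recovery loops stop at a submodule boundary
  // instead of consuming the annotation.
  bool isEofOrEom() {
    tok::TokenKind Kind = Tok.getKind();
    return Kind == tok::eof || Kind == tok::annot_module_begin ||
           Kind == tok::annot_module_end || Kind == tok::annot_module_include;
  }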