When creating raw tokens for the PTHLexer, specially handle angled strings for #include directives.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@59840 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/Lex/PPLexerChange.cpp b/lib/Lex/PPLexerChange.cpp
index a12309a..da87749 100644
--- a/lib/Lex/PPLexerChange.cpp
+++ b/lib/Lex/PPLexerChange.cpp
@@ -90,18 +90,42 @@
// Lex the file, populating our data structures.
std::vector<Token>* Tokens = new std::vector<Token>();
- Token Tok;
+ Token Tok;
do {
L.LexFromRawLexer(Tok);
- if (Tok.is(tok::identifier))
+ if (Tok.is(tok::identifier)) {
Tok.setIdentifierInfo(LookUpIdentifierInfo(Tok));
-
- // Store the token.
- Tokens->push_back(Tok);
+ }
+ else if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
+ // Special processing for #include. Store the '#' token and lex
+ // the next token.
+ Tokens->push_back(Tok);
+ L.LexFromRawLexer(Tok);
+
+ // Did we see 'include'/'import'/'include_next'?
+ if (!Tok.is(tok::identifier))
+ continue;
+
+ IdentifierInfo* II = LookUpIdentifierInfo(Tok);
+ Tok.setIdentifierInfo(II);
+ tok::PPKeywordKind K = II->getPPKeywordID();
+
+ if (K == tok::pp_include || K == tok::pp_import ||
+ K == tok::pp_include_next) {
+
+ // Save the 'include' token.
+ Tokens->push_back(Tok);
+
+ // Lex the next token as an include string.
+ L.ParsingPreprocessorDirective = true;
+ L.LexIncludeFilename(Tok);
+ L.ParsingPreprocessorDirective = false;
+ }
+ }
}
- while (Tok.isNot(tok::eof));
+ while (Tokens->push_back(Tok), Tok.isNot(tok::eof));
if (CurPPLexer || CurTokenLexer)
PushIncludeMacroStack();