//===--- PPLexerChange.cpp - Handle changing lexers in the preprocessor ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements pieces of the Preprocessor interface that manage the
// current lexer stack.
//
//===----------------------------------------------------------------------===//

#include "clang/Lex/Preprocessor.h"
#include "clang/Lex/HeaderSearch.h"
#include "clang/Lex/MacroInfo.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/SourceManager.h"
#include "llvm/Support/MemoryBuffer.h"

using namespace clang;

PPCallbacks::~PPCallbacks() {}

//===----------------------------------------------------------------------===//
// Miscellaneous Methods.
//===----------------------------------------------------------------------===//

/// isInPrimaryFile - Return true if we're in the top-level file, not in a
/// #include.  This looks through macro expansions and active _Pragma lexers.
bool Preprocessor::isInPrimaryFile() const {
  if (IsFileLexer())
    return IncludeMacroStack.empty();

  // If there are any stacked lexers, we're in a #include.
  assert(IsFileLexer(IncludeMacroStack[0]) &&
         "Top level include stack isn't our primary lexer?");
  for (unsigned i = 1, e = IncludeMacroStack.size(); i != e; ++i)
    if (IsFileLexer(IncludeMacroStack[i]))
      return false;
  return true;
}

/// getCurrentLexer - Return the current file lexer being lexed from.  Note
/// that this ignores any potentially active macro expansions and _Pragma
/// expansions going on at the time.
PreprocessorLexer *Preprocessor::getCurrentFileLexer() const {
  if (IsFileLexer())
    return CurPPLexer;

  // Look for a stacked lexer.
  for (unsigned i = IncludeMacroStack.size(); i != 0; --i) {
    const IncludeStackInfo& ISI = IncludeMacroStack[i-1];
    if (IsFileLexer(ISI))
      return ISI.ThePPLexer;
  }
  return 0;
}


//===----------------------------------------------------------------------===//
// Methods for Entering and Callbacks for leaving various contexts
//===----------------------------------------------------------------------===//

/// EnterSourceFile - Add a source file to the top of the include stack and
/// start lexing tokens from it instead of the current buffer.
void Preprocessor::EnterSourceFile(unsigned FileID,
                                   const DirectoryLookup *CurDir) {
  assert(CurTokenLexer == 0 && "Cannot #include a file inside a macro!");
  ++NumEnteredSourceFiles;

  if (MaxIncludeStackDepth < IncludeMacroStack.size())
    MaxIncludeStackDepth = IncludeMacroStack.size();

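  // The #if 1 path below is the normal route: build a full Lexer over the
  // file and push it onto the include stack with EnterSourceFileWithLexer.
  // The disabled #else path is an experimental alternative that pre-tokenizes
  // the buffer with a raw lexer and then drives a PTHLexer over the resulting
  // token vector.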
#if 1
  Lexer *TheLexer = new Lexer(SourceLocation::getFileLoc(FileID, 0), *this);
  EnterSourceFileWithLexer(TheLexer, CurDir);
#else
  const llvm::MemoryBuffer* B = getSourceManager().getBuffer(FileID);

  // Create a raw lexer.
  Lexer L(SourceLocation::getFileLoc(FileID, 0), getLangOptions(),
          B->getBufferStart(), B->getBufferEnd(), B);

  // Ignore whitespace.
  L.SetKeepWhitespaceMode(false);
  L.SetCommentRetentionState(false);

  // Lex the file, populating our data structures.
  std::vector<Token>* Tokens = new std::vector<Token>();
  Token Tok;

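  // Raw-lex the whole buffer into the Tokens vector.  Note that the
  // while-condition below uses the comma operator: every token (including
  // the final eof) is pushed into *Tokens before the eof test is evaluated.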
  do {
    L.LexFromRawLexer(Tok);

    if (Tok.is(tok::identifier)) {
      Tok.setIdentifierInfo(LookUpIdentifierInfo(Tok));
    }
    else if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
      // Special processing for #include.  Store the '#' token and lex
      // the next token.
      Tokens->push_back(Tok);
      L.LexFromRawLexer(Tok);

      // Did we see 'include'/'import'/'include_next'?
      if (!Tok.is(tok::identifier))
        continue;

      IdentifierInfo* II = LookUpIdentifierInfo(Tok);
      Tok.setIdentifierInfo(II);
      tok::PPKeywordKind K = II->getPPKeywordID();

      if (K == tok::pp_include || K == tok::pp_import ||
          K == tok::pp_include_next) {

        // Save the 'include' token.
        Tokens->push_back(Tok);

        // Lex the next token as an include string.
        L.ParsingPreprocessorDirective = true;
        L.LexIncludeFilename(Tok);
        L.ParsingPreprocessorDirective = false;

        if (Tok.is(tok::identifier))
          Tok.setIdentifierInfo(LookUpIdentifierInfo(Tok));
      }
    }
  }
  while (Tokens->push_back(Tok), Tok.isNot(tok::eof));

  if (CurPPLexer || CurTokenLexer)
    PushIncludeMacroStack();

  CurDirLookup = CurDir;
  SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
  CurPTHLexer.reset(new PTHLexer(*this, Loc, &(*Tokens)[0], Tokens->size()));
  CurPPLexer = CurPTHLexer.get();

  // Notify the client, if desired, that we are in a new source file.
  if (Callbacks) {
    SrcMgr::CharacteristicKind FileType =
      SourceMgr.getFileCharacteristic(CurPPLexer->getFileID());
    Callbacks->FileChanged(Loc, PPCallbacks::EnterFile, FileType);
  }
#endif
}

Chris Lattner8c32b1a2008-03-09 04:10:46 +0000150/// EnterSourceFile - Add a source file to the top of the include stack and
151/// start lexing tokens from it instead of the current buffer.
void Preprocessor::EnterSourceFileWithLexer(Lexer *TheLexer,
                                            const DirectoryLookup *CurDir) {

  // Add the current lexer to the include stack.
  if (CurPPLexer || CurTokenLexer)
    PushIncludeMacroStack();

  CurLexer.reset(TheLexer);
  CurPPLexer = TheLexer;
  CurDirLookup = CurDir;

  // Notify the client, if desired, that we are in a new source file.
  if (Callbacks && !CurLexer->Is_PragmaLexer) {
    SrcMgr::CharacteristicKind FileType =
      SourceMgr.getFileCharacteristic(CurLexer->getFileLoc());

    Callbacks->FileChanged(CurLexer->getFileLoc(),
                           PPCallbacks::EnterFile, FileType);
  }
}



/// EnterMacro - Add a Macro to the top of the include stack and start lexing
/// tokens from it instead of the current buffer.
void Preprocessor::EnterMacro(Token &Tok, MacroArgs *Args) {
  PushIncludeMacroStack();
  CurDirLookup = 0;

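  // Reuse a TokenLexer from the cache when one is available rather than
  // allocating a new one; HandleEndOfTokenLexer and RemoveTopOfLexerStack
  // return finished lexers to TokenLexerCache below.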
  if (NumCachedTokenLexers == 0) {
    CurTokenLexer.reset(new TokenLexer(Tok, Args, *this));
  } else {
    CurTokenLexer.reset(TokenLexerCache[--NumCachedTokenLexers]);
    CurTokenLexer->Init(Tok, Args);
  }
}

/// EnterTokenStream - Add a "macro" context to the top of the include stack,
/// which will cause the lexer to start returning the specified tokens.
///
/// If DisableMacroExpansion is true, tokens lexed from the token stream will
/// not be subject to further macro expansion.  Otherwise, these tokens will
/// be re-macro-expanded when/if expansion is enabled.
///
/// If OwnsTokens is false, this method assumes that the specified stream of
/// tokens has a permanent owner somewhere, so they do not need to be copied.
/// If it is true, it assumes the array of tokens is allocated with new[] and
/// must be freed.
///
void Preprocessor::EnterTokenStream(const Token *Toks, unsigned NumToks,
                                    bool DisableMacroExpansion,
                                    bool OwnsTokens) {
  // Save our current state.
  PushIncludeMacroStack();
  CurDirLookup = 0;

  // Create a macro expander to expand from the specified token stream.
  if (NumCachedTokenLexers == 0) {
    CurTokenLexer.reset(new TokenLexer(Toks, NumToks, DisableMacroExpansion,
                                       OwnsTokens, *this));
  } else {
    CurTokenLexer.reset(TokenLexerCache[--NumCachedTokenLexers]);
    CurTokenLexer->Init(Toks, NumToks, DisableMacroExpansion, OwnsTokens);
  }
}
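
// Illustrative sketch (not part of the original file): a hypothetical caller
// pushing a heap-allocated token array onto the preprocessor PP might look
// like this:
//
//   Token *Toks = new Token[NumToks];   // filled in by the caller
//   PP.EnterTokenStream(Toks, NumToks, /*DisableMacroExpansion=*/false,
//                       /*OwnsTokens=*/true);
//
// Per the comment above, OwnsTokens=true means the array was allocated with
// new[] and ownership passes to the preprocessor; with OwnsTokens=false the
// caller retains ownership and must keep the tokens alive while the stream
// can still be lexed from.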

/// HandleEndOfFile - This callback is invoked when the lexer hits the end of
/// the current file.  This either returns the EOF token or pops a level off
/// the include stack and keeps going.
bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
  assert(!CurTokenLexer &&
         "Ending a file when currently in a macro!");

  // See if this file had a controlling macro.
  if (CurPPLexer) {  // Not ending a macro, ignore it.
    if (const IdentifierInfo *ControllingMacro =
          CurPPLexer->MIOpt.GetControllingMacroAtEndOfFile()) {
      // Okay, this has a controlling macro, remember in PerFileInfo.
      if (const FileEntry *FE =
            SourceMgr.getFileEntryForID(CurPPLexer->getFileID()))
        HeaderInfo.SetFileControllingMacro(FE, ControllingMacro);
    }
  }

  // If this is a #include'd file, pop it off the include stack and continue
  // lexing the #includer file.
  if (!IncludeMacroStack.empty()) {
    // We're done with the #included file.
    RemoveTopOfLexerStack();

    // Notify the client, if desired, that we are in a new source file.
    if (Callbacks && !isEndOfMacro && CurPPLexer) {
      SrcMgr::CharacteristicKind FileType =
        SourceMgr.getFileCharacteristic(CurPPLexer->getFileID());

      if (CurLexer) {
        Callbacks->FileChanged(CurLexer->getSourceLocation(CurLexer->BufferPtr),
                               PPCallbacks::ExitFile, FileType);
      }
      else {
        // FIXME: Is it okay to use the location of 'Result' here?
        Callbacks->FileChanged(Result.getLocation(), PPCallbacks::ExitFile,
                               FileType);
      }
    }

    // Client should lex another token.
    return false;
  }

  // If the file ends with a newline, form the EOF token on the newline itself,
  // rather than "on the line following it", which doesn't exist.  This makes
  // diagnostics relating to the end of file include the last line that the
  // user actually typed, which is goodness.
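  // (For example, for a buffer ending in "int x;\n" the eof token lands on
  // that final '\n' rather than on a nonexistent line after it.)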
  if (CurLexer) {
    const char *EndPos = CurLexer->BufferEnd;
    if (EndPos != CurLexer->BufferStart &&
        (EndPos[-1] == '\n' || EndPos[-1] == '\r')) {
      --EndPos;

      // Handle \n\r and \r\n:
      if (EndPos != CurLexer->BufferStart &&
          (EndPos[-1] == '\n' || EndPos[-1] == '\r') &&
          EndPos[-1] != EndPos[0])
        --EndPos;
    }

    Result.startToken();
    CurLexer->BufferPtr = EndPos;
    CurLexer->FormTokenWithChars(Result, EndPos, tok::eof);

    // We're done with the #included file.
    CurLexer.reset();
  }
  else {
    CurPTHLexer->setEOF(Result);
    CurPTHLexer.reset();
  }

  CurPPLexer = 0;

  // This is the end of the top-level file.  If the diag::pp_macro_not_used
  // diagnostic is enabled, look for macros that have not been used.
  if (Diags.getDiagnosticLevel(diag::pp_macro_not_used) != Diagnostic::Ignored){
    for (llvm::DenseMap<IdentifierInfo*, MacroInfo*>::iterator I =
           Macros.begin(), E = Macros.end(); I != E; ++I) {
      if (!I->second->isUsed())
        Diag(I->second->getDefinitionLoc(), diag::pp_macro_not_used);
    }
  }
  return true;
}

/// HandleEndOfTokenLexer - This callback is invoked when the current TokenLexer
/// hits the end of its token stream.
bool Preprocessor::HandleEndOfTokenLexer(Token &Result) {
  assert(CurTokenLexer && !CurPPLexer &&
         "Ending a macro when currently in a #include file!");

  // Delete or cache the now-dead macro expander.
  if (NumCachedTokenLexers == TokenLexerCacheSize)
    CurTokenLexer.reset();
  else
    TokenLexerCache[NumCachedTokenLexers++] = CurTokenLexer.take();

  // Handle this like a #include file being popped off the stack.
  return HandleEndOfFile(Result, true);
}

/// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
/// lexer stack.  This should only be used in situations where the current
/// state of the top-of-stack lexer is unknown.
void Preprocessor::RemoveTopOfLexerStack() {
  assert(!IncludeMacroStack.empty() && "Ran out of stack entries to load");

  if (CurTokenLexer) {
    // Delete or cache the now-dead macro expander.
    if (NumCachedTokenLexers == TokenLexerCacheSize)
      CurTokenLexer.reset();
    else
      TokenLexerCache[NumCachedTokenLexers++] = CurTokenLexer.take();
  }

  PopIncludeMacroStack();
}

/// HandleMicrosoftCommentPaste - When the macro expander pastes together a
/// comment (/##/) in Microsoft mode, this method handles updating the current
/// state, returning the token on the next source line.
void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
  assert(CurTokenLexer && !CurPPLexer &&
         "Pasted comment can only be formed from macro");

  // We handle this by scanning for the closest real lexer, switching it to
  // raw mode and preprocessor mode.  This will cause it to return \n as an
  // explicit EOM token.
  PreprocessorLexer *FoundLexer = 0;
  bool LexerWasInPPMode = false;
  for (unsigned i = 0, e = IncludeMacroStack.size(); i != e; ++i) {
    IncludeStackInfo &ISI = *(IncludeMacroStack.end()-i-1);
    if (ISI.ThePPLexer == 0) continue;  // Scan for a real lexer.

    // Once we find a real lexer, mark it as raw mode (disabling macro
    // expansions) and preprocessor mode (return EOM).  We know that the lexer
    // was *not* in raw mode before, because the macro that the comment came
    // from was expanded.  However, it could have already been in preprocessor
    // mode (#if COMMENT) in which case we have to return it to that mode and
    // return EOM.
    FoundLexer = ISI.ThePPLexer;
    FoundLexer->LexingRawMode = true;
    LexerWasInPPMode = FoundLexer->ParsingPreprocessorDirective;
    FoundLexer->ParsingPreprocessorDirective = true;
    break;
  }

  // Okay, we either found and switched over the lexer, or we didn't find a
  // lexer.  In either case, finish off the macro the comment came from,
  // getting the next token.
  if (!HandleEndOfTokenLexer(Tok)) Lex(Tok);

  // Discard comments as long as we don't have EOF or EOM.  This 'comments
  // out' the rest of the line, including any tokens that came from other
  // macros that were active, as in:
  //   #define submacro a COMMENT b
  //   submacro c
  // which should lex to 'a' only: 'b' and 'c' should be removed.
  while (Tok.isNot(tok::eom) && Tok.isNot(tok::eof))
    Lex(Tok);

  // If we got an eom token, then we successfully found the end of the line.
  if (Tok.is(tok::eom)) {
    assert(FoundLexer && "Can't get end of line without an active lexer");
    // Restore the lexer back to normal mode instead of raw mode.
    FoundLexer->LexingRawMode = false;

    // If the lexer was already in preprocessor mode, just return the EOM token
    // to finish the preprocessor line.
    if (LexerWasInPPMode) return;

    // Otherwise, switch out of PP mode and return the next lexed token.
    FoundLexer->ParsingPreprocessorDirective = false;
    return Lex(Tok);
  }

  // If we got an EOF token, then we reached the end of the token stream but
  // didn't find an explicit \n.  This can only happen if there was no lexer
  // active (an active lexer would return EOM at EOF if there was no \n in
  // preprocessor directive mode), so just return EOF as our token.
  assert(!FoundLexer && "Lexer should return EOM before EOF in PP mode");
}