//===--- MacroExpander.cpp - Lex from a macro expansion ------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by Chris Lattner and is distributed under
// the University of Illinois Open Source License.  See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the MacroExpander interface.
//
//===----------------------------------------------------------------------===//

#include "clang/Lex/MacroExpander.h"
#include "clang/Lex/MacroInfo.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/Diagnostic.h"
using namespace llvm;
using namespace clang;

//===----------------------------------------------------------------------===//
// MacroFormalArgs Implementation
//===----------------------------------------------------------------------===//

MacroFormalArgs::MacroFormalArgs(const MacroInfo *MI) {
  assert(MI->isFunctionLike() &&
         "Can't have formal args for an object-like macro!");
  // Reserve space for arguments to avoid reallocation.
  unsigned NumArgs = MI->getNumArgs();
  if (MI->isC99Varargs() || MI->isGNUVarargs())
    NumArgs += 3;   // Varargs can have more than this; this is just a guess.

  ArgTokens.reserve(NumArgs);
}

/// StringifyArgument - Implement C99 6.10.3.2p2, converting a sequence of
/// tokens into the literal string token that should be produced by the C #
/// preprocessor operator.
///
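/// For example, stringifying the argument tokens [ a  "b" ] produces the
/// single string-literal token ["a \"b\""]: interior whitespace collapses to
/// a single space, and embedded quotes and backslashes are escaped.
///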
static LexerToken StringifyArgument(const std::vector<LexerToken> &Toks,
                                    Preprocessor &PP, bool Charify = false) {
  LexerToken Tok;
  Tok.StartToken();
  Tok.SetKind(tok::string_literal);

  // Stringify all the tokens.
  std::string Result = "\"";
  for (unsigned i = 0, e = Toks.size(); i != e; ++i) {
    const LexerToken &Tok = Toks[i];
    // FIXME: Optimize this.
    if (i != 0 && Tok.hasLeadingSpace())
      Result += ' ';

    // If this is a string or character constant, escape the token as specified
    // by 6.10.3.2p2.
    if (Tok.getKind() == tok::string_literal ||   // "foo" and L"foo".
        Tok.getKind() == tok::char_constant) {    // 'x' and L'x'.
      Result += Lexer::Stringify(PP.getSpelling(Tok));
    } else {
      // Otherwise, just append the token.
      Result += PP.getSpelling(Tok);
    }
  }

  // If the last character of the string is a \, and if it isn't escaped, this
  // is an invalid string literal; diagnose it as specified in C99.
  if (Result[Result.size()-1] == '\\') {
    // Count the number of consecutive \ characters.  If even, they are just
    // escaped backslashes; otherwise it's an error.
    unsigned FirstNonSlash = Result.size()-2;
    // Guaranteed to find the starting " if nothing else.
    while (Result[FirstNonSlash] == '\\')
      --FirstNonSlash;
    if ((Result.size()-1-FirstNonSlash) & 1) {
      // Diagnose errors for things like: #define F(X) #X   /   F(\)
      PP.Diag(Toks.back(), diag::pp_invalid_string_literal);
      Result.erase(Result.end()-1);   // remove one of the \'s.
    }
  }
  Result += '"';

  // If this is the charify operation and the result is not a legal character
  // constant, diagnose it.
  if (Charify) {
    // First step, turn double quotes into single quotes:
    Result[0] = '\'';
    Result[Result.size()-1] = '\'';

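    // After the quote swap, the only legal forms are ' ' (empty), 'c', or a
    // two-character escape like '\n'; anything else is diagnosed below.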
    // Check for a bogus character.
    bool isBad = false;
    if (Result.size() == 2) {
      Result = "' '";   // #@empty -> ' '.
    } else if (Result.size() == 3) {
      isBad = Result[1] == '\'';   // ''' is not legal. '\' already fixed above.
    } else {
      isBad = (Result.size() != 4 || Result[1] != '\\');   // Not '\x'.
    }

    if (isBad) {
      assert(!Toks.empty() && "No tokens to charify?");
      PP.Diag(Toks[0], diag::err_invalid_character_to_charify);
      Result = "' '";
    }
  }

  Tok.SetLength(Result.size());
  Tok.SetLocation(PP.CreateString(&Result[0], Result.size()));
  return Tok;
}

/// getStringifiedArgument - Compute, cache, and return the specified argument
/// that has been 'stringified' as required by the # operator.
const LexerToken &MacroFormalArgs::getStringifiedArgument(unsigned ArgNo,
                                                           Preprocessor &PP) {
  assert(ArgNo < ArgTokens.size() && "Invalid argument number!");
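  // Lazily create the cache of stringified arguments the first time any
  // argument is stringified.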
  if (StringifiedArgs.empty()) {
    StringifiedArgs.resize(ArgTokens.size());
    memset(&StringifiedArgs[0], 0, sizeof(StringifiedArgs[0])*ArgTokens.size());
  }
  if (StringifiedArgs[ArgNo].getKind() != tok::string_literal)
    StringifiedArgs[ArgNo] = StringifyArgument(ArgTokens[ArgNo], PP);
  return StringifiedArgs[ArgNo];
}

//===----------------------------------------------------------------------===//
// MacroExpander Implementation
//===----------------------------------------------------------------------===//

MacroExpander::MacroExpander(LexerToken &Tok, MacroFormalArgs *Formals,
                             Preprocessor &pp)
  : Macro(*Tok.getIdentifierInfo()->getMacroInfo()),
    FormalArgs(Formals), PP(pp), CurToken(0),
    InstantiateLoc(Tok.getLocation()),
    AtStartOfLine(Tok.isAtStartOfLine()),
    HasLeadingSpace(Tok.hasLeadingSpace()) {
  MacroTokens = &Macro.getReplacementTokens();

  // If this is a function-like macro, expand the arguments and change
  // MacroTokens to point to the expanded tokens.
  if (Macro.isFunctionLike() && Macro.getNumArgs())
    ExpandFunctionArguments();
}

MacroExpander::~MacroExpander() {
  // If this was a function-like macro that actually uses its arguments, delete
  // the expanded tokens.
  if (MacroTokens != &Macro.getReplacementTokens())
    delete MacroTokens;

  // MacroExpander owns its formal arguments.
  delete FormalArgs;
}

/// ExpandFunctionArguments - Expand the arguments of a function-like macro so
/// that we can quickly return preexpanded tokens from MacroTokens.
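///
/// For example, with "#define STR(x) #x", the replacement tokens "# x" are
/// pre-expanded here into the single stringified token for the actual
/// argument passed to STR.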
void MacroExpander::ExpandFunctionArguments() {
  std::vector<LexerToken> ResultToks;

  // Loop through the tokens in MacroTokens, expanding them into ResultToks.
  // Keep track of whether we change anything: if not, there is no need to keep
  // the copy; if so, we install the newly expanded sequence as MacroTokens.
  bool MadeChange = false;
  for (unsigned i = 0, e = MacroTokens->size(); i != e; ++i) {
    // If we find the stringify (#) or charify (#@) operator, stringify the
    // corresponding argument.  The preprocessor already verified that the
    // following token is a macro argument name when the #define was parsed.
    const LexerToken &CurTok = (*MacroTokens)[i];
    if (CurTok.getKind() == tok::hash || CurTok.getKind() == tok::hashat) {
      int ArgNo = Macro.getArgumentNum((*MacroTokens)[i+1].getIdentifierInfo());
      assert(ArgNo != -1 && "Token following # is not an argument?");

      if (CurTok.getKind() == tok::hash)   // Stringify
        ResultToks.push_back(FormalArgs->getStringifiedArgument(ArgNo, PP));
      else {
        // 'charify': don't bother caching these.
        ResultToks.push_back(StringifyArgument(
                               FormalArgs->getUnexpArgument(ArgNo), PP, true));
      }

      // The leading-space flag of the stringified/charified token is set to
      // match that of the #/#@ operator itself.
      if (CurTok.hasLeadingSpace())
        ResultToks.back().SetFlag(LexerToken::LeadingSpace);

      MadeChange = true;
      ++i;   // Skip the argument name.
    } else {
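      // Not a # or #@ operator; copy the token through unchanged.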
      ResultToks.push_back(CurTok);
    }
  }

  // If anything changed, install this as the new MacroTokens list.
  if (MadeChange) {
    // This is deleted in the dtor.
    std::vector<LexerToken> *Res = new std::vector<LexerToken>();
    Res->swap(ResultToks);
    MacroTokens = Res;
  }
}

/// Lex - Lex and return a token from this macro stream.
///
void MacroExpander::Lex(LexerToken &Tok) {
  // If we are lexing off the end of the macro, pop this macro off the
  // expansion stack.
  if (isAtEnd())
    return PP.HandleEndOfMacro(Tok);

  // Get the next token to return.
  Tok = (*MacroTokens)[CurToken++];

  // The token's current location indicates where the token was lexed from.  We
  // need this information to compute the spelling of the token, but any
  // diagnostics for the expanded token should appear as if they came from
  // InstantiateLoc.  Pull this information together into a new SourceLocation
  // that captures all of this.
  Tok.SetLocation(PP.getSourceManager().getInstantiationLoc(Tok.getLocation(),
                                                            InstantiateLoc));

  // If this is the first token, set the lexical properties of the token to
  // match the lexical properties of the macro identifier.
  if (CurToken == 1) {
    Tok.SetFlagValue(LexerToken::StartOfLine , AtStartOfLine);
    Tok.SetFlagValue(LexerToken::LeadingSpace, HasLeadingSpace);
  }

  // Handle recursive expansion!
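  // If the token we just produced is an identifier, it may itself name a macro
  // that needs expansion; let the preprocessor decide and handle it.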
  if (Tok.getIdentifierInfo())
    return PP.HandleIdentifier(Tok);

  // Otherwise, return a normal token.
}

/// isNextTokenLParen - If the next token lexed will pop this macro off the
/// expansion stack, return 2.  If the next unexpanded token is a '(', return
/// 1; otherwise, return 0.
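///
/// This lets the preprocessor decide whether a function-like macro name is
/// actually being invoked, i.e. whether a '(' follows it.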
unsigned MacroExpander::isNextTokenLParen() const {
  // Out of tokens?
  if (isAtEnd())
    return 2;
  return (*MacroTokens)[CurToken].getKind() == tok::l_paren;
}