//===--- TokenRewriter.cpp - Token-based code rewriting interface ---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the TokenRewriter class, which is used for code
// transformations.
//
//===----------------------------------------------------------------------===//

#include "clang/Rewrite/TokenRewriter.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/ScratchBuffer.h"
#include "clang/Basic/SourceManager.h"
#include <cassert>   // assert
#include <cstring>   // strlen
using namespace clang;

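/// TokenRewriter - Lex the file identified by FID in raw mode, keeping
/// whitespace and comments as tokens, and record every token (keyed by its
/// source location) in TokenList and TokenAtLoc.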
TokenRewriter::TokenRewriter(FileID FID, SourceManager &SM,
                             const LangOptions &LangOpts) {
  ScratchBuf.reset(new ScratchBuffer(SM));

  std::pair<const char*,const char*> File = SM.getBufferData(FID);

  // Create a lexer to lex all the tokens of the main file in raw mode.
  Lexer RawLex(SM.getLocForStartOfFile(FID),
               LangOpts, File.first, File.second);

  // Return all comments and whitespace as tokens.
  RawLex.SetKeepWhitespaceMode(true);

  // Lex the file, populating our data structures.
  Token RawTok;
  RawLex.LexFromRawLexer(RawTok);
  while (RawTok.isNot(tok::eof)) {
#if 0
    if (RawTok.is(tok::identifier)) {
      // Look up the identifier info for the token.  This should use
      // IdentifierTable directly instead of PP.
      RawTok.setIdentifierInfo(PP.LookUpIdentifierInfo(RawTok));
    }
#endif

    AddToken(RawTok, TokenList.end());
    RawLex.LexFromRawLexer(RawTok);
  }
}

TokenRewriter::~TokenRewriter() {
}


/// RemapIterator - Convert from token_iterator (a const iterator) to
/// TokenRefTy (a non-const iterator).
TokenRewriter::TokenRefTy TokenRewriter::RemapIterator(token_iterator I) {
  if (I == token_end()) return TokenList.end();

  // FIXME: This is horrible, we should use our own list or something to avoid
  // this.
  std::map<SourceLocation, TokenRefTy>::iterator MapIt =
    TokenAtLoc.find(I->getLocation());
  assert(MapIt != TokenAtLoc.end() && "iterator not in rewriter?");
  return MapIt->second;
}

/// AddToken - Add the specified token into the Rewriter immediately before
/// the given position, returning an iterator to the newly inserted token.
TokenRewriter::TokenRefTy
TokenRewriter::AddToken(const Token &T, TokenRefTy Where) {
  Where = TokenList.insert(Where, T);

  bool InsertSuccess = TokenAtLoc.insert(std::make_pair(T.getLocation(),
                                                        Where)).second;
  assert(InsertSuccess && "Token location already in rewriter!");
  (void)InsertSuccess;  // Silence "unused variable" warnings in NDEBUG builds.
  return Where;
}

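/// AddTokenBefore - Synthesize a token for the string Val (via the scratch
/// buffer) and insert it into the token stream immediately before the token
/// that I refers to.  The new token is currently left as tok::unknown; see
/// the TODO below.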
TokenRewriter::token_iterator
TokenRewriter::AddTokenBefore(token_iterator I, const char *Val) {
  unsigned Len = strlen(Val);

  // Plop the string into the scratch buffer, then create a token for this
  // string.
  Token Tok;
  Tok.startToken();
  Tok.setLocation(ScratchBuf->getToken(Val, Len));
  Tok.setLength(Len);

  // TODO: Form a whole lexer around this and relex the token!  For now, just
  // set kind to tok::unknown.
  Tok.setKind(tok::unknown);

  return AddToken(Tok, RemapIterator(I));
}

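#if 0
// Illustrative only: a minimal, uncompiled sketch of how this interface might
// be driven.  It assumes the caller already has a SourceManager, a FileID for
// the file being rewritten, and LangOptions, and that TokenRewriter.h declares
// a token_begin() to pair with the token_end() used above; the helper name
// and the inserted text are made up for the example.
static void insertMarkerBeforeEachToken(FileID FID, SourceManager &SM,
                                        const LangOptions &LangOpts) {
  // Lex the whole file into the rewriter's token list.
  TokenRewriter Rewriter(FID, SM, LangOpts);

  // Splice a scratch-buffer token in front of every token in the file.  Each
  // inserted token is currently tok::unknown (see AddTokenBefore above).
  for (TokenRewriter::token_iterator I = Rewriter.token_begin(),
                                     E = Rewriter.token_end();
       I != E; ++I)
    Rewriter.AddTokenBefore(I, "/*marker*/");
}
#endif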