clang API Documentation
//===--- TokenRewriter.cpp - Token-based code rewriting interface --------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the TokenRewriter class, which is used for code
// transformations.
//
//===----------------------------------------------------------------------===//

#include "clang/Rewrite/Core/TokenRewriter.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/ScratchBuffer.h"
using namespace clang;

TokenRewriter::TokenRewriter(FileID FID, SourceManager &SM,
                             const LangOptions &LangOpts) {
  ScratchBuf.reset(new ScratchBuffer(SM));

  // Create a lexer to lex all the tokens of the main file in raw mode.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
  Lexer RawLex(FID, FromFile, SM, LangOpts);

  // Return all comments and whitespace as tokens.
  RawLex.SetKeepWhitespaceMode(true);

  // Lex the file, populating our datastructures.
  Token RawTok;
  RawLex.LexFromRawLexer(RawTok);
  while (RawTok.isNot(tok::eof)) {
#if 0
    if (Tok.is(tok::raw_identifier)) {
      // Look up the identifier info for the token.  This should use
      // IdentifierTable directly instead of PP.
      PP.LookUpIdentifierInfo(Tok);
    }
#endif

    AddToken(RawTok, TokenList.end());
    RawLex.LexFromRawLexer(RawTok);
  }
}

TokenRewriter::~TokenRewriter() {
}


/// RemapIterator - Convert from token_iterator (a const iterator) to
/// TokenRefTy (a non-const iterator).
TokenRewriter::TokenRefTy TokenRewriter::RemapIterator(token_iterator I) {
  if (I == token_end()) return TokenList.end();

  // FIXME: This is horrible, we should use our own list or something to avoid
  // this.
  std::map<SourceLocation, TokenRefTy>::iterator MapIt =
    TokenAtLoc.find(I->getLocation());
  assert(MapIt != TokenAtLoc.end() && "iterator not in rewriter?");
  return MapIt->second;
}


/// AddToken - Add the specified token into the Rewriter before the other
/// position.
TokenRewriter::TokenRefTy
TokenRewriter::AddToken(const Token &T, TokenRefTy Where) {
  Where = TokenList.insert(Where, T);

  bool InsertSuccess = TokenAtLoc.insert(std::make_pair(T.getLocation(),
                                                        Where)).second;
  assert(InsertSuccess && "Token location already in rewriter!");
  (void)InsertSuccess;
  return Where;
}


TokenRewriter::token_iterator
TokenRewriter::AddTokenBefore(token_iterator I, const char *Val) {
  unsigned Len = strlen(Val);

  // Plop the string into the scratch buffer, then create a token for this
  // string.
  Token Tok;
  Tok.startToken();
  const char *Spelling;
  Tok.setLocation(ScratchBuf->getToken(Val, Len, Spelling));
  Tok.setLength(Len);

  // TODO: Form a whole lexer around this and relex the token!  For now, just
  // set kind to tok::unknown.
  Tok.setKind(tok::unknown);

  return AddToken(Tok, RemapIterator(I));
}
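
A minimal usage sketch of the class implemented above. It assumes the token_begin()/token_end() iterator accessors declared in TokenRewriter.h and a SourceManager that already owns the file; annotateFirstToken is a hypothetical helper, not part of clang:

#include "clang/Basic/LangOptions.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Rewrite/Core/TokenRewriter.h"

// Hypothetical helper: insert a comment token before the first token of FID.
// The caller is assumed to have loaded FID into SM already.
void annotateFirstToken(clang::FileID FID, clang::SourceManager &SM,
                        const clang::LangOptions &LangOpts) {
  // The constructor raw-lexes the whole file, keeping comments and
  // whitespace as tokens (see above).
  clang::TokenRewriter Rewriter(FID, SM, LangOpts);

  clang::TokenRewriter::token_iterator I = Rewriter.token_begin();
  if (I != Rewriter.token_end())
    // The inserted text lives in the scratch buffer and the new token is
    // given kind tok::unknown, as AddTokenBefore notes above.
    Rewriter.AddTokenBefore(I, "/*annotated*/ ");
}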