clang API Documentation

Preprocessor.h
Go to the documentation of this file.
00001 //===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
00002 //
00003 //                     The LLVM Compiler Infrastructure
00004 //
00005 // This file is distributed under the University of Illinois Open Source
00006 // License. See LICENSE.TXT for details.
00007 //
00008 //===----------------------------------------------------------------------===//
00009 ///
00010 /// \file
00011 /// \brief Defines the clang::Preprocessor interface.
00012 ///
00013 //===----------------------------------------------------------------------===//
00014 
00015 #ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
00016 #define LLVM_CLANG_LEX_PREPROCESSOR_H
00017 
00018 #include "clang/Basic/Builtins.h"
00019 #include "clang/Basic/Diagnostic.h"
00020 #include "clang/Basic/IdentifierTable.h"
00021 #include "clang/Basic/SourceLocation.h"
00022 #include "clang/Lex/Lexer.h"
00023 #include "clang/Lex/MacroInfo.h"
00024 #include "clang/Lex/ModuleMap.h"
00025 #include "clang/Lex/PPCallbacks.h"
00026 #include "clang/Lex/PTHLexer.h"
00027 #include "clang/Lex/PTHManager.h"
00028 #include "clang/Lex/TokenLexer.h"
00029 #include "llvm/ADT/ArrayRef.h"
00030 #include "llvm/ADT/DenseMap.h"
00031 #include "llvm/ADT/IntrusiveRefCntPtr.h"
00032 #include "llvm/ADT/SmallPtrSet.h"
00033 #include "llvm/ADT/SmallVector.h"
00034 #include "llvm/Support/Allocator.h"
00035 #include <memory>
00036 #include <vector>
00037 
00038 namespace llvm {
00039   template<unsigned InternalLen> class SmallString;
00040 }
00041 
00042 namespace clang {
00043 
00044 class SourceManager;
00045 class ExternalPreprocessorSource;
00046 class FileManager;
00047 class FileEntry;
00048 class HeaderSearch;
00049 class PragmaNamespace;
00050 class PragmaHandler;
00051 class CommentHandler;
00052 class ScratchBuffer;
00053 class TargetInfo;
00054 class PPCallbacks;
00055 class CodeCompletionHandler;
00056 class DirectoryLookup;
00057 class PreprocessingRecord;
00058 class ModuleLoader;
00059 class PreprocessorOptions;
00060 
00061 /// \brief Stores token information for comparing actual tokens with
00062 /// predefined values.  Only handles simple tokens and identifiers.
00063 class TokenValue {
00064   tok::TokenKind Kind;
00065   IdentifierInfo *II;
00066 
00067 public:
00068   TokenValue(tok::TokenKind Kind) : Kind(Kind), II(nullptr) {
00069     assert(Kind != tok::raw_identifier && "Raw identifiers are not supported.");
00070     assert(Kind != tok::identifier &&
00071            "Identifiers should be created by TokenValue(IdentifierInfo *)");
00072     assert(!tok::isLiteral(Kind) && "Literals are not supported.");
00073     assert(!tok::isAnnotation(Kind) && "Annotations are not supported.");
00074   }
00075   TokenValue(IdentifierInfo *II) : Kind(tok::identifier), II(II) {}
00076   bool operator==(const Token &Tok) const {
00077     return Tok.getKind() == Kind &&
00078         (!II || II == Tok.getIdentifierInfo());
00079   }
00080 };
00081 
/// \brief Context in which macro name is used.
enum MacroUse {
  /// Used in a context other than \#define or \#undef.
  MU_Other  = 0,
  /// Macro name specified in a \#define directive.
  MU_Define = 1,
  /// Macro name specified in a \#undef directive.
  MU_Undef  = 2
};
00088 
00089 /// \brief Engages in a tight little dance with the lexer to efficiently
00090 /// preprocess tokens.
00091 ///
00092 /// Lexers know only about tokens within a single source file, and don't
00093 /// know anything about preprocessor-level issues like the \#include stack,
00094 /// token expansion, etc.
00095 class Preprocessor : public RefCountedBase<Preprocessor> {
00096   IntrusiveRefCntPtr<PreprocessorOptions> PPOpts;
00097   DiagnosticsEngine        *Diags;
00098   LangOptions       &LangOpts;
00099   const TargetInfo  *Target;
00100   FileManager       &FileMgr;
00101   SourceManager     &SourceMgr;
00102   std::unique_ptr<ScratchBuffer> ScratchBuf;
00103   HeaderSearch      &HeaderInfo;
00104   ModuleLoader      &TheModuleLoader;
00105 
00106   /// \brief External source of macros.
00107   ExternalPreprocessorSource *ExternalSource;
00108 
00109 
00110   /// An optional PTHManager object used for getting tokens from
00111   /// a token cache rather than lexing the original source file.
00112   std::unique_ptr<PTHManager> PTH;
00113 
00114   /// A BumpPtrAllocator object used to quickly allocate and release
00115   /// objects internal to the Preprocessor.
00116   llvm::BumpPtrAllocator BP;
00117 
00118   /// Identifiers for builtin macros and other builtins.
00119   IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
00120   IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
00121   IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
00122   IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
00123   IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
00124   IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
00125   IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
00126   IdentifierInfo *Ident__identifier;               // __identifier
00127   IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
00128   IdentifierInfo *Ident__has_feature;              // __has_feature
00129   IdentifierInfo *Ident__has_extension;            // __has_extension
00130   IdentifierInfo *Ident__has_builtin;              // __has_builtin
00131   IdentifierInfo *Ident__has_attribute;            // __has_attribute
00132   IdentifierInfo *Ident__has_include;              // __has_include
00133   IdentifierInfo *Ident__has_include_next;         // __has_include_next
00134   IdentifierInfo *Ident__has_warning;              // __has_warning
00135   IdentifierInfo *Ident__is_identifier;            // __is_identifier
00136   IdentifierInfo *Ident__building_module;          // __building_module
00137   IdentifierInfo *Ident__MODULE__;                 // __MODULE__
00138   IdentifierInfo *Ident__has_cpp_attribute;        // __has_cpp_attribute
00139 
00140   SourceLocation DATELoc, TIMELoc;
00141   unsigned CounterValue;  // Next __COUNTER__ value.
00142 
00143   enum {
00144     /// \brief Maximum depth of \#includes.
00145     MaxAllowedIncludeStackDepth = 200
00146   };
00147 
00148   // State that is set before the preprocessor begins.
00149   bool KeepComments : 1;
00150   bool KeepMacroComments : 1;
00151   bool SuppressIncludeNotFoundError : 1;
00152 
00153   // State that changes while the preprocessor runs:
00154   bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
00155 
00156   /// Whether the preprocessor owns the header search object.
00157   bool OwnsHeaderSearch : 1;
00158 
00159   /// True if macro expansion is disabled.
00160   bool DisableMacroExpansion : 1;
00161 
00162   /// Temporarily disables DisableMacroExpansion (i.e. enables expansion)
00163   /// when parsing preprocessor directives.
00164   bool MacroExpansionInDirectivesOverride : 1;
00165 
00166   class ResetMacroExpansionHelper;
00167 
00168   /// \brief Whether we have already loaded macros from the external source.
00169   mutable bool ReadMacrosFromExternalSource : 1;
00170 
00171   /// \brief True if pragmas are enabled.
00172   bool PragmasEnabled : 1;
00173 
00174   /// \brief True if the current build action is a preprocessing action.
00175   bool PreprocessedOutput : 1;
00176 
00177   /// \brief True if we are currently preprocessing a #if or #elif directive
00178   bool ParsingIfOrElifDirective;
00179 
00180   /// \brief True if we are pre-expanding macro arguments.
00181   bool InMacroArgPreExpansion;
00182 
00183   /// \brief Mapping/lookup information for all identifiers in
00184   /// the program, including program keywords.
00185   mutable IdentifierTable Identifiers;
00186 
00187   /// \brief This table contains all the selectors in the program.
00188   ///
00189   /// Unlike IdentifierTable above, this table *isn't* populated by the
00190   /// preprocessor. It is declared/expanded here because its role/lifetime is
00191   /// conceptually similar to the IdentifierTable. In addition, the current
00192   /// control flow (in clang::ParseAST()), make it convenient to put here.
00193   ///
00194   /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
00195   /// the lifetime of the preprocessor.
00196   SelectorTable Selectors;
00197 
00198   /// \brief Information about builtins.
00199   Builtin::Context BuiltinInfo;
00200 
00201   /// \brief Tracks all of the pragmas that the client registered
00202   /// with this preprocessor.
00203   std::unique_ptr<PragmaNamespace> PragmaHandlers;
00204 
00205   /// \brief Pragma handlers of the original source is stored here during the
00206   /// parsing of a model file.
00207   std::unique_ptr<PragmaNamespace> PragmaHandlersBackup;
00208 
00209   /// \brief Tracks all of the comment handlers that the client registered
00210   /// with this preprocessor.
00211   std::vector<CommentHandler *> CommentHandlers;
00212 
00213   /// \brief True if we want to ignore EOF token and continue later on (thus 
00214   /// avoid tearing the Lexer and etc. down).
00215   bool IncrementalProcessing;
00216 
00217   /// The kind of translation unit we are processing.
00218   TranslationUnitKind TUKind;
00219 
00220   /// \brief The code-completion handler.
00221   CodeCompletionHandler *CodeComplete;
00222 
00223   /// \brief The file that we're performing code-completion for, if any.
00224   const FileEntry *CodeCompletionFile;
00225 
00226   /// \brief The offset in file for the code-completion point.
00227   unsigned CodeCompletionOffset;
00228 
00229   /// \brief The location for the code-completion point. This gets instantiated
00230   /// when the CodeCompletionFile gets \#include'ed for preprocessing.
00231   SourceLocation CodeCompletionLoc;
00232 
00233   /// \brief The start location for the file of the code-completion point.
00234   ///
00235   /// This gets instantiated when the CodeCompletionFile gets \#include'ed
00236   /// for preprocessing.
00237   SourceLocation CodeCompletionFileLoc;
00238 
00239   /// \brief The source location of the \c import contextual keyword we just 
00240   /// lexed, if any.
00241   SourceLocation ModuleImportLoc;
00242 
00243   /// \brief The module import path that we're currently processing.
00244   SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2> ModuleImportPath;
00245 
00246   /// \brief Whether the last token we lexed was an '@'.
00247   bool LastTokenWasAt;
00248 
00249   /// \brief Whether the module import expects an identifier next. Otherwise,
00250   /// it expects a '.' or ';'.
00251   bool ModuleImportExpectsIdentifier;
00252   
00253   /// \brief The source location of the currently-active
00254   /// \#pragma clang arc_cf_code_audited begin.
00255   SourceLocation PragmaARCCFCodeAuditedLoc;
00256 
00257   /// \brief True if we hit the code-completion point.
00258   bool CodeCompletionReached;
00259 
00260   /// \brief The number of bytes that we will initially skip when entering the
00261   /// main file, along with a flag that indicates whether skipping this number
00262   /// of bytes will place the lexer at the start of a line.
00263   ///
00264   /// This is used when loading a precompiled preamble.
00265   std::pair<int, bool> SkipMainFilePreamble;
00266 
00267   /// \brief The current top of the stack that we're lexing from if
00268   /// not expanding a macro and we are lexing directly from source code.
00269   ///
00270   /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
00271   std::unique_ptr<Lexer> CurLexer;
00272 
00273   /// \brief The current top of stack that we're lexing from if
00274   /// not expanding from a macro and we are lexing from a PTH cache.
00275   ///
00276   /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
00277   std::unique_ptr<PTHLexer> CurPTHLexer;
00278 
00279   /// \brief The current top of the stack what we're lexing from
00280   /// if not expanding a macro.
00281   ///
00282   /// This is an alias for either CurLexer or  CurPTHLexer.
00283   PreprocessorLexer *CurPPLexer;
00284 
00285   /// \brief Used to find the current FileEntry, if CurLexer is non-null
00286   /// and if applicable.
00287   ///
00288   /// This allows us to implement \#include_next and find directory-specific
00289   /// properties.
00290   const DirectoryLookup *CurDirLookup;
00291 
00292   /// \brief The current macro we are expanding, if we are expanding a macro.
00293   ///
00294   /// One of CurLexer and CurTokenLexer must be null.
00295   std::unique_ptr<TokenLexer> CurTokenLexer;
00296 
00297   /// \brief The kind of lexer we're currently working with.
00298   enum CurLexerKind {
00299     CLK_Lexer,
00300     CLK_PTHLexer,
00301     CLK_TokenLexer,
00302     CLK_CachingLexer,
00303     CLK_LexAfterModuleImport
00304   } CurLexerKind;
00305 
00306   /// \brief If the current lexer is for a submodule that is being built, this
00307   /// is that submodule.
00308   Module *CurSubmodule;
00309 
00310   /// \brief Keeps track of the stack of files currently
00311   /// \#included, and macros currently being expanded from, not counting
00312   /// CurLexer/CurTokenLexer.
00313   struct IncludeStackInfo {
00314     enum CurLexerKind           CurLexerKind;
00315     Module                     *TheSubmodule;
00316     std::unique_ptr<Lexer>      TheLexer;
00317     std::unique_ptr<PTHLexer>   ThePTHLexer;
00318     PreprocessorLexer          *ThePPLexer;
00319     std::unique_ptr<TokenLexer> TheTokenLexer;
00320     const DirectoryLookup      *TheDirLookup;
00321 
00322     // The following constructors are completely useless copies of the default
00323     // versions, only needed to pacify MSVC.
00324     IncludeStackInfo(enum CurLexerKind CurLexerKind, Module *TheSubmodule,
00325                      std::unique_ptr<Lexer> &&TheLexer,
00326                      std::unique_ptr<PTHLexer> &&ThePTHLexer,
00327                      PreprocessorLexer *ThePPLexer,
00328                      std::unique_ptr<TokenLexer> &&TheTokenLexer,
00329                      const DirectoryLookup *TheDirLookup)
00330         : CurLexerKind(std::move(CurLexerKind)),
00331           TheSubmodule(std::move(TheSubmodule)), TheLexer(std::move(TheLexer)),
00332           ThePTHLexer(std::move(ThePTHLexer)),
00333           ThePPLexer(std::move(ThePPLexer)),
00334           TheTokenLexer(std::move(TheTokenLexer)),
00335           TheDirLookup(std::move(TheDirLookup)) {}
00336     IncludeStackInfo(IncludeStackInfo &&RHS)
00337         : CurLexerKind(std::move(RHS.CurLexerKind)),
00338           TheSubmodule(std::move(RHS.TheSubmodule)),
00339           TheLexer(std::move(RHS.TheLexer)),
00340           ThePTHLexer(std::move(RHS.ThePTHLexer)),
00341           ThePPLexer(std::move(RHS.ThePPLexer)),
00342           TheTokenLexer(std::move(RHS.TheTokenLexer)),
00343           TheDirLookup(std::move(RHS.TheDirLookup)) {}
00344   };
00345   std::vector<IncludeStackInfo> IncludeMacroStack;
00346 
00347   /// \brief Actions invoked when some preprocessor activity is
00348   /// encountered (e.g. a file is \#included, etc).
00349   std::unique_ptr<PPCallbacks> Callbacks;
00350 
/// \brief Records one macro expansion whose PPCallbacks::MacroExpands
/// notification is deferred — presumably until it is safe to fire; see the
/// DelayedMacroExpandsCallbacks member that stores these (TODO confirm).
struct MacroExpandsInfo {
  Token Tok;          // The macro name token that was expanded.
  MacroDirective *MD; // The directive providing the macro's definition.
  SourceRange Range;  // Source range of the expansion.
  MacroExpandsInfo(Token Tok, MacroDirective *MD, SourceRange Range)
    : Tok(Tok), MD(MD), Range(Range) { }
};
00358   SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks;
00359 
00360   /// For each IdentifierInfo that was associated with a macro, we
00361   /// keep a mapping to the history of all macro definitions and #undefs in
00362   /// the reverse order (the latest one is in the head of the list).
00363   llvm::DenseMap<const IdentifierInfo*, MacroDirective*> Macros;
00364   friend class ASTReader;
00365   
00366   /// \brief Macros that we want to warn because they are not used at the end
00367   /// of the translation unit.
00368   ///
00369   /// We store just their SourceLocations instead of
00370   /// something like MacroInfo*. The benefit of this is that when we are
00371   /// deserializing from PCH, we don't need to deserialize identifier & macros
00372   /// just so that we can report that they are unused, we just warn using
00373   /// the SourceLocations of this set (that will be filled by the ASTReader).
00374   /// We are using SmallPtrSet instead of a vector for faster removal.
00375   typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
00376   WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
00377 
00378   /// \brief A "freelist" of MacroArg objects that can be
00379   /// reused for quick allocation.
00380   MacroArgs *MacroArgCache;
00381   friend class MacroArgs;
00382 
00383   /// For each IdentifierInfo used in a \#pragma push_macro directive,
00384   /// we keep a MacroInfo stack used to restore the previous macro value.
00385   llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
00386 
00387   // Various statistics we track for performance analysis.
00388   unsigned NumDirectives, NumDefined, NumUndefined, NumPragma;
00389   unsigned NumIf, NumElse, NumEndif;
00390   unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
00391   unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
00392   unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
00393   unsigned NumSkipped;
00394 
00395   /// \brief The predefined macros that preprocessor should use from the
00396   /// command line etc.
00397   std::string Predefines;
00398 
00399   /// \brief The file ID for the preprocessor predefines.
00400   FileID PredefinesFileID;
00401 
00402   /// \{
00403   /// \brief Cache of macro expanders to reduce malloc traffic.
00404   enum { TokenLexerCacheSize = 8 };
00405   unsigned NumCachedTokenLexers;
00406   std::unique_ptr<TokenLexer> TokenLexerCache[TokenLexerCacheSize];
00407   /// \}
00408 
00409   /// \brief Keeps macro expanded tokens for TokenLexers.
00410   //
00411   /// Works like a stack; a TokenLexer adds the macro expanded tokens that is
00412   /// going to lex in the cache and when it finishes the tokens are removed
00413   /// from the end of the cache.
00414   SmallVector<Token, 16> MacroExpandedTokens;
00415   std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
00416 
00417   /// \brief A record of the macro definitions and expansions that
00418   /// occurred during preprocessing.
00419   ///
00420   /// This is an optional side structure that can be enabled with
00421   /// \c createPreprocessingRecord() prior to preprocessing.
00422   PreprocessingRecord *Record;
00423 
00424 private:  // Cached tokens state.
00425   typedef SmallVector<Token, 1> CachedTokensTy;
00426 
00427   /// \brief Cached tokens are stored here when we do backtracking or
00428   /// lookahead. They are "lexed" by the CachingLex() method.
00429   CachedTokensTy CachedTokens;
00430 
00431   /// \brief The position of the cached token that CachingLex() should
00432   /// "lex" next.
00433   ///
00434   /// If it points beyond the CachedTokens vector, it means that a normal
00435   /// Lex() should be invoked.
00436   CachedTokensTy::size_type CachedLexPos;
00437 
00438   /// \brief Stack of backtrack positions, allowing nested backtracks.
00439   ///
00440   /// The EnableBacktrackAtThisPos() method pushes a position to
00441   /// indicate where CachedLexPos should be set when the BackTrack() method is
00442   /// invoked (at which point the last position is popped).
00443   std::vector<CachedTokensTy::size_type> BacktrackPositions;
00444 
/// \brief Intrusive singly-linked list node owning one MacroInfo.
/// Nodes are threaded through MIChainHead so all MacroInfos can be
/// disposed of together (see the comment on MIChainHead below).
struct MacroInfoChain {
  MacroInfo MI;         // The macro definition itself, stored in-place.
  MacroInfoChain *Next; // Next node in the chain, or null at the tail.
};
00449 
00450   /// MacroInfos are managed as a chain for easy disposal.  This is the head
00451   /// of that list.
00452   MacroInfoChain *MIChainHead;
00453 
/// \brief Chain node for MacroInfos deserialized from an AST/PCH file,
/// pairing each MacroInfo with the ID of the module it came from.
struct DeserializedMacroInfoChain {
  MacroInfo MI;
  unsigned OwningModuleID; // MUST be immediately after the MacroInfo object
                   // so it can be accessed by MacroInfo::getOwningModuleID().
  DeserializedMacroInfoChain *Next; // Next node, or null at the tail.
};
00460   DeserializedMacroInfoChain *DeserialMIChainHead;
00461 
00462 public:
00463   Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts,
00464                DiagnosticsEngine &diags, LangOptions &opts,
00465                SourceManager &SM, HeaderSearch &Headers,
00466                ModuleLoader &TheModuleLoader,
00467                IdentifierInfoLookup *IILookup = nullptr,
00468                bool OwnsHeaderSearch = false,
00469                TranslationUnitKind TUKind = TU_Complete);
00470 
00471   ~Preprocessor();
00472 
00473   /// \brief Initialize the preprocessor using information about the target.
00474   ///
00475   /// \param Target is owned by the caller and must remain valid for the
00476   /// lifetime of the preprocessor.
00477   void Initialize(const TargetInfo &Target);
00478 
00479   /// \brief Initialize the preprocessor to parse a model file
00480   ///
00481   /// To parse model files the preprocessor of the original source is reused to
00482   /// preserver the identifier table. However to avoid some duplicate
00483   /// information in the preprocessor some cleanup is needed before it is used
00484   /// to parse model files. This method does that cleanup.
00485   void InitializeForModelFile();
00486 
00487   /// \brief Cleanup after model file parsing
00488   void FinalizeForModelFile();
00489 
00490   /// \brief Retrieve the preprocessor options used to initialize this
00491   /// preprocessor.
00492   PreprocessorOptions &getPreprocessorOpts() const { return *PPOpts; }
00493   
00494   DiagnosticsEngine &getDiagnostics() const { return *Diags; }
00495   void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
00496 
00497   const LangOptions &getLangOpts() const { return LangOpts; }
00498   const TargetInfo &getTargetInfo() const { return *Target; }
00499   FileManager &getFileManager() const { return FileMgr; }
00500   SourceManager &getSourceManager() const { return SourceMgr; }
00501   HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
00502 
00503   IdentifierTable &getIdentifierTable() { return Identifiers; }
00504   SelectorTable &getSelectorTable() { return Selectors; }
00505   Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
00506   llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
00507 
00508   void setPTHManager(PTHManager* pm);
00509 
00510   PTHManager *getPTHManager() { return PTH.get(); }
00511 
00512   void setExternalSource(ExternalPreprocessorSource *Source) {
00513     ExternalSource = Source;
00514   }
00515 
00516   ExternalPreprocessorSource *getExternalSource() const {
00517     return ExternalSource;
00518   }
00519 
00520   /// \brief Retrieve the module loader associated with this preprocessor.
00521   ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
00522 
00523   bool hadModuleLoaderFatalFailure() const {
00524     return TheModuleLoader.HadFatalFailure;
00525   }
00526 
00527   /// \brief True if we are currently preprocessing a #if or #elif directive
00528   bool isParsingIfOrElifDirective() const { 
00529     return ParsingIfOrElifDirective;
00530   }
00531 
00532   /// \brief Control whether the preprocessor retains comments in output.
00533   void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
00534     this->KeepComments = KeepComments | KeepMacroComments;
00535     this->KeepMacroComments = KeepMacroComments;
00536   }
00537 
00538   bool getCommentRetentionState() const { return KeepComments; }
00539 
00540   void setPragmasEnabled(bool Enabled) { PragmasEnabled = Enabled; }
00541   bool getPragmasEnabled() const { return PragmasEnabled; }
00542 
00543   void SetSuppressIncludeNotFoundError(bool Suppress) {
00544     SuppressIncludeNotFoundError = Suppress;
00545   }
00546 
00547   bool GetSuppressIncludeNotFoundError() {
00548     return SuppressIncludeNotFoundError;
00549   }
00550 
00551   /// Sets whether the preprocessor is responsible for producing output or if
00552   /// it is producing tokens to be consumed by Parse and Sema.
00553   void setPreprocessedOutput(bool IsPreprocessedOutput) {
00554     PreprocessedOutput = IsPreprocessedOutput;
00555   }
00556 
00557   /// Returns true if the preprocessor is responsible for generating output,
00558   /// false if it is producing tokens to be consumed by Parse and Sema.
00559   bool isPreprocessedOutput() const { return PreprocessedOutput; }
00560 
00561   /// \brief Return true if we are lexing directly from the specified lexer.
00562   bool isCurrentLexer(const PreprocessorLexer *L) const {
00563     return CurPPLexer == L;
00564   }
00565 
00566   /// \brief Return the current lexer being lexed from.
00567   ///
00568   /// Note that this ignores any potentially active macro expansions and _Pragma
00569   /// expansions going on at the time.
00570   PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
00571 
00572   /// \brief Return the current file lexer being lexed from.
00573   ///
00574   /// Note that this ignores any potentially active macro expansions and _Pragma
00575   /// expansions going on at the time.
00576   PreprocessorLexer *getCurrentFileLexer() const;
00577 
00578   /// \brief Return the submodule owning the file being lexed.
00579   Module *getCurrentSubmodule() const { return CurSubmodule; }
00580 
00581   /// \brief Returns the FileID for the preprocessor predefines.
00582   FileID getPredefinesFileID() const { return PredefinesFileID; }
00583 
00584   /// \{
00585   /// \brief Accessors for preprocessor callbacks.
00586   ///
00587   /// Note that this class takes ownership of any PPCallbacks object given to
00588   /// it.
00589   PPCallbacks *getPPCallbacks() const { return Callbacks.get(); }
00590   void addPPCallbacks(std::unique_ptr<PPCallbacks> C) {
00591     if (Callbacks)
00592       C = llvm::make_unique<PPChainedCallbacks>(std::move(C),
00593                                                 std::move(Callbacks));
00594     Callbacks = std::move(C);
00595   }
00596   /// \}
00597 
00598   /// \brief Given an identifier, return its latest MacroDirective if it is
00599   /// \#defined or null if it isn't \#define'd.
00600   MacroDirective *getMacroDirective(IdentifierInfo *II) const {
00601     if (!II->hasMacroDefinition())
00602       return nullptr;
00603 
00604     MacroDirective *MD = getMacroDirectiveHistory(II);
00605     assert(MD->isDefined() && "Macro is undefined!");
00606     return MD;
00607   }
00608 
00609   const MacroInfo *getMacroInfo(IdentifierInfo *II) const {
00610     return const_cast<Preprocessor*>(this)->getMacroInfo(II);
00611   }
00612 
00613   MacroInfo *getMacroInfo(IdentifierInfo *II) {
00614     if (MacroDirective *MD = getMacroDirective(II))
00615       return MD->getMacroInfo();
00616     return nullptr;
00617   }
00618 
00619   /// \brief Given an identifier, return the (probably #undef'd) MacroInfo
00620   /// representing the most recent macro definition.
00621   ///
00622   /// One can iterate over all previous macro definitions from the most recent
00623   /// one. This should only be called for identifiers that hadMacroDefinition().
00624   MacroDirective *getMacroDirectiveHistory(const IdentifierInfo *II) const;
00625 
00626   /// \brief Add a directive to the macro directive history for this identifier.
00627   void appendMacroDirective(IdentifierInfo *II, MacroDirective *MD);
00628   DefMacroDirective *appendDefMacroDirective(IdentifierInfo *II, MacroInfo *MI,
00629                                              SourceLocation Loc,
00630                                              unsigned ImportedFromModuleID,
00631                                              ArrayRef<unsigned> Overrides) {
00632     DefMacroDirective *MD =
00633         AllocateDefMacroDirective(MI, Loc, ImportedFromModuleID, Overrides);
00634     appendMacroDirective(II, MD);
00635     return MD;
00636   }
00637   DefMacroDirective *appendDefMacroDirective(IdentifierInfo *II, MacroInfo *MI){
00638     return appendDefMacroDirective(II, MI, MI->getDefinitionLoc(), 0, None);
00639   }
00640   /// \brief Set a MacroDirective that was loaded from a PCH file.
00641   void setLoadedMacroDirective(IdentifierInfo *II, MacroDirective *MD);
00642 
00643   /// \{
00644   /// Iterators for the macro history table. Currently defined macros have
00645   /// IdentifierInfo::hasMacroDefinition() set and an empty
00646   /// MacroInfo::getUndefLoc() at the head of the list.
00647   typedef llvm::DenseMap<const IdentifierInfo *,
00648                          MacroDirective*>::const_iterator macro_iterator;
00649   macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
00650   macro_iterator macro_end(bool IncludeExternalMacros = true) const;
00651   /// \}
00652 
00653   /// \brief Return the name of the macro defined before \p Loc that has
00654   /// spelling \p Tokens.  If there are multiple macros with same spelling,
00655   /// return the last one defined.
00656   StringRef getLastMacroWithSpelling(SourceLocation Loc,
00657                                      ArrayRef<TokenValue> Tokens) const;
00658 
00659   const std::string &getPredefines() const { return Predefines; }
00660   /// \brief Set the predefines for this Preprocessor.
00661   ///
00662   /// These predefines are automatically injected when parsing the main file.
00663   void setPredefines(const char *P) { Predefines = P; }
00664   void setPredefines(const std::string &P) { Predefines = P; }
00665 
00666   /// Return information about the specified preprocessor
00667   /// identifier token.
00668   IdentifierInfo *getIdentifierInfo(StringRef Name) const {
00669     return &Identifiers.get(Name);
00670   }
00671 
00672   /// \brief Add the specified pragma handler to this preprocessor.
00673   ///
00674   /// If \p Namespace is non-null, then it is a token required to exist on the
00675   /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
00676   void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
00677   void AddPragmaHandler(PragmaHandler *Handler) {
00678     AddPragmaHandler(StringRef(), Handler);
00679   }
00680 
00681   /// \brief Remove the specific pragma handler from this preprocessor.
00682   ///
00683   /// If \p Namespace is non-null, then it should be the namespace that
00684   /// \p Handler was added to. It is an error to remove a handler that
00685   /// has not been registered.
00686   void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
00687   void RemovePragmaHandler(PragmaHandler *Handler) {
00688     RemovePragmaHandler(StringRef(), Handler);
00689   }
00690 
00691   /// Install empty handlers for all pragmas (making them ignored).
00692   void IgnorePragmas();
00693 
00694   /// \brief Add the specified comment handler to the preprocessor.
00695   void addCommentHandler(CommentHandler *Handler);
00696 
00697   /// \brief Remove the specified comment handler.
00698   ///
00699   /// It is an error to remove a handler that has not been registered.
00700   void removeCommentHandler(CommentHandler *Handler);
00701 
  /// \brief Set the code completion handler to the given object.
  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
    CodeComplete = &Handler;
  }

  /// \brief Retrieve the current code-completion handler.
  ///
  /// Returns null if no handler has been installed or it has been cleared.
  CodeCompletionHandler *getCodeCompletionHandler() const {
    return CodeComplete;
  }

  /// \brief Clear out the code completion handler.
  void clearCodeCompletionHandler() {
    CodeComplete = nullptr;
  }
00716 
00717   /// \brief Hook used by the lexer to invoke the "natural language" code
00718   /// completion point.
00719   void CodeCompleteNaturalLanguage();
00720 
  /// \brief Retrieve the preprocessing record, or NULL if there is no
  /// preprocessing record.
  ///
  /// A record only exists after createPreprocessingRecord() has been called.
  PreprocessingRecord *getPreprocessingRecord() const { return Record; }
00724 
00725   /// \brief Create a new preprocessing record, which will keep track of
00726   /// all macro expansions, macro definitions, etc.
00727   void createPreprocessingRecord();
00728 
00729   /// \brief Enter the specified FileID as the main source file,
00730   /// which implicitly adds the builtin defines etc.
00731   void EnterMainSourceFile();
00732 
00733   /// \brief Inform the preprocessor callbacks that processing is complete.
00734   void EndSourceFile();
00735 
00736   /// \brief Add a source file to the top of the include stack and
00737   /// start lexing tokens from it instead of the current buffer.
00738   ///
00739   /// Emits a diagnostic, doesn't enter the file, and returns true on error.
00740   bool EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
00741                        SourceLocation Loc);
00742 
00743   /// \brief Add a Macro to the top of the include stack and start lexing
00744   /// tokens from it instead of the current buffer.
00745   ///
00746   /// \param Args specifies the tokens input to a function-like macro.
00747   /// \param ILEnd specifies the location of the ')' for a function-like macro
00748   /// or the identifier for an object-like macro.
00749   void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroInfo *Macro,
00750                   MacroArgs *Args);
00751 
00752   /// \brief Add a "macro" context to the top of the include stack,
00753   /// which will cause the lexer to start returning the specified tokens.
00754   ///
00755   /// If \p DisableMacroExpansion is true, tokens lexed from the token stream
00756   /// will not be subject to further macro expansion. Otherwise, these tokens
00757   /// will be re-macro-expanded when/if expansion is enabled.
00758   ///
00759   /// If \p OwnsTokens is false, this method assumes that the specified stream
00760   /// of tokens has a permanent owner somewhere, so they do not need to be
00761   /// copied. If it is true, it assumes the array of tokens is allocated with
00762   /// \c new[] and must be freed.
00763   void EnterTokenStream(const Token *Toks, unsigned NumToks,
00764                         bool DisableMacroExpansion, bool OwnsTokens);
00765 
00766   /// \brief Pop the current lexer/macro exp off the top of the lexer stack.
00767   ///
00768   /// This should only be used in situations where the current state of the
00769   /// top-of-stack lexer is known.
00770   void RemoveTopOfLexerStack();
00771 
00772   /// From the point that this method is called, and until
00773   /// CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
00774   /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
00775   /// make the Preprocessor re-lex the same tokens.
00776   ///
00777   /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
00778   /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
00779   /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
00780   ///
00781   /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
00782   /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
00783   /// tokens will continue indefinitely.
00784   ///
00785   void EnableBacktrackAtThisPos();
00786 
00787   /// \brief Disable the last EnableBacktrackAtThisPos call.
00788   void CommitBacktrackedTokens();
00789 
00790   /// \brief Make Preprocessor re-lex the tokens that were lexed since
00791   /// EnableBacktrackAtThisPos() was previously called.
00792   void Backtrack();
00793 
  /// \brief True if EnableBacktrackAtThisPos() was called and
  /// caching of tokens is on.
  ///
  /// Each EnableBacktrackAtThisPos() call pushes a position; an empty stack
  /// means no backtrack scope is currently active.
  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
00797 
00798   /// \brief Lex the next token for this preprocessor.
00799   void Lex(Token &Result);
00800 
00801   void LexAfterModuleImport(Token &Result);
00802 
00803   /// \brief Lex a string literal, which may be the concatenation of multiple
00804   /// string literals and may even come from macro expansion.
00805   /// \returns true on success, false if a error diagnostic has been generated.
00806   bool LexStringLiteral(Token &Result, std::string &String,
00807                         const char *DiagnosticTag, bool AllowMacroExpansion) {
00808     if (AllowMacroExpansion)
00809       Lex(Result);
00810     else
00811       LexUnexpandedToken(Result);
00812     return FinishLexStringLiteral(Result, String, DiagnosticTag,
00813                                   AllowMacroExpansion);
00814   }
00815 
00816   /// \brief Complete the lexing of a string literal where the first token has
00817   /// already been lexed (see LexStringLiteral).
00818   bool FinishLexStringLiteral(Token &Result, std::string &String,
00819                               const char *DiagnosticTag,
00820                               bool AllowMacroExpansion);
00821 
00822   /// \brief Lex a token.  If it's a comment, keep lexing until we get
00823   /// something not a comment.
00824   ///
00825   /// This is useful in -E -C mode where comments would foul up preprocessor
00826   /// directive handling.
00827   void LexNonComment(Token &Result) {
00828     do
00829       Lex(Result);
00830     while (Result.getKind() == tok::comment);
00831   }
00832 
00833   /// \brief Just like Lex, but disables macro expansion of identifier tokens.
00834   void LexUnexpandedToken(Token &Result) {
00835     // Disable macro expansion.
00836     bool OldVal = DisableMacroExpansion;
00837     DisableMacroExpansion = true;
00838     // Lex the token.
00839     Lex(Result);
00840 
00841     // Reenable it.
00842     DisableMacroExpansion = OldVal;
00843   }
00844 
00845   /// \brief Like LexNonComment, but this disables macro expansion of
00846   /// identifier tokens.
00847   void LexUnexpandedNonComment(Token &Result) {
00848     do
00849       LexUnexpandedToken(Result);
00850     while (Result.getKind() == tok::comment);
00851   }
00852 
00853   /// \brief Parses a simple integer literal to get its numeric value.  Floating
00854   /// point literals and user defined literals are rejected.  Used primarily to
00855   /// handle pragmas that accept integer arguments.
00856   bool parseSimpleIntegerLiteral(Token &Tok, uint64_t &Value);
00857 
  /// Disables macro expansion everywhere except for preprocessor directives.
  void SetMacroExpansionOnlyInDirectives() {
    // The override flag lets directive processing re-enable expansion even
    // though it is globally disabled for ordinary token lexing.
    DisableMacroExpansion = true;
    MacroExpansionInDirectivesOverride = true;
  }
00863 
00864   /// \brief Peeks ahead N tokens and returns that token without consuming any
00865   /// tokens.
00866   ///
00867   /// LookAhead(0) returns the next token that would be returned by Lex(),
00868   /// LookAhead(1) returns the token after it, etc.  This returns normal
00869   /// tokens after phase 5.  As such, it is equivalent to using
00870   /// 'Lex', not 'LexUnexpandedToken'.
00871   const Token &LookAhead(unsigned N) {
00872     if (CachedLexPos + N < CachedTokens.size())
00873       return CachedTokens[CachedLexPos+N];
00874     else
00875       return PeekAhead(N+1);
00876   }
00877 
  /// \brief When backtracking is enabled and tokens are cached,
  /// this allows to revert a specific number of tokens.
  ///
  /// Note that the number of tokens being reverted should be up to the last
  /// backtrack position, not more.
  void RevertCachedTokens(unsigned N) {
    assert(isBacktrackEnabled() &&
           "Should only be called when tokens are cached for backtracking");
    // Signed arithmetic so an over-large N trips the asserts instead of
    // wrapping around in unsigned math.
    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
         && "Should revert tokens up to the last backtrack position, not more");
    assert(signed(CachedLexPos) - signed(N) >= 0 &&
           "Corrupted backtrack positions ?");
    // Rewinding only moves the cursor; the cached tokens stay in place so
    // they can be re-lexed.
    CachedLexPos -= N;
  }

  /// \brief Enters a token in the token stream to be lexed next.
  ///
  /// If BackTrack() is called afterwards, the token will remain at the
  /// insertion point.
  void EnterToken(const Token &Tok) {
    // Switch to the caching lexer so the inserted token is replayed in order.
    EnterCachingLexMode();
    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
  }
00901 
00902   /// We notify the Preprocessor that if it is caching tokens (because
00903   /// backtrack is enabled) it should replace the most recent cached tokens
00904   /// with the given annotation token. This function has no effect if
00905   /// backtracking is not enabled.
00906   ///
00907   /// Note that the use of this function is just for optimization, so that the
00908   /// cached tokens doesn't get re-parsed and re-resolved after a backtrack is
00909   /// invoked.
00910   void AnnotateCachedTokens(const Token &Tok) {
00911     assert(Tok.isAnnotation() && "Expected annotation token");
00912     if (CachedLexPos != 0 && isBacktrackEnabled())
00913       AnnotatePreviousCachedTokens(Tok);
00914   }
00915 
00916   /// Get the location of the last cached token, suitable for setting the end
00917   /// location of an annotation token.
00918   SourceLocation getLastCachedTokenLocation() const {
00919     assert(CachedLexPos != 0);
00920     return CachedTokens[CachedLexPos-1].getLocation();
00921   }
00922 
00923   /// \brief Replace the last token with an annotation token.
00924   ///
00925   /// Like AnnotateCachedTokens(), this routine replaces an
00926   /// already-parsed (and resolved) token with an annotation
00927   /// token. However, this routine only replaces the last token with
00928   /// the annotation token; it does not affect any other cached
00929   /// tokens. This function has no effect if backtracking is not
00930   /// enabled.
00931   void ReplaceLastTokenWithAnnotation(const Token &Tok) {
00932     assert(Tok.isAnnotation() && "Expected annotation token");
00933     if (CachedLexPos != 0 && isBacktrackEnabled())
00934       CachedTokens[CachedLexPos-1] = Tok;
00935   }
00936 
00937   /// Update the current token to represent the provided
00938   /// identifier, in order to cache an action performed by typo correction.
00939   void TypoCorrectToken(const Token &Tok) {
00940     assert(Tok.getIdentifierInfo() && "Expected identifier token");
00941     if (CachedLexPos != 0 && isBacktrackEnabled())
00942       CachedTokens[CachedLexPos-1] = Tok;
00943   }
00944 
00945   /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/
00946   /// CurTokenLexer pointers.
00947   void recomputeCurLexerKind();
00948 
00949   /// \brief Returns true if incremental processing is enabled
00950   bool isIncrementalProcessingEnabled() const { return IncrementalProcessing; }
00951 
00952   /// \brief Enables the incremental processing
00953   void enableIncrementalProcessing(bool value = true) {
00954     IncrementalProcessing = value;
00955   }
00956   
00957   /// \brief Specify the point at which code-completion will be performed.
00958   ///
00959   /// \param File the file in which code completion should occur. If
00960   /// this file is included multiple times, code-completion will
00961   /// perform completion the first time it is included. If NULL, this
00962   /// function clears out the code-completion point.
00963   ///
00964   /// \param Line the line at which code completion should occur
00965   /// (1-based).
00966   ///
00967   /// \param Column the column at which code completion should occur
00968   /// (1-based).
00969   ///
00970   /// \returns true if an error occurred, false otherwise.
00971   bool SetCodeCompletionPoint(const FileEntry *File,
00972                               unsigned Line, unsigned Column);
00973 
  /// \brief Determine if we are performing code completion.
  bool isCodeCompletionEnabled() const { return CodeCompletionFile != nullptr; }

  /// \brief Returns the location of the code-completion point.
  ///
  /// Returns an invalid location if code-completion is not enabled or the file
  /// containing the code-completion point has not been lexed yet.
  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }

  /// \brief Returns the start location of the file of code-completion point.
  ///
  /// Returns an invalid location if code-completion is not enabled or the file
  /// containing the code-completion point has not been lexed yet.
  SourceLocation getCodeCompletionFileLoc() const {
    return CodeCompletionFileLoc;
  }

  /// \brief Returns true if code-completion is enabled and we have hit the
  /// code-completion point.
  bool isCodeCompletionReached() const { return CodeCompletionReached; }

  /// \brief Note that we hit the code-completion point.
  void setCodeCompletionReached() {
    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
    CodeCompletionReached = true;
    // Silence any diagnostics that occur after we hit the code-completion.
    // NOTE(review): suppression is not undone here — presumably intended to
    // last for the remainder of processing; confirm against callers.
    getDiagnostics().setSuppressAllDiagnostics(true);
  }

  /// \brief The location of the currently-active \#pragma clang
  /// arc_cf_code_audited begin.
  ///
  /// Returns an invalid location if there is no such pragma active.
  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
    return PragmaARCCFCodeAuditedLoc;
  }

  /// \brief Set the location of the currently-active \#pragma clang
  /// arc_cf_code_audited begin.  An invalid location ends the pragma.
  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
    PragmaARCCFCodeAuditedLoc = Loc;
  }

  /// \brief Instruct the preprocessor to skip part of the main source file.
  ///
  /// \param Bytes The number of bytes in the preamble to skip.
  ///
  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
  /// start of a line.
  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
    // Stored as a (byte count, starts-at-line-start) pair.
    SkipMainFilePreamble.first = Bytes;
    SkipMainFilePreamble.second = StartOfLine;
  }
01027 
01028   /// Forwarding function for diagnostics.  This emits a diagnostic at
01029   /// the specified Token's location, translating the token's start
01030   /// position in the current buffer into a SourcePosition object for rendering.
01031   DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
01032     return Diags->Report(Loc, DiagID);
01033   }
01034 
01035   DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
01036     return Diags->Report(Tok.getLocation(), DiagID);
01037   }
01038 
  /// Return the 'spelling' of the token at the given
  /// location; does not go up to the spelling location or down to the
  /// expansion location.
  ///
  /// \param buffer A buffer which will be used only if the token requires
  ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
  /// \param invalid If non-null, will be set \c true if an error occurs.
  StringRef getSpelling(SourceLocation loc,
                        SmallVectorImpl<char> &buffer,
                        bool *invalid = nullptr) const {
    // Thin forwarder to the static Lexer helper, supplying this
    // preprocessor's SourceManager and LangOptions.
    return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid);
  }

  /// \brief Return the 'spelling' of the Tok token.
  ///
  /// The spelling of a token is the characters used to represent the token in
  /// the source file after trigraph expansion and escaped-newline folding.  In
  /// particular, this wants to get the true, uncanonicalized, spelling of
  /// things like digraphs, UCNs, etc.
  ///
  /// \param Invalid If non-null, will be set \c true if an error occurs.
  std::string getSpelling(const Token &Tok, bool *Invalid = nullptr) const {
    // Allocating overload; prefer the buffer-based overloads on hot paths.
    return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid);
  }

  /// \brief Get the spelling of a token into a preallocated buffer, instead
  /// of as an std::string.
  ///
  /// The caller is required to allocate enough space for the token, which is
  /// guaranteed to be at least Tok.getLength() bytes long. The length of the
  /// actual result is returned.
  ///
  /// Note that this method may do two possible things: it may either fill in
  /// the buffer specified with characters, or it may *change the input pointer*
  /// to point to a constant buffer with the data already in it (avoiding a
  /// copy).  The caller is not allowed to modify the returned buffer pointer
  /// if an internal buffer is returned.
  unsigned getSpelling(const Token &Tok, const char *&Buffer,
                       bool *Invalid = nullptr) const {
    return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid);
  }

  /// \brief Get the spelling of a token into a SmallVector.
  ///
  /// Note that the returned StringRef may not point to the
  /// supplied buffer if a copy can be avoided.
  StringRef getSpelling(const Token &Tok,
                        SmallVectorImpl<char> &Buffer,
                        bool *Invalid = nullptr) const;

  /// \brief Relex the token at the specified location.
  /// \returns true if there was a failure, false on success.
  bool getRawToken(SourceLocation Loc, Token &Result,
                   bool IgnoreWhiteSpace = false) {
    return Lexer::getRawToken(Loc, Result, SourceMgr, LangOpts, IgnoreWhiteSpace);
  }
01095 
01096   /// \brief Given a Token \p Tok that is a numeric constant with length 1,
01097   /// return the character.
01098   char
01099   getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
01100                                               bool *Invalid = nullptr) const {
01101     assert(Tok.is(tok::numeric_constant) &&
01102            Tok.getLength() == 1 && "Called on unsupported token");
01103     assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
01104 
01105     // If the token is carrying a literal data pointer, just use it.
01106     if (const char *D = Tok.getLiteralData())
01107       return *D;
01108 
01109     // Otherwise, fall back on getCharacterData, which is slower, but always
01110     // works.
01111     return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
01112   }
01113 
01114   /// \brief Retrieve the name of the immediate macro expansion.
01115   ///
01116   /// This routine starts from a source location, and finds the name of the
01117   /// macro responsible for its immediate expansion. It looks through any
01118   /// intervening macro argument expansions to compute this. It returns a
01119   /// StringRef that refers to the SourceManager-owned buffer of the source
01120   /// where that macro name is spelled. Thus, the result shouldn't out-live
01121   /// the SourceManager.
  StringRef getImmediateMacroName(SourceLocation Loc) {
    // Forwarder to the static Lexer helper; the result points into a
    // SourceManager-owned buffer and must not outlive the SourceManager.
    return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts());
  }
01125 
01126   /// \brief Plop the specified string into a scratch buffer and set the
01127   /// specified token's location and length to it. 
01128   ///
01129   /// If specified, the source location provides a location of the expansion
01130   /// point of the token.
01131   void CreateString(StringRef Str, Token &Tok,
01132                     SourceLocation ExpansionLocStart = SourceLocation(),
01133                     SourceLocation ExpansionLocEnd = SourceLocation());
01134 
  /// \brief Computes the source location just past the end of the
  /// token at this source location.
  ///
  /// This routine can be used to produce a source location that
  /// points just past the end of the token referenced by \p Loc, and
  /// is generally used when a diagnostic needs to point just after a
  /// token where it expected something different that it received. If
  /// the returned source location would not be meaningful (e.g., if
  /// it points into a macro), this routine returns an invalid
  /// source location.
  ///
  /// \param Offset an offset from the end of the token, where the source
  /// location should refer to. The default offset (0) produces a source
  /// location pointing just past the end of the token; an offset of 1 produces
  /// a source location pointing to the last character in the token, etc.
  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
    // Forwarder to the static Lexer helper.
    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts);
  }

  /// \brief Returns true if the given MacroID location points at the first
  /// token of the macro expansion.
  ///
  /// \param MacroBegin If non-null and function returns true, it is set to
  /// begin location of the macro.
  bool isAtStartOfMacroExpansion(SourceLocation loc,
                                 SourceLocation *MacroBegin = nullptr) const {
    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, LangOpts,
                                            MacroBegin);
  }

  /// \brief Returns true if the given MacroID location points at the last
  /// token of the macro expansion.
  ///
  /// \param MacroEnd If non-null and function returns true, it is set to
  /// end location of the macro.
  bool isAtEndOfMacroExpansion(SourceLocation loc,
                               SourceLocation *MacroEnd = nullptr) const {
    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd);
  }

  /// \brief Print the token to stderr, used for debugging.
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  void DumpLocation(SourceLocation Loc) const;
  void DumpMacro(const MacroInfo &MI) const;

  /// \brief Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
                                         unsigned Char) const {
    // Forwarder to the static Lexer helper.
    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts);
  }
01186 
01187   /// \brief Increment the counters for the number of token paste operations
01188   /// performed.
01189   ///
01190   /// If fast was specified, this is a 'fast paste' case we handled.
01191   void IncrementPasteCounter(bool isFast) {
01192     if (isFast)
01193       ++NumFastTokenPaste;
01194     else
01195       ++NumTokenPaste;
01196   }
01197 
01198   void PrintStats();
01199 
01200   size_t getTotalMemory() const;
01201 
01202   /// When the macro expander pastes together a comment (/##/) in Microsoft
01203   /// mode, this method handles updating the current state, returning the
01204   /// token on the next source line.
01205   void HandleMicrosoftCommentPaste(Token &Tok);
01206 
01207   //===--------------------------------------------------------------------===//
01208   // Preprocessor callback methods.  These are invoked by a lexer as various
01209   // directives and events are found.
01210 
01211   /// Given a tok::raw_identifier token, look up the
01212   /// identifier information for the token and install it into the token,
01213   /// updating the token kind accordingly.
01214   IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
01215 
01216 private:
01217   llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
01218 
01219 public:
01220 
01221   /// \brief Specifies the reason for poisoning an identifier.
01222   ///
01223   /// If that identifier is accessed while poisoned, then this reason will be
01224   /// used instead of the default "poisoned" diagnostic.
01225   void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
01226 
01227   /// \brief Display reason for poisoned identifier.
01228   void HandlePoisonedIdentifier(Token & Tok);
01229 
01230   void MaybeHandlePoisonedIdentifier(Token & Identifier) {
01231     if(IdentifierInfo * II = Identifier.getIdentifierInfo()) {
01232       if(II->isPoisoned()) {
01233         HandlePoisonedIdentifier(Identifier);
01234       }
01235     }
01236   }
01237 
01238 private:
01239   /// Identifiers used for SEH handling in Borland. These are only
01240   /// allowed in particular circumstances
01241   // __except block
01242   IdentifierInfo *Ident__exception_code,
01243                  *Ident___exception_code,
01244                  *Ident_GetExceptionCode;
01245   // __except filter expression
01246   IdentifierInfo *Ident__exception_info,
01247                  *Ident___exception_info,
01248                  *Ident_GetExceptionInfo;
01249   // __finally
01250   IdentifierInfo *Ident__abnormal_termination,
01251                  *Ident___abnormal_termination,
01252                  *Ident_AbnormalTermination;
01253 
01254   const char *getCurLexerEndPos();
01255 
01256 public:
01257   void PoisonSEHIdentifiers(bool Poison = true); // Borland
01258 
01259   /// \brief Callback invoked when the lexer reads an identifier and has
01260   /// filled in the tokens IdentifierInfo member. 
01261   ///
01262   /// This callback potentially macro expands it or turns it into a named
01263   /// token (like 'for').
01264   ///
01265   /// \returns true if we actually computed a token, false if we need to
01266   /// lex again.
01267   bool HandleIdentifier(Token &Identifier);
01268 
01269 
01270   /// \brief Callback invoked when the lexer hits the end of the current file.
01271   ///
01272   /// This either returns the EOF token and returns true, or
01273   /// pops a level off the include stack and returns false, at which point the
01274   /// client should call lex again.
01275   bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
01276 
01277   /// \brief Callback invoked when the current TokenLexer hits the end of its
01278   /// token stream.
01279   bool HandleEndOfTokenLexer(Token &Result);
01280 
01281   /// \brief Callback invoked when the lexer sees a # token at the start of a
01282   /// line.
01283   ///
01284   /// This consumes the directive, modifies the lexer/preprocessor state, and
01285   /// advances the lexer(s) so that the next token read is the correct one.
01286   void HandleDirective(Token &Result);
01287 
01288   /// \brief Ensure that the next token is a tok::eod token.
01289   ///
01290   /// If not, emit a diagnostic and consume up until the eod.
01291   /// If \p EnableMacros is true, then we consider macros that expand to zero
01292   /// tokens as being ok.
01293   void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
01294 
01295   /// \brief Read and discard all tokens remaining on the current line until
01296   /// the tok::eod token is found.
01297   void DiscardUntilEndOfDirective();
01298 
01299   /// \brief Returns true if the preprocessor has seen a use of
01300   /// __DATE__ or __TIME__ in the file so far.
01301   bool SawDateOrTime() const {
01302     return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
01303   }
01304   unsigned getCounterValue() const { return CounterValue; }
01305   void setCounterValue(unsigned V) { CounterValue = V; }
01306 
01307   /// \brief Retrieves the module that we're currently building, if any.
01308   Module *getCurrentModule();
01309   
01310   /// \brief Allocate a new MacroInfo object with the provided SourceLocation.
01311   MacroInfo *AllocateMacroInfo(SourceLocation L);
01312 
01313   /// \brief Allocate a new MacroInfo object loaded from an AST file.
01314   MacroInfo *AllocateDeserializedMacroInfo(SourceLocation L,
01315                                            unsigned SubModuleID);
01316 
01317   /// \brief Turn the specified lexer token into a fully checked and spelled
01318   /// filename, e.g. as an operand of \#include. 
01319   ///
01320   /// The caller is expected to provide a buffer that is large enough to hold
01321   /// the spelling of the filename, but is also expected to handle the case
01322   /// when this method decides to use a different buffer.
01323   ///
01324   /// \returns true if the input filename was in <>'s or false if it was
01325   /// in ""'s.
01326   bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename);
01327 
01328   /// \brief Given a "foo" or <foo> reference, look up the indicated file.
01329   ///
01330   /// Returns null on failure.  \p isAngled indicates whether the file
01331   /// reference is for system \#include's or not (i.e. using <> instead of "").
01332   const FileEntry *LookupFile(SourceLocation FilenameLoc, StringRef Filename,
01333                               bool isAngled, const DirectoryLookup *FromDir,
01334                               const FileEntry *FromFile,
01335                               const DirectoryLookup *&CurDir,
01336                               SmallVectorImpl<char> *SearchPath,
01337                               SmallVectorImpl<char> *RelativePath,
01338                               ModuleMap::KnownHeader *SuggestedModule,
01339                               bool SkipCache = false);
01340 
  /// \brief Get the DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable. 
  ///
  /// This allows us to implement \#include_next and find directory-specific
  /// properties.
  ///
  /// Simple accessor; the result may be null when no lookup applies.
  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
01347 
01348   /// \brief Return true if we're in the top-level file, not in a \#include.
01349   bool isInPrimaryFile() const;
01350 
01351   /// \brief Handle cases where the \#include name is expanded
01352   /// from a macro as multiple tokens, which need to be glued together. 
01353   ///
01354   /// This occurs for code like:
01355   /// \code
01356   ///    \#define FOO <x/y.h>
01357   ///    \#include FOO
01358   /// \endcode
01359   /// because in this case, "<x/y.h>" is returned as 7 tokens, not one.
01360   ///
01361   /// This code concatenates and consumes tokens up to the '>' token.  It
01362   /// returns false if the > was found, otherwise it returns true if it finds
01363   /// and consumes the EOD marker.
01364   bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
01365                               SourceLocation &End);
01366 
01367   /// \brief Lex an on-off-switch (C99 6.10.6p2) and verify that it is
01368   /// followed by EOD.  Return true if the token is not a valid on-off-switch.
01369   bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
01370 
01371   bool CheckMacroName(Token &MacroNameTok, MacroUse isDefineUndef);
01372 
01373 private:
01374 
  void PushIncludeMacroStack() {
    assert(CurLexerKind != CLK_CachingLexer && "cannot push a caching lexer");
    // Save the complete current lexer state (kind, submodule, lexers, token
    // lexer, directory lookup) so PopIncludeMacroStack can restore it.
    IncludeMacroStack.push_back(IncludeStackInfo(
        CurLexerKind, CurSubmodule, std::move(CurLexer), std::move(CurPTHLexer),
        CurPPLexer, std::move(CurTokenLexer), CurDirLookup));
    CurPPLexer = nullptr;
  }

  void PopIncludeMacroStack() {
    // Restore the saved lexer state; move-only members are moved back out of
    // the stack entry before pop_back() destroys it.
    CurLexer = std::move(IncludeMacroStack.back().TheLexer);
    CurPTHLexer = std::move(IncludeMacroStack.back().ThePTHLexer);
    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    CurTokenLexer = std::move(IncludeMacroStack.back().TheTokenLexer);
    CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
    CurSubmodule = IncludeMacroStack.back().TheSubmodule;
    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
    IncludeMacroStack.pop_back();
  }
01393 
  /// \brief Propagate start-of-line / leading-space flags into \p Result.
  /// NOTE(review): exact flag handling lives in the definition — confirm there.
  void PropagateLineStartLeadingSpaceInfo(Token &Result);

  /// \brief Allocate a new MacroInfo object.
  MacroInfo *AllocateMacroInfo();

  /// \brief Allocate directive objects recording \#define / \#undef /
  /// visibility events.  \p ImportedFromModuleID and \p Overrides are
  /// presumably used when deserializing directives from a module —
  /// confirm against the AST reader before relying on that.
  DefMacroDirective *
  AllocateDefMacroDirective(MacroInfo *MI, SourceLocation Loc,
                            unsigned ImportedFromModuleID = 0,
                            ArrayRef<unsigned> Overrides = None);
  UndefMacroDirective *
  AllocateUndefMacroDirective(SourceLocation UndefLoc,
                              unsigned ImportedFromModuleID = 0,
                              ArrayRef<unsigned> Overrides = None);
  VisibilityMacroDirective *AllocateVisibilityMacroDirective(SourceLocation Loc,
                                                             bool isPublic);

  /// \brief Lex and validate a macro name, which occurs after a
  /// \#define or \#undef.
  ///
  /// This emits a diagnostic, sets the token kind to eod,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, MacroUse isDefineUndef = MU_Other);

  /// \brief The ( starting an argument list of a macro definition has just
  /// been read.  Lex the rest of the arguments and the closing ), updating
  /// \p MI with what we learn and saving in \p LastTok the last token read.
  /// Return true if an error occurs parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);

  /// \brief We just read a \#if or related directive and decided that the
  /// subsequent tokens are in the \#if'd out portion of the
  /// file.  Lex the rest of the file, until we see an \#endif.  If \p
  /// FoundNonSkipPortion is true, then we have already emitted code for part of
  /// this \#if directive, so \#else/\#elif blocks should never be entered. If
  /// \p FoundElse is false, then \#else directives are ok, if not, then we have
  /// already seen one so a \#else directive is a duplicate.  When this returns,
  /// the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse,
                                    SourceLocation ElseLoc = SourceLocation());
01434 
  /// \brief A fast PTH version of SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// \brief Evaluate an integer constant expression that may occur after a
  /// \#if or \#elif directive and return it as a bool.
  ///
  /// If the expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// \brief Install the standard preprocessor pragmas:
  /// \#pragma GCC poison/system_header/dependency and \#pragma once.
  void RegisterBuiltinPragmas();

  /// \brief Register builtin macros such as __LINE__ with the identifier table.
  void RegisterBuiltinMacros();

  /// \brief If an identifier token is read that is to be expanded as a macro,
  /// handle it and return the next token as 'Tok'.  If we lexed a token,
  /// return true; otherwise the caller should lex again.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroDirective *MD);

  /// \brief Cache macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it
  /// is going to lex in the cache and when it finishes the tokens are removed
  /// from the end of the cache.
  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
                                  ArrayRef<Token> tokens);
  /// \brief Drop the cached tokens belonging to the most recent TokenLexer.
  void removeCachedMacroExpandedTokensOfLastLexer();
  friend void TokenLexer::ExpandFunctionArguments();

  /// \brief Determine whether the next preprocessor token to be
  /// lexed is a '('.  If so, consume the token and return true, if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// \brief After reading "MACRO(", this method is invoked to read all of the
  /// formal arguments specified for the macro invocation.  Returns null on
  /// error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &ExpansionEnd);

  /// \brief If an identifier token is read that is to be expanded
  /// as a builtin macro, handle it and return the next token as 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// \brief Read a \c _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.
  /// This assumes that the \c _Pragma token has just been read into \p Tok.
  void Handle_Pragma(Token &Tok);

  /// \brief Like Handle_Pragma except the pragma text is not enclosed within
  /// a string literal.
  void HandleMicrosoft__pragma(Token &Tok);

  /// \brief Add a lexer to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// \brief Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
  /// \brief Set the FileID for the preprocessor predefines.
  ///
  /// May only be called once: asserts that no predefines FileID has been
  /// set before (PredefinesFileID must still be invalid).
  void setPredefinesFileID(FileID FID) {
    assert(PredefinesFileID.isInvalid() && "PredefinesFileID already set!");
    PredefinesFileID = FID;
  }
01502 
01503   /// \brief Returns true if we are lexing from a file and not a
01504   /// pragma or a macro.
01505   static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
01506     return L ? !L->isPragmaLexer() : P != nullptr;
01507   }
01508 
  /// \brief Convenience overload: tests a saved include-stack entry.
  static bool IsFileLexer(const IncludeStackInfo& I) {
    return IsFileLexer(I.TheLexer.get(), I.ThePPLexer);
  }

  /// \brief Convenience overload: tests the current lexer state.
  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }
01516 
  //===--------------------------------------------------------------------===//
  // Caching stuff.

  /// \brief Lex the next token from the token cache.
  /// NOTE(review): behavior inferred from the name and the caching section —
  /// confirm against the definition.
  void CachingLex(Token &Result);
01520   bool InCachingLexMode() const {
01521     // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
01522     // that we are past EOF, not that we are in CachingLex mode.
01523     return !CurPPLexer && !CurTokenLexer && !CurPTHLexer &&
01524            !IncludeMacroStack.empty();
01525   }
  void EnterCachingLexMode();
  /// \brief Leave caching-lex mode if we are in it, by removing the top of
  /// the lexer stack via RemoveTopOfLexerStack(); no-op otherwise.
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  /// \brief Peek at the \p N'th cached token — presumably lookahead without
  /// consuming; confirm against the definition.
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);
01533 
  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token next.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);
  void HandleMacroPublicDirective(Token &Tok);
  void HandleMacroPrivateDirective(Token &Tok);

  // File inclusion.
  // \p HashLoc is the location of the introducing '#'; see the definitions
  // for the exact lookup semantics of LookupFrom/LookupFromFile.
  void HandleIncludeDirective(SourceLocation HashLoc,
                              Token &Tok,
                              const DirectoryLookup *LookupFrom = nullptr,
                              const FileEntry *LookupFromFile = nullptr,
                              bool isImport = false);
  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
  void HandleMicrosoftImportDirective(Token &Tok);

  // Module inclusion testing.
  /// \brief Find the module for the source or header file that \p FilenameLoc
  /// points to.
  Module *getModuleForLocation(SourceLocation FilenameLoc);

  // Macro handling.
  void HandleDefineDirective(Token &Tok, bool ImmediatelyAfterTopLevelIfndef);
  void HandleUndefDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective(SourceLocation IntroducerLoc,
                             PragmaIntroducerKind Introducer);
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaPushMacro(Token &Tok);
  void HandlePragmaPopMacro(Token &Tok);
  void HandlePragmaIncludeAlias(Token &Tok);
  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);

  /// \brief Return true and store the first token only if any CommentHandler
  /// has inserted some tokens and getCommentRetentionState() is false.
  bool HandleComment(Token &Token, SourceRange Comment);

  /// \brief A macro is used, update information about macros that need unused
  /// warnings.
  void markMacroAsUsed(MacroInfo *MI);
};
01595 
/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
class CommentHandler {
public:
  virtual ~CommentHandler();

  /// \brief Process the comment covering \p Comment.
  ///
  /// \return true if the handler has pushed any tokens to be read using
  /// e.g. EnterToken or EnterTokenStream.
  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};
01606 
01607 }  // end namespace clang
01608 
01609 #endif