r195799 - Documentation cleanup: Doxygen-ification, typo fixes, and changing some of

James Dennett jdennett at google.com
Tue Nov 26 17:27:40 PST 2013


Author: jdennett
Date: Tue Nov 26 19:27:40 2013
New Revision: 195799

URL: http://llvm.org/viewvc/llvm-project?rev=195799&view=rev
Log:
Documentation cleanup: Doxygen-ification, typo fixes, and changing some of
the duplicated documentation from .cpp files so that it's not processed by
Doxygen and hence doesn't generate duplicate output.
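
For background, Doxygen processes "///" (and "/** ... */") comments but ignores
plain "//" comments, so keeping the Doxygen-visible text only in the header
avoids emitting the same documentation twice. A minimal sketch of the
convention, using a hypothetical function:

    // In the header: picked up by Doxygen.
    /// \brief Returns true if \p N is even.
    bool isEven(int N);

    // In the .cpp file: a plain '//' comment is not processed by Doxygen, so
    // the generated output does not duplicate the header documentation.
    // Returns true if N is even.
    bool isEven(int N) { return (N % 2) == 0; }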

Modified:
    cfe/trunk/include/clang/Lex/Preprocessor.h
    cfe/trunk/lib/Lex/PPCaching.cpp
    cfe/trunk/lib/Lex/PPDirectives.cpp

Modified: cfe/trunk/include/clang/Lex/Preprocessor.h
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/include/clang/Lex/Preprocessor.h?rev=195799&r1=195798&r2=195799&view=diff
==============================================================================
--- cfe/trunk/include/clang/Lex/Preprocessor.h (original)
+++ cfe/trunk/include/clang/Lex/Preprocessor.h Tue Nov 26 19:27:40 2013
@@ -6,9 +6,10 @@
 // License. See LICENSE.TXT for details.
 //
 //===----------------------------------------------------------------------===//
-//
-//  This file defines the Preprocessor interface.
-//
+///
+/// \file
+/// \brief Defines the clang::Preprocessor interface.
+///
 //===----------------------------------------------------------------------===//
 
 #ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
@@ -78,11 +79,12 @@ public:
   }
 };
 
-/// Preprocessor - This object engages in a tight little dance with the lexer to
-/// efficiently preprocess tokens.  Lexers know only about tokens within a
-/// single source file, and don't know anything about preprocessor-level issues
-/// like the \#include stack, token expansion, etc.
+/// \brief Engages in a tight little dance with the lexer to efficiently
+/// preprocess tokens.
 ///
+/// Lexers know only about tokens within a single source file, and don't
+/// know anything about preprocessor-level issues like the \#include stack,
+/// token expansion, etc.
 class Preprocessor : public RefCountedBase<Preprocessor> {
   IntrusiveRefCntPtr<PreprocessorOptions> PPOpts;
   DiagnosticsEngine        *Diags;
@@ -98,12 +100,12 @@ class Preprocessor : public RefCountedBa
   ExternalPreprocessorSource *ExternalSource;
 
 
-  /// PTH - An optional PTHManager object used for getting tokens from
-  ///  a token cache rather than lexing the original source file.
+  /// An optional PTHManager object used for getting tokens from
+  /// a token cache rather than lexing the original source file.
   OwningPtr<PTHManager> PTH;
 
-  /// BP - A BumpPtrAllocator object used to quickly allocate and release
-  ///  objects internal to the Preprocessor.
+  /// A BumpPtrAllocator object used to quickly allocate and release
+  /// objects internal to the Preprocessor.
   llvm::BumpPtrAllocator BP;
 
   /// Identifiers for builtin macros and other builtins.
@@ -129,7 +131,7 @@ class Preprocessor : public RefCountedBa
   unsigned CounterValue;  // Next __COUNTER__ value.
 
   enum {
-    /// MaxIncludeStackDepth - Maximum depth of \#includes.
+    /// \brief Maximum depth of \#includes.
     MaxAllowedIncludeStackDepth = 200
   };
 
@@ -144,12 +146,11 @@ class Preprocessor : public RefCountedBa
   /// Whether the preprocessor owns the header search object.
   bool OwnsHeaderSearch : 1;
 
-  /// DisableMacroExpansion - True if macro expansion is disabled.
+  /// True if macro expansion is disabled.
   bool DisableMacroExpansion : 1;
 
-  /// MacroExpansionInDirectivesOverride - Temporarily disables
-  /// DisableMacroExpansion (i.e. enables expansion) when parsing preprocessor
-  /// directives.
+  /// Temporarily disables DisableMacroExpansion (i.e. enables expansion)
+  /// when parsing preprocessor directives.
   bool MacroExpansionInDirectivesOverride : 1;
 
   class ResetMacroExpansionHelper;
@@ -169,23 +170,25 @@ class Preprocessor : public RefCountedBa
   /// \brief True if we are pre-expanding macro arguments.
   bool InMacroArgPreExpansion;
 
-  /// Identifiers - This is mapping/lookup information for all identifiers in
+  /// \brief Mapping/lookup information for all identifiers in
   /// the program, including program keywords.
   mutable IdentifierTable Identifiers;
 
-  /// Selectors - This table contains all the selectors in the program. Unlike
-  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
-  /// It is declared/expanded here because it's role/lifetime is
-  /// conceptually similar the IdentifierTable. In addition, the current control
-  /// flow (in clang::ParseAST()), make it convenient to put here.
+  /// \brief This table contains all the selectors in the program.
+  ///
+  /// Unlike IdentifierTable above, this table *isn't* populated by the
+  /// preprocessor. It is declared/expanded here because its role/lifetime is
+  /// conceptually similar to the IdentifierTable. In addition, the current
+  /// control flow (in clang::ParseAST()) makes it convenient to put it here.
+  ///
   /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
   /// the lifetime of the preprocessor.
   SelectorTable Selectors;
 
-  /// BuiltinInfo - Information about builtins.
+  /// \brief Information about builtins.
   Builtin::Context BuiltinInfo;
 
-  /// PragmaHandlers - This tracks all of the pragmas that the client registered
+  /// \brief Tracks all of the pragmas that the client registered
   /// with this preprocessor.
   PragmaNamespace *PragmaHandlers;
 
@@ -216,7 +219,7 @@ class Preprocessor : public RefCountedBa
   /// for preprocessing.
   SourceLocation CodeCompletionFileLoc;
 
-  /// \brief The source location of the 'import' contextual keyword we just 
+  /// \brief The source location of the \c import contextual keyword we just 
   /// lexed, if any.
   SourceLocation ModuleImportLoc;
 
@@ -226,45 +229,52 @@ class Preprocessor : public RefCountedBa
   /// \brief Whether the last token we lexed was an '@'.
   bool LastTokenWasAt;
 
-  /// \brief Whether the module import expectes an identifier next. Otherwise,
+  /// \brief Whether the module import expects an identifier next. Otherwise,
   /// it expects a '.' or ';'.
   bool ModuleImportExpectsIdentifier;
   
   /// \brief The source location of the currently-active
-  /// #pragma clang arc_cf_code_audited begin.
+  /// \#pragma clang arc_cf_code_audited begin.
   SourceLocation PragmaARCCFCodeAuditedLoc;
 
   /// \brief True if we hit the code-completion point.
   bool CodeCompletionReached;
 
   /// \brief The number of bytes that we will initially skip when entering the
-  /// main file, which is used when loading a precompiled preamble, along
-  /// with a flag that indicates whether skipping this number of bytes will
-  /// place the lexer at the start of a line.
+  /// main file, along with a flag that indicates whether skipping this number
+  /// of bytes will place the lexer at the start of a line.
+  ///
+  /// This is used when loading a precompiled preamble.
   std::pair<unsigned, bool> SkipMainFilePreamble;
 
-  /// CurLexer - This is the current top of the stack that we're lexing from if
+  /// \brief The current top of the stack that we're lexing from if
   /// not expanding a macro and we are lexing directly from source code.
-  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
+  ///
+  /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
   OwningPtr<Lexer> CurLexer;
 
-  /// CurPTHLexer - This is the current top of stack that we're lexing from if
-  ///  not expanding from a macro and we are lexing from a PTH cache.
-  ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
+  /// \brief The current top of stack that we're lexing from if
+  /// not expanding from a macro and we are lexing from a PTH cache.
+  ///
+  /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
   OwningPtr<PTHLexer> CurPTHLexer;
 
-  /// CurPPLexer - This is the current top of the stack what we're lexing from
-  ///  if not expanding a macro.  This is an alias for either CurLexer or
-  ///  CurPTHLexer.
+  /// \brief The current top of the stack that we're lexing from
+  /// if not expanding a macro.
+  ///
+  /// This is an alias for either CurLexer or CurPTHLexer.
   PreprocessorLexer *CurPPLexer;
 
-  /// CurLookup - The DirectoryLookup structure used to find the current
-  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
-  /// implement \#include_next and find directory-specific properties.
+  /// \brief Used to find the current FileEntry, if CurLexer is non-null
+  /// and if applicable.
+  ///
+  /// This allows us to implement \#include_next and find directory-specific
+  /// properties.
   const DirectoryLookup *CurDirLookup;
 
-  /// CurTokenLexer - This is the current macro we are expanding, if we are
-  /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
+  /// \brief The current macro we are expanding, if we are expanding a macro.
+  ///
+  /// One of CurLexer and CurTokenLexer must be null.
   OwningPtr<TokenLexer> CurTokenLexer;
 
   /// \brief The kind of lexer we're currently working with.
@@ -279,7 +289,7 @@ class Preprocessor : public RefCountedBa
   /// \brief True if the current lexer is for a submodule.
   bool CurIsSubmodule;
 
-  /// IncludeMacroStack - This keeps track of the stack of files currently
+  /// \brief Keeps track of the stack of files currently
   /// \#included, and macros currently being expanded from, not counting
   /// CurLexer/CurTokenLexer.
   struct IncludeStackInfo {
@@ -299,7 +309,7 @@ class Preprocessor : public RefCountedBa
   };
   std::vector<IncludeStackInfo> IncludeMacroStack;
 
-  /// Callbacks - These are actions invoked when some preprocessor activity is
+  /// \brief Actions invoked when some preprocessor activity is
   /// encountered (e.g. a file is \#included, etc).
   PPCallbacks *Callbacks;
 
@@ -312,14 +322,16 @@ class Preprocessor : public RefCountedBa
   };
   SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks;
 
-  /// Macros - For each IdentifierInfo that was associated with a macro, we
+  /// For each IdentifierInfo that was associated with a macro, we
   /// keep a mapping to the history of all macro definitions and #undefs in
   /// the reverse order (the latest one is in the head of the list).
   llvm::DenseMap<const IdentifierInfo*, MacroDirective*> Macros;
   friend class ASTReader;
   
   /// \brief Macros that we want to warn because they are not used at the end
-  /// of the translation unit; we store just their SourceLocations instead
+  /// of the translation unit.
+  ///
+  /// We store just their SourceLocations instead of
   /// something like MacroInfo*. The benefit of this is that when we are
   /// deserializing from PCH, we don't need to deserialize identifier & macros
   /// just so that we can report that they are unused, we just warn using
@@ -328,14 +340,13 @@ class Preprocessor : public RefCountedBa
   typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
   WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
 
-  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
+  /// \brief A "freelist" of MacroArg objects that can be
   /// reused for quick allocation.
   MacroArgs *MacroArgCache;
   friend class MacroArgs;
 
-  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
-  /// push_macro directive, we keep a MacroInfo stack used to restore
-  /// previous macro value.
+  /// For each IdentifierInfo used in a \#pragma push_macro directive,
+  /// we keep a MacroInfo stack used to restore the previous macro value.
   llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
 
   // Various statistics we track for performance analysis.
@@ -346,17 +357,19 @@ class Preprocessor : public RefCountedBa
   unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
   unsigned NumSkipped;
 
-  /// Predefines - This string is the predefined macros that preprocessor
-  /// should use from the command line etc.
+  /// \brief The predefined macros that the preprocessor should use from the
+  /// command line etc.
   std::string Predefines;
 
   /// \brief The file ID for the preprocessor predefines.
   FileID PredefinesFileID;
 
-  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
+  /// \{
+  /// \brief Cache of macro expanders to reduce malloc traffic.
   enum { TokenLexerCacheSize = 8 };
   unsigned NumCachedTokenLexers;
   TokenLexer *TokenLexerCache[TokenLexerCacheSize];
+  /// \}
 
   /// \brief Keeps macro expanded tokens for TokenLexers.
   //
@@ -376,17 +389,20 @@ class Preprocessor : public RefCountedBa
 private:  // Cached tokens state.
   typedef SmallVector<Token, 1> CachedTokensTy;
 
-  /// CachedTokens - Cached tokens are stored here when we do backtracking or
+  /// \brief Cached tokens are stored here when we do backtracking or
   /// lookahead. They are "lexed" by the CachingLex() method.
   CachedTokensTy CachedTokens;
 
-  /// CachedLexPos - The position of the cached token that CachingLex() should
-  /// "lex" next. If it points beyond the CachedTokens vector, it means that
-  /// a normal Lex() should be invoked.
+  /// \brief The position of the cached token that CachingLex() should
+  /// "lex" next.
+  ///
+  /// If it points beyond the CachedTokens vector, it means that a normal
+  /// Lex() should be invoked.
   CachedTokensTy::size_type CachedLexPos;
 
-  /// BacktrackPositions - Stack of backtrack positions, allowing nested
-  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
+  /// \brief Stack of backtrack positions, allowing nested backtracks.
+  ///
+  /// The EnableBacktrackAtThisPos() method pushes a position to
   /// indicate where CachedLexPos should be set when the BackTrack() method is
   /// invoked (at which point the last position is popped).
   std::vector<CachedTokensTy::size_type> BacktrackPositions;
@@ -401,7 +417,7 @@ private:  // Cached tokens state.
   /// of that list.
   MacroInfoChain *MIChainHead;
 
-  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
+  /// A "freelist" of MacroInfo objects that can be reused for quick
   /// allocation.
   MacroInfoChain *MICache;
 
@@ -474,8 +490,7 @@ public:
     return ParsingIfOrElifDirective;
   }
 
-  /// SetCommentRetentionState - Control whether or not the preprocessor retains
-  /// comments in output.
+  /// \brief Control whether the preprocessor retains comments in output.
   void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
     this->KeepComments = KeepComments | KeepMacroComments;
     this->KeepMacroComments = KeepMacroComments;
@@ -504,26 +519,29 @@ public:
   /// false if it is producing tokens to be consumed by Parse and Sema.
   bool isPreprocessedOutput() const { return PreprocessedOutput; }
 
-  /// isCurrentLexer - Return true if we are lexing directly from the specified
-  /// lexer.
+  /// \brief Return true if we are lexing directly from the specified lexer.
   bool isCurrentLexer(const PreprocessorLexer *L) const {
     return CurPPLexer == L;
   }
 
-  /// getCurrentLexer - Return the current lexer being lexed from.  Note
-  /// that this ignores any potentially active macro expansions and _Pragma
+  /// \brief Return the current lexer being lexed from.
+  ///
+  /// Note that this ignores any potentially active macro expansions and _Pragma
   /// expansions going on at the time.
   PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
 
-  /// getCurrentFileLexer - Return the current file lexer being lexed from.
+  /// \brief Return the current file lexer being lexed from.
+  ///
   /// Note that this ignores any potentially active macro expansions and _Pragma
   /// expansions going on at the time.
   PreprocessorLexer *getCurrentFileLexer() const;
 
-  /// \brief Returns the file ID for the preprocessor predefines.
+  /// \brief Returns the FileID for the preprocessor predefines.
   FileID getPredefinesFileID() const { return PredefinesFileID; }
 
-  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
+  /// \{
+  /// \brief Accessors for preprocessor callbacks.
+  ///
   /// Note that this class takes ownership of any PPCallbacks object given to
   /// it.
   PPCallbacks *getPPCallbacks() const { return Callbacks; }
@@ -532,9 +550,10 @@ public:
       C = new PPChainedCallbacks(C, Callbacks);
     Callbacks = C;
   }
+  /// \}
 
   /// \brief Given an identifier, return its latest MacroDirective if it is
-  // \#defined or null if it isn't \#define'd.
+  /// \#defined or null if it isn't \#define'd.
   MacroDirective *getMacroDirective(IdentifierInfo *II) const {
     if (!II->hasMacroDefinition())
       return 0;
@@ -555,9 +574,10 @@ public:
   }
 
   /// \brief Given an identifier, return the (probably #undef'd) MacroInfo
-  /// representing the most recent macro definition. One can iterate over all
-  /// previous macro definitions from it. This method should only be called for
-  /// identifiers that hadMacroDefinition().
+  /// representing the most recent macro definition.
+  ///
+  /// One can iterate over all previous macro definitions from the most recent
+  /// one. This should only be called for identifiers that hadMacroDefinition().
   MacroDirective *getMacroDirectiveHistory(const IdentifierInfo *II) const;
 
   /// \brief Add a directive to the macro directive history for this identifier.
@@ -575,14 +595,15 @@ public:
   /// \brief Set a MacroDirective that was loaded from a PCH file.
   void setLoadedMacroDirective(IdentifierInfo *II, MacroDirective *MD);
 
-  /// macro_iterator/macro_begin/macro_end - This allows you to walk the macro
-  /// history table. Currently defined macros have
+  /// \{
+  /// Iterators for the macro history table. Currently defined macros have
   /// IdentifierInfo::hasMacroDefinition() set and an empty
   /// MacroInfo::getUndefLoc() at the head of the list.
   typedef llvm::DenseMap<const IdentifierInfo *,
                          MacroDirective*>::const_iterator macro_iterator;
   macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
   macro_iterator macro_end(bool IncludeExternalMacros = true) const;
+  /// \}
 
   /// \brief Return the name of the macro defined before \p Loc that has
   /// spelling \p Tokens.  If there are multiple macros with same spelling,
@@ -591,8 +612,9 @@ public:
                                      ArrayRef<TokenValue> Tokens) const;
 
   const std::string &getPredefines() const { return Predefines; }
-  /// setPredefines - Set the predefines for this Preprocessor.  These
-  /// predefines are automatically injected when parsing the main file.
+  /// \brief Set the predefines for this Preprocessor.
+  ///
+  /// These predefines are automatically injected when parsing the main file.
   void setPredefines(const char *P) { Predefines = P; }
   void setPredefines(const std::string &P) { Predefines = P; }
 
@@ -602,18 +624,20 @@ public:
     return &Identifiers.get(Name);
   }
 
-  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
-  /// If 'Namespace' is non-null, then it is a token required to exist on the
+  /// \brief Add the specified pragma handler to this preprocessor.
+  ///
+  /// If \p Namespace is non-null, then it is a token required to exist on the
   /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
   void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
   void AddPragmaHandler(PragmaHandler *Handler) {
     AddPragmaHandler(StringRef(), Handler);
   }
 
-  /// RemovePragmaHandler - Remove the specific pragma handler from
-  /// the preprocessor. If \p Namespace is non-null, then it should
-  /// be the namespace that \p Handler was added to. It is an error
-  /// to remove a handler that has not been registered.
+  /// \brief Remove the specific pragma handler from this preprocessor.
+  ///
+  /// If \p Namespace is non-null, then it should be the namespace that
+  /// \p Handler was added to. It is an error to remove a handler that
+  /// has not been registered.
   void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
   void RemovePragmaHandler(PragmaHandler *Handler) {
     RemovePragmaHandler(StringRef(), Handler);
@@ -654,51 +678,51 @@ public:
   /// all macro expansions, macro definitions, etc.
   void createPreprocessingRecord();
 
-  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
+  /// \brief Enter the specified FileID as the main source file,
   /// which implicitly adds the builtin defines etc.
   void EnterMainSourceFile();
 
-  /// EndSourceFile - Inform the preprocessor callbacks that processing is
-  /// complete.
+  /// \brief Inform the preprocessor callbacks that processing is complete.
   void EndSourceFile();
 
-  /// EnterSourceFile - Add a source file to the top of the include stack and
-  /// start lexing tokens from it instead of the current buffer.  Emit an error
-  /// and don't enter the file on error.
+  /// \brief Add a source file to the top of the include stack and
+  /// start lexing tokens from it instead of the current buffer. 
+  ///
+  /// Emit an error and don't enter the file on error.
   void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
                        SourceLocation Loc, bool IsSubmodule = false);
 
-  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
-  /// tokens from it instead of the current buffer.  Args specifies the
-  /// tokens input to a function-like macro.
+  /// \brief Add a Macro to the top of the include stack and start lexing
+  /// tokens from it instead of the current buffer. 
   ///
-  /// ILEnd specifies the location of the ')' for a function-like macro or the
-  /// identifier for an object-like macro.
+  /// \param Args specifies the tokens input to a function-like macro.
+  /// \param ILEnd specifies the location of the ')' for a function-like macro
+  /// or the identifier for an object-like macro.
   void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroInfo *Macro,
                   MacroArgs *Args);
 
-  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
+  /// \brief Add a "macro" context to the top of the include stack,
   /// which will cause the lexer to start returning the specified tokens.
   ///
-  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
-  /// not be subject to further macro expansion.  Otherwise, these tokens will
-  /// be re-macro-expanded when/if expansion is enabled.
-  ///
-  /// If OwnsTokens is false, this method assumes that the specified stream of
-  /// tokens has a permanent owner somewhere, so they do not need to be copied.
-  /// If it is true, it assumes the array of tokens is allocated with new[] and
-  /// must be freed.
-  ///
+  /// If \p DisableMacroExpansion is true, tokens lexed from the token stream
+  /// will not be subject to further macro expansion. Otherwise, these tokens
+  /// will be re-macro-expanded when/if expansion is enabled.
+  ///
+  /// If \p OwnsTokens is false, this method assumes that the specified stream
+  /// of tokens has a permanent owner somewhere, so they do not need to be
+  /// copied. If it is true, it assumes the array of tokens is allocated with
+  /// \c new[] and must be freed.
   void EnterTokenStream(const Token *Toks, unsigned NumToks,
                         bool DisableMacroExpansion, bool OwnsTokens);
 
-  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
-  /// lexer stack.  This should only be used in situations where the current
-  /// state of the top-of-stack lexer is known.
+  /// \brief Pop the current lexer/macro exp off the top of the lexer stack.
+  ///
+  /// This should only be used in situations where the current state of the
+  /// top-of-stack lexer is known.
   void RemoveTopOfLexerStack();
 
-  /// EnableBacktrackAtThisPos - From the point that this method is called, and
-  /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
+  /// From the point that this method is called, and until
+  /// CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
   /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
   /// make the Preprocessor re-lex the same tokens.
   ///
@@ -712,18 +736,18 @@ public:
   ///
   void EnableBacktrackAtThisPos();
 
-  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
+  /// \brief Disable the last EnableBacktrackAtThisPos call.
   void CommitBacktrackedTokens();
 
-  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
+  /// \brief Make Preprocessor re-lex the tokens that were lexed since
   /// EnableBacktrackAtThisPos() was previously called.
   void Backtrack();
 
-  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
+  /// \brief True if EnableBacktrackAtThisPos() was called and
   /// caching of tokens is on.
   bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
 
-  /// Lex - Lex the next token for this preprocessor.
+  /// \brief Lex the next token for this preprocessor.
   void Lex(Token &Result);
 
   void LexAfterModuleImport(Token &Result);
@@ -747,17 +771,18 @@ public:
                               const char *DiagnosticTag,
                               bool AllowMacroExpansion);
 
-  /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
-  /// something not a comment.  This is useful in -E -C mode where comments
-  /// would foul up preprocessor directive handling.
+  /// \brief Lex a token.  If it's a comment, keep lexing until we get
+  /// something not a comment.
+  ///
+  /// This is useful in -E -C mode where comments would foul up preprocessor
+  /// directive handling.
   void LexNonComment(Token &Result) {
     do
       Lex(Result);
     while (Result.getKind() == tok::comment);
   }
 
-  /// LexUnexpandedToken - This is just like Lex, but this disables macro
-  /// expansion of identifier tokens.
+  /// \brief Just like Lex, but disables macro expansion of identifier tokens.
   void LexUnexpandedToken(Token &Result) {
     // Disable macro expansion.
     bool OldVal = DisableMacroExpansion;
@@ -769,8 +794,8 @@ public:
     DisableMacroExpansion = OldVal;
   }
 
-  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
-  /// expansion of identifier tokens.
+  /// \brief Like LexNonComment, but this disables macro expansion of
+  /// identifier tokens.
   void LexUnexpandedNonComment(Token &Result) {
     do
       LexUnexpandedToken(Result);
@@ -783,10 +808,12 @@ public:
     MacroExpansionInDirectivesOverride = true;
   }
 
-  /// LookAhead - This peeks ahead N tokens and returns that token without
-  /// consuming any tokens.  LookAhead(0) returns the next token that would be
-  /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
-  /// returns normal tokens after phase 5.  As such, it is equivalent to using
+  /// \brief Peeks ahead N tokens and returns that token without consuming any
+  /// tokens.
+  ///
+  /// LookAhead(0) returns the next token that would be returned by Lex(),
+  /// LookAhead(1) returns the token after it, etc.  This returns normal
+  /// tokens after phase 5.  As such, it is equivalent to using
   /// 'Lex', not 'LexUnexpandedToken'.
   const Token &LookAhead(unsigned N) {
     if (CachedLexPos + N < CachedTokens.size())
@@ -795,8 +822,9 @@ public:
       return PeekAhead(N+1);
   }
 
-  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
+  /// \brief When backtracking is enabled and tokens are cached,
   /// this allows to revert a specific number of tokens.
+  ///
   /// Note that the number of tokens being reverted should be up to the last
   /// backtrack position, not more.
   void RevertCachedTokens(unsigned N) {
@@ -809,20 +837,21 @@ public:
     CachedLexPos -= N;
   }
 
-  /// EnterToken - Enters a token in the token stream to be lexed next. If
-  /// BackTrack() is called afterwards, the token will remain at the insertion
-  /// point.
+  /// \brief Enters a token in the token stream to be lexed next.
+  ///
+  /// If BackTrack() is called afterwards, the token will remain at the
+  /// insertion point.
   void EnterToken(const Token &Tok) {
     EnterCachingLexMode();
     CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
   }
 
-  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
-  /// tokens (because backtrack is enabled) it should replace the most recent
-  /// cached tokens with the given annotation token. This function has no effect
-  /// if backtracking is not enabled.
+  /// We notify the Preprocessor that if it is caching tokens (because
+  /// backtrack is enabled) it should replace the most recent cached tokens
+  /// with the given annotation token. This function has no effect if
+  /// backtracking is not enabled.
   ///
-  /// Note that the use of this function is just for optimization; so that the
+  /// Note that the use of this function is just for optimization, so that the
   /// cached tokens doesn't get re-parsed and re-resolved after a backtrack is
   /// invoked.
   void AnnotateCachedTokens(const Token &Tok) {
@@ -852,7 +881,7 @@ public:
       CachedTokens[CachedLexPos-1] = Tok;
   }
 
-  /// TypoCorrectToken - Update the current token to represent the provided
+  /// Update the current token to represent the provided
   /// identifier, in order to cache an action performed by typo correction.
   void TypoCorrectToken(const Token &Tok) {
     assert(Tok.getIdentifierInfo() && "Expected identifier token");
@@ -893,11 +922,13 @@ public:
   bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
 
   /// \brief Returns the location of the code-completion point.
+  ///
   /// Returns an invalid location if code-completion is not enabled or the file
   /// containing the code-completion point has not been lexed yet.
   SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
 
   /// \brief Returns the start location of the file of code-completion point.
+  ///
   /// Returns an invalid location if code-completion is not enabled or the file
   /// containing the code-completion point has not been lexed yet.
   SourceLocation getCodeCompletionFileLoc() const {
@@ -917,8 +948,9 @@ public:
   }
 
   /// \brief The location of the currently-active \#pragma clang
-  /// arc_cf_code_audited begin.  Returns an invalid location if there
-  /// is no such pragma active.
+  /// arc_cf_code_audited begin.
+  ///
+  /// Returns an invalid location if there is no such pragma active.
   SourceLocation getPragmaARCCFCodeAuditedLoc() const {
     return PragmaARCCFCodeAuditedLoc;
   }
@@ -940,7 +972,7 @@ public:
     SkipMainFilePreamble.second = StartOfLine;
   }
 
-  /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
+  /// Forwarding function for diagnostics.  This emits a diagnostic at
   /// the specified Token's location, translating the token's start
   /// position in the current buffer into a SourcePosition object for rendering.
   DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
@@ -951,7 +983,7 @@ public:
     return Diags->Report(Tok.getLocation(), DiagID);
   }
 
-  /// getSpelling() - Return the 'spelling' of the token at the given
+  /// Return the 'spelling' of the token at the given
   /// location; does not go up to the spelling location or down to the
   /// expansion location.
   ///
@@ -964,21 +996,24 @@ public:
     return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid);
   }
 
-  /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
-  /// token is the characters used to represent the token in the source file
-  /// after trigraph expansion and escaped-newline folding.  In particular, this
-  /// wants to get the true, uncanonicalized, spelling of things like digraphs
-  /// UCNs, etc.
+  /// \brief Return the 'spelling' of the Tok token.
+  ///
+  /// The spelling of a token is the characters used to represent the token in
+  /// the source file after trigraph expansion and escaped-newline folding.  In
+  /// particular, this wants to get the true, uncanonicalized, spelling of
+  /// things like digraphs, UCNs, etc.
   ///
   /// \param Invalid If non-null, will be set \c true if an error occurs.
   std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
     return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid);
   }
 
-  /// getSpelling - This method is used to get the spelling of a token into a
-  /// preallocated buffer, instead of as an std::string.  The caller is required
-  /// to allocate enough space for the token, which is guaranteed to be at least
-  /// Tok.getLength() bytes long.  The length of the actual result is returned.
+  /// \brief Get the spelling of a token into a preallocated buffer, instead
+  /// of as an std::string.
+  ///
+  /// The caller is required to allocate enough space for the token, which is
+  /// guaranteed to be at least Tok.getLength() bytes long. The length of the
+  /// actual result is returned.
   ///
   /// Note that this method may do two possible things: it may either fill in
   /// the buffer specified with characters, or it may *change the input pointer*
@@ -990,8 +1025,9 @@ public:
     return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid);
   }
 
-  /// getSpelling - This method is used to get the spelling of a token into a
-  /// SmallVector. Note that the returned StringRef may not point to the
+  /// \brief Get the spelling of a token into a SmallVector.
+  ///
+  /// Note that the returned StringRef may not point to the
   /// supplied buffer if a copy can be avoided.
   StringRef getSpelling(const Token &Tok,
                         SmallVectorImpl<char> &Buffer,
@@ -1004,8 +1040,8 @@ public:
     return Lexer::getRawToken(Loc, Result, SourceMgr, LangOpts, IgnoreWhiteSpace);
   }
 
-  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
-  /// with length 1, return the character.
+  /// \brief Given a Token \p Tok that is a numeric constant with length 1,
+  /// return the character.
   char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
                                                    bool *Invalid = 0) const {
     assert(Tok.is(tok::numeric_constant) &&
@@ -1023,18 +1059,21 @@ public:
 
   /// \brief Retrieve the name of the immediate macro expansion.
   ///
-  /// This routine starts from a source location, and finds the name of the macro
-  /// responsible for its immediate expansion. It looks through any intervening
-  /// macro argument expansions to compute this. It returns a StringRef which
-  /// refers to the SourceManager-owned buffer of the source where that macro
-  /// name is spelled. Thus, the result shouldn't out-live the SourceManager.
+  /// This routine starts from a source location, and finds the name of the
+  /// macro responsible for its immediate expansion. It looks through any
+  /// intervening macro argument expansions to compute this. It returns a
+  /// StringRef that refers to the SourceManager-owned buffer of the source
+  /// where that macro name is spelled. Thus, the result shouldn't out-live
+  /// the SourceManager.
   StringRef getImmediateMacroName(SourceLocation Loc) {
     return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts());
   }
 
-  /// CreateString - Plop the specified string into a scratch buffer and set the
-  /// specified token's location and length to it.  If specified, the source
-  /// location provides a location of the expansion point of the token.
+  /// \brief Plop the specified string into a scratch buffer and set the
+  /// specified token's location and length to it. 
+  ///
+  /// If specified, the source location provides a location of the expansion
+  /// point of the token.
   void CreateString(StringRef Str, Token &Tok,
                     SourceLocation ExpansionLocStart = SourceLocation(),
                     SourceLocation ExpansionLocEnd = SourceLocation());
@@ -1079,23 +1118,22 @@ public:
     return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd);
   }
 
-  /// DumpToken - Print the token to stderr, used for debugging.
-  ///
+  /// \brief Print the token to stderr, used for debugging.
   void DumpToken(const Token &Tok, bool DumpFlags = false) const;
   void DumpLocation(SourceLocation Loc) const;
   void DumpMacro(const MacroInfo &MI) const;
 
-  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
+  /// \brief Given a location that specifies the start of a
   /// token, return a new location that specifies a character within the token.
   SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
                                          unsigned Char) const {
     return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts);
   }
 
-  /// IncrementPasteCounter - Increment the counters for the number of token
-  /// paste operations performed.  If fast was specified, this is a 'fast paste'
-  /// case we handled.
+  /// \brief Increment the counters for the number of token paste operations
+  /// performed.
   ///
+  /// If fast was specified, this is a 'fast paste' case we handled.
   void IncrementPasteCounter(bool isFast) {
     if (isFast)
       ++NumFastTokenPaste;
@@ -1107,16 +1145,16 @@ public:
 
   size_t getTotalMemory() const;
 
-  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
-  /// comment (/##/) in microsoft mode, this method handles updating the current
-  /// state, returning the token on the next source line.
+  /// When the macro expander pastes together a comment (/##/) in Microsoft
+  /// mode, this method handles updating the current state, returning the
+  /// token on the next source line.
   void HandleMicrosoftCommentPaste(Token &Tok);
 
   //===--------------------------------------------------------------------===//
   // Preprocessor callback methods.  These are invoked by a lexer as various
   // directives and events are found.
 
-  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
+  /// Given a tok::raw_identifier token, look up the
   /// identifier information for the token and install it into the token,
   /// updating the token kind accordingly.
   IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
@@ -1126,14 +1164,13 @@ private:
 
 public:
 
-  // SetPoisonReason - Call this function to indicate the reason for
-  // poisoning an identifier. If that identifier is accessed while
-  // poisoned, then this reason will be used instead of the default
-  // "poisoned" diagnostic.
+  /// \brief Specifies the reason for poisoning an identifier.
+  ///
+  /// If that identifier is accessed while poisoned, then this reason will be
+  /// used instead of the default "poisoned" diagnostic.
   void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
 
-  // HandlePoisonedIdentifier - Display reason for poisoned
-  // identifier.
+  /// \brief Display reason for poisoned identifier.
   void HandlePoisonedIdentifier(Token & Tok);
 
   void MaybeHandlePoisonedIdentifier(Token & Identifier) {
@@ -1165,42 +1202,47 @@ private:
 public:
   void PoisonSEHIdentifiers(bool Poison = true); // Borland
 
-  /// HandleIdentifier - This callback is invoked when the lexer reads an
-  /// identifier and has filled in the tokens IdentifierInfo member.  This
-  /// callback potentially macro expands it or turns it into a named token (like
-  /// 'for').
+  /// \brief Callback invoked when the lexer reads an identifier and has
+  /// filled in the token's IdentifierInfo member.
+  ///
+  /// This callback potentially macro expands it or turns it into a named
+  /// token (like 'for').
   ///
   /// \returns true if we actually computed a token, false if we need to
   /// lex again.
   bool HandleIdentifier(Token &Identifier);
 
 
-  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
-  /// the current file.  This either returns the EOF token and returns true, or
+  /// \brief Callback invoked when the lexer hits the end of the current file.
+  ///
+  /// This either returns the EOF token and returns true, or
   /// pops a level off the include stack and returns false, at which point the
   /// client should call lex again.
   bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
 
-  /// HandleEndOfTokenLexer - This callback is invoked when the current
-  /// TokenLexer hits the end of its token stream.
+  /// \brief Callback invoked when the current TokenLexer hits the end of its
+  /// token stream.
   bool HandleEndOfTokenLexer(Token &Result);
 
-  /// HandleDirective - This callback is invoked when the lexer sees a # token
-  /// at the start of a line.  This consumes the directive, modifies the
-  /// lexer/preprocessor state, and advances the lexer(s) so that the next token
-  /// read is the correct one.
+  /// \brief Callback invoked when the lexer sees a # token at the start of a
+  /// line.
+  ///
+  /// This consumes the directive, modifies the lexer/preprocessor state, and
+  /// advances the lexer(s) so that the next token read is the correct one.
   void HandleDirective(Token &Result);
 
-  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
-  /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
-  /// true, then we consider macros that expand to zero tokens as being ok.
+  /// \brief Ensure that the next token is a tok::eod token.
+  ///
+  /// If not, emit a diagnostic and consume up until the eod.
+  /// If \p EnableMacros is true, then we consider macros that expand to zero
+  /// tokens as being ok.
   void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
 
-  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
-  /// current line until the tok::eod token is found.
+  /// \brief Read and discard all tokens remaining on the current line until
+  /// the tok::eod token is found.
   void DiscardUntilEndOfDirective();
 
-  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
+  /// \brief Returns true if the preprocessor has seen a use of
   /// __DATE__ or __TIME__ in the file so far.
   bool SawDateOrTime() const {
     return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
@@ -1241,17 +1283,20 @@ public:
                               ModuleMap::KnownHeader *SuggestedModule,
                               bool SkipCache = false);
 
-  /// GetCurLookup - The DirectoryLookup structure used to find the current
-  /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
-  /// implement \#include_next and find directory-specific properties.
+  /// \brief Get the DirectoryLookup structure used to find the current
+  /// FileEntry, if CurLexer is non-null and if applicable. 
+  ///
+  /// This allows us to implement \#include_next and find directory-specific
+  /// properties.
   const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
 
   /// \brief Return true if we're in the top-level file, not in a \#include.
   bool isInPrimaryFile() const;
 
-  /// ConcatenateIncludeName - Handle cases where the \#include name is expanded
-  /// from a macro as multiple tokens, which need to be glued together.  This
-  /// occurs for code like:
+  /// \brief Handle cases where the \#include name is expanded
+  /// from a macro as multiple tokens, which need to be glued together. 
+  ///
+  /// This occurs for code like:
   /// \code
   ///    \#define FOO <x/y.h>
   ///    \#include FOO
@@ -1264,7 +1309,7 @@ public:
   bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
                               SourceLocation &End);
 
-  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
+  /// \brief Lex an on-off-switch (C99 6.10.6p2) and verify that it is
   /// followed by EOD.  Return true if the token is not a valid on-off-switch.
   bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
 
@@ -1309,15 +1354,16 @@ private:
   /// This memory will  be reused for allocating new MacroInfo objects.
   void ReleaseMacroInfo(MacroInfo* MI);
 
-  /// ReadMacroName - Lex and validate a macro name, which occurs after a
-  /// \#define or \#undef.  This emits a diagnostic, sets the token kind to eod,
+  /// \brief Lex and validate a macro name, which occurs after a
+  /// \#define or \#undef. 
+  ///
+  /// This emits a diagnostic, sets the token kind to eod,
   /// and discards the rest of the macro line if the macro name is invalid.
   void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
 
-  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
-  /// definition has just been read.  Lex the rest of the arguments and the
-  /// closing ), updating MI with what we learn and saving in LastTok the
-  /// last token read.
+  /// The ( starting an argument list of a macro definition has just been read.
+  /// Lex the rest of the arguments and the closing ), updating \p MI with
+  /// what we learn and saving in \p LastTok the last token read.
   /// Return true if an error occurs parsing the arg list.
   bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);
 
@@ -1336,21 +1382,22 @@ private:
   /// \brief A fast PTH version of SkipExcludedConditionalBlock.
   void PTHSkipExcludedConditionalBlock();
 
-  /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
-  /// may occur after a #if or #elif directive and return it as a bool.  If the
-  /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
+  /// \brief Evaluate an integer constant expression that may occur after a
+  /// \#if or \#elif directive and return it as a bool.
+  ///
+  /// If the expression is equivalent to "!defined(X)" return X in IfNDefMacro.
   bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
 
-  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
+  /// \brief Install the standard preprocessor pragmas:
   /// \#pragma GCC poison/system_header/dependency and \#pragma once.
   void RegisterBuiltinPragmas();
 
   /// \brief Register builtin macros such as __LINE__ with the identifier table.
   void RegisterBuiltinMacros();
 
-  /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
-  /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
-  /// we lexed a token, return true; otherwise the caller should lex again.
+  /// If an identifier token is read that is to be expanded as a macro, handle
+  /// it and return the next token as 'Tok'.  If we lexed a token, return true;
+  /// otherwise the caller should lex again.
   bool HandleMacroExpandedIdentifier(Token &Tok, MacroDirective *MD);
 
   /// \brief Cache macro expanded tokens for TokenLexers.
@@ -1363,48 +1410,47 @@ private:
   void removeCachedMacroExpandedTokensOfLastLexer();
   friend void TokenLexer::ExpandFunctionArguments();
 
-  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
+  /// Determine whether the next preprocessor token to be
   /// lexed is a '('.  If so, consume the token and return true, if not, this
   /// method should have no observable side-effect on the lexed tokens.
   bool isNextPPTokenLParen();
 
-  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
-  /// invoked to read all of the formal arguments specified for the macro
-  /// invocation.  This returns null on error.
+  /// After reading "MACRO(", this method is invoked to read all of the formal
+  /// arguments specified for the macro invocation.  Returns null on error.
   MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                        SourceLocation &ExpansionEnd);
 
-  /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
+  /// \brief If an identifier token is read that is to be expanded
   /// as a builtin macro, handle it and return the next token as 'Tok'.
   void ExpandBuiltinMacro(Token &Tok);
 
-  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
-  /// return the first token after the directive.  The _Pragma token has just
-  /// been read into 'Tok'.
+  /// \brief Read a \c _Pragma directive, slice it up, process it, then
+  /// return the first token after the directive.
+  /// This assumes that the \c _Pragma token has just been read into \p Tok.
   void Handle_Pragma(Token &Tok);
 
-  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
-  /// is not enclosed within a string literal.
+  /// \brief Like Handle_Pragma except the pragma text is not enclosed within
+  /// a string literal.
   void HandleMicrosoft__pragma(Token &Tok);
 
-  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
+  /// \brief Add a lexer to the top of the include stack and
   /// start lexing tokens from it instead of the current buffer.
   void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir,
                                 bool IsSubmodule = false);
 
-  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
+  /// \brief Add a lexer to the top of the include stack and
   /// start getting tokens from it using the PTH cache.
   void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir,
                               bool IsSubmodule = false);
 
-  /// \brief Set the file ID for the preprocessor predefines.
+  /// \brief Set the FileID for the preprocessor predefines.
   void setPredefinesFileID(FileID FID) {
     assert(PredefinesFileID.isInvalid() && "PredefinesFileID already set!");
     PredefinesFileID = FID;
   }
 
-  /// IsFileLexer - Returns true if we are lexing from a file and not a
-  ///  pragma or a macro.
+  /// \brief Returns true if we are lexing from a file and not a
+  /// pragma or a macro.
   static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
     return L ? !L->isPragmaLexer() : P != 0;
   }

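As a rough illustration of the macro-history accessors documented above
(macro_begin()/macro_end() together with IdentifierInfo::hasMacroDefinition()),
a client could enumerate the currently defined macro names along these lines;
the helper function below is hypothetical:

    #include "clang/Basic/IdentifierTable.h"
    #include "clang/Lex/Preprocessor.h"
    #include "llvm/Support/raw_ostream.h"

    using namespace clang;

    // Hypothetical helper: walk the macro history table and print the name of
    // every identifier that currently has a macro definition attached.
    static void DumpDefinedMacroNames(Preprocessor &PP) {
      for (Preprocessor::macro_iterator I = PP.macro_begin(), E = PP.macro_end();
           I != E; ++I) {
        const IdentifierInfo *II = I->first;
        if (II->hasMacroDefinition())
          llvm::outs() << II->getName() << "\n";
      }
    }
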
Modified: cfe/trunk/lib/Lex/PPCaching.cpp
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/Lex/PPCaching.cpp?rev=195799&r1=195798&r2=195799&view=diff
==============================================================================
--- cfe/trunk/lib/Lex/PPCaching.cpp (original)
+++ cfe/trunk/lib/Lex/PPCaching.cpp Tue Nov 26 19:27:40 2013
@@ -15,28 +15,28 @@
 #include "clang/Lex/Preprocessor.h"
 using namespace clang;
 
-/// EnableBacktrackAtThisPos - From the point that this method is called, and
-/// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
-/// keeps track of the lexed tokens so that a subsequent Backtrack() call will
-/// make the Preprocessor re-lex the same tokens.
-///
-/// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
-/// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
-/// be combined with the EnableBacktrackAtThisPos calls in reverse order.
+// EnableBacktrackAtThisPos - From the point that this method is called, and
+// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
+// keeps track of the lexed tokens so that a subsequent Backtrack() call will
+// make the Preprocessor re-lex the same tokens.
+//
+// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
+// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
+// be combined with the EnableBacktrackAtThisPos calls in reverse order.
 void Preprocessor::EnableBacktrackAtThisPos() {
   BacktrackPositions.push_back(CachedLexPos);
   EnterCachingLexMode();
 }
 
-/// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
+// Disable the last EnableBacktrackAtThisPos call.
 void Preprocessor::CommitBacktrackedTokens() {
   assert(!BacktrackPositions.empty()
          && "EnableBacktrackAtThisPos was not called!");
   BacktrackPositions.pop_back();
 }
 
-/// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
-/// EnableBacktrackAtThisPos() was previously called.
+// Make Preprocessor re-lex the tokens that were lexed since
+// EnableBacktrackAtThisPos() was previously called.
 void Preprocessor::Backtrack() {
   assert(!BacktrackPositions.empty()
          && "EnableBacktrackAtThisPos was not called!");
@@ -114,5 +114,4 @@ void Preprocessor::AnnotatePreviousCache
       CachedLexPos = i;
       return;
     }
-  }
-}
+  

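Since the comments above describe the backtracking contract, here is a hedged
sketch of how a client such as the parser might use these entry points together
with LookAhead(); the helper and the token shape it checks are made up purely
for illustration:

    #include "clang/Lex/Preprocessor.h"

    using namespace clang;

    // Hypothetical helper: tentatively lex "( identifier )", rewinding the
    // preprocessor if the tokens do not have that shape.
    static bool TryLexParenIdent(Preprocessor &PP) {
      // LookAhead(0) peeks at the token the next Lex() call would return.
      if (PP.LookAhead(0).isNot(tok::l_paren))
        return false;

      PP.EnableBacktrackAtThisPos();   // tokens lexed from here on are cached

      Token LParen, Ident, RParen;
      PP.Lex(LParen);
      PP.Lex(Ident);
      PP.Lex(RParen);

      if (Ident.is(tok::identifier) && RParen.is(tok::r_paren)) {
        PP.CommitBacktrackedTokens();  // accept: keep the consumed tokens
        return true;
      }

      PP.Backtrack();                  // reject: the same tokens will be re-lexed
      return false;
    }
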
Modified: cfe/trunk/lib/Lex/PPDirectives.cpp
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/Lex/PPDirectives.cpp?rev=195799&r1=195798&r2=195799&view=diff
==============================================================================
--- cfe/trunk/lib/Lex/PPDirectives.cpp (original)
+++ cfe/trunk/lib/Lex/PPDirectives.cpp Tue Nov 26 19:27:40 2013
@@ -1327,21 +1327,20 @@ bool Preprocessor::GetIncludeFilenameSpe
   return isAngled;
 }
 
-/// \brief Handle cases where the \#include name is expanded from a macro
-/// as multiple tokens, which need to be glued together.
-///
-/// This occurs for code like:
-/// \code
-///    \#define FOO <a/b.h>
-///    \#include FOO
-/// \endcode
-/// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
-///
-/// This code concatenates and consumes tokens up to the '>' token.  It returns
-/// false if the > was found, otherwise it returns true if it finds and consumes
-/// the EOD marker.
-bool Preprocessor::ConcatenateIncludeName(
-                                        SmallString<128> &FilenameBuffer,
+// \brief Handle cases where the \#include name is expanded from a macro
+// as multiple tokens, which need to be glued together.
+//
+// This occurs for code like:
+// \code
+//    \#define FOO <a/b.h>
+//    \#include FOO
+// \endcode
+// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
+//
+// This code concatenates and consumes tokens up to the '>' token.  It returns
+// false if the > was found, otherwise it returns true if it finds and consumes
+// the EOD marker.
+bool Preprocessor::ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
                                           SourceLocation &End) {
   Token CurTok;
 




