1 //===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines the Preprocessor interface.
11 //
12 //===----------------------------------------------------------------------===//
13
14 #ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15 #define LLVM_CLANG_LEX_PREPROCESSOR_H
16
17 #include "clang/Lex/MacroInfo.h"
18 #include "clang/Lex/Lexer.h"
19 #include "clang/Lex/PTHLexer.h"
20 #include "clang/Lex/PPCallbacks.h"
21 #include "clang/Lex/TokenLexer.h"
22 #include "clang/Lex/PTHManager.h"
23 #include "clang/Basic/Builtins.h"
24 #include "clang/Basic/Diagnostic.h"
25 #include "clang/Basic/IdentifierTable.h"
26 #include "clang/Basic/SourceLocation.h"
27 #include "llvm/ADT/DenseMap.h"
28 #include "llvm/ADT/IntrusiveRefCntPtr.h"
29 #include "llvm/ADT/SmallPtrSet.h"
30 #include "llvm/ADT/OwningPtr.h"
31 #include "llvm/ADT/SmallVector.h"
32 #include "llvm/ADT/ArrayRef.h"
33 #include "llvm/Support/Allocator.h"
34 #include <vector>
35
36 namespace llvm {
37 template<unsigned InternalLen> class SmallString;
38 }
39
40 namespace clang {
41
42 class SourceManager;
43 class ExternalPreprocessorSource;
44 class FileManager;
45 class FileEntry;
46 class HeaderSearch;
47 class PragmaNamespace;
48 class PragmaHandler;
49 class CommentHandler;
50 class ScratchBuffer;
51 class TargetInfo;
52 class PPCallbacks;
53 class CodeCompletionHandler;
54 class DirectoryLookup;
55 class PreprocessingRecord;
56 class ModuleLoader;
57
58 /// \brief Stores token information for comparing actual tokens with
59 /// predefined values. Only handles simple tokens and identifiers.
60 class TokenValue {
61 tok::TokenKind Kind;
62 IdentifierInfo *II;
63
64 public:
65 TokenValue(tok::TokenKind Kind) : Kind(Kind), II(0) {
66 assert(Kind != tok::raw_identifier && "Raw identifiers are not supported.");
67 assert(Kind != tok::identifier &&
68 "Identifiers should be created by TokenValue(IdentifierInfo *)");
69 assert(!tok::isLiteral(Kind) && "Literals are not supported.");
70 assert(!tok::isAnnotation(Kind) && "Annotations are not supported.");
71 }
72 TokenValue(IdentifierInfo *II) : Kind(tok::identifier), II(II) {}
73 bool operator==(const Token &Tok) const {
74 return Tok.getKind() == Kind &&
75 (!II || II == Tok.getIdentifierInfo());
76 }
77 };
78
79 /// Preprocessor - This object engages in a tight little dance with the lexer to
80 /// efficiently preprocess tokens. Lexers know only about tokens within a
81 /// single source file, and don't know anything about preprocessor-level issues
82 /// like the \#include stack, token expansion, etc.
83 ///
84 class Preprocessor : public RefCountedBase<Preprocessor> {
85 DiagnosticsEngine *Diags;
86 LangOptions &LangOpts;
87 const TargetInfo *Target;
88 FileManager &FileMgr;
89 SourceManager &SourceMgr;
90 ScratchBuffer *ScratchBuf;
91 HeaderSearch &HeaderInfo;
92 ModuleLoader &TheModuleLoader;
93
94 /// \brief External source of macros.
95 ExternalPreprocessorSource *ExternalSource;
96
97
98 /// PTH - An optional PTHManager object used for getting tokens from
99 /// a token cache rather than lexing the original source file.
100 OwningPtr<PTHManager> PTH;
101
102 /// BP - A BumpPtrAllocator object used to quickly allocate and release
103 /// objects internal to the Preprocessor.
104 llvm::BumpPtrAllocator BP;
105
106 /// Identifiers for builtin macros and other builtins.
107 IdentifierInfo *Ident__LINE__, *Ident__FILE__; // __LINE__, __FILE__
108 IdentifierInfo *Ident__DATE__, *Ident__TIME__; // __DATE__, __TIME__
109 IdentifierInfo *Ident__INCLUDE_LEVEL__; // __INCLUDE_LEVEL__
110 IdentifierInfo *Ident__BASE_FILE__; // __BASE_FILE__
111 IdentifierInfo *Ident__TIMESTAMP__; // __TIMESTAMP__
112 IdentifierInfo *Ident__COUNTER__; // __COUNTER__
113 IdentifierInfo *Ident_Pragma, *Ident__pragma; // _Pragma, __pragma
114 IdentifierInfo *Ident__VA_ARGS__; // __VA_ARGS__
115 IdentifierInfo *Ident__has_feature; // __has_feature
116 IdentifierInfo *Ident__has_extension; // __has_extension
117 IdentifierInfo *Ident__has_builtin; // __has_builtin
118 IdentifierInfo *Ident__has_attribute; // __has_attribute
119 IdentifierInfo *Ident__has_include; // __has_include
120 IdentifierInfo *Ident__has_include_next; // __has_include_next
121 IdentifierInfo *Ident__has_warning; // __has_warning
122 IdentifierInfo *Ident__building_module; // __building_module
123 IdentifierInfo *Ident__MODULE__; // __MODULE__
124
125 SourceLocation DATELoc, TIMELoc;
126 unsigned CounterValue; // Next __COUNTER__ value.
127
128 enum {
129 /// MaxIncludeStackDepth - Maximum depth of \#includes.
130 MaxAllowedIncludeStackDepth = 200
131 };
132
133 // State that is set before the preprocessor begins.
134 bool KeepComments : 1;
135 bool KeepMacroComments : 1;
136 bool SuppressIncludeNotFoundError : 1;
137
138 // State that changes while the preprocessor runs:
139 bool InMacroArgs : 1; // True if parsing fn macro invocation args.
140
141 /// Whether the preprocessor owns the header search object.
142 bool OwnsHeaderSearch : 1;
143
144 /// DisableMacroExpansion - True if macro expansion is disabled.
145 bool DisableMacroExpansion : 1;
146
147 /// MacroExpansionInDirectivesOverride - Temporarily disables
148 /// DisableMacroExpansion (i.e. enables expansion) when parsing preprocessor
149 /// directives.
150 bool MacroExpansionInDirectivesOverride : 1;
151
152 class ResetMacroExpansionHelper;
153
154 /// \brief Whether we have already loaded macros from the external source.
155 mutable bool ReadMacrosFromExternalSource : 1;
156
157 /// \brief True if pragmas are enabled.
158 bool PragmasEnabled : 1;
159
160 /// \brief True if we are pre-expanding macro arguments.
161 bool InMacroArgPreExpansion;
162
163 /// Identifiers - This is mapping/lookup information for all identifiers in
164 /// the program, including program keywords.
165 mutable IdentifierTable Identifiers;
166
167 /// Selectors - This table contains all the selectors in the program. Unlike
168 /// IdentifierTable above, this table *isn't* populated by the preprocessor.
169 /// It is declared/expanded here because its role/lifetime is
170 /// conceptually similar to the IdentifierTable. In addition, the current
171 /// control flow (in clang::ParseAST()) makes it convenient to put it here.
172 /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
173 /// the lifetime of the preprocessor.
174 SelectorTable Selectors;
175
176 /// BuiltinInfo - Information about builtins.
177 Builtin::Context BuiltinInfo;
178
179 /// PragmaHandlers - This tracks all of the pragmas that the client registered
180 /// with this preprocessor.
181 PragmaNamespace *PragmaHandlers;
182
183 /// \brief Tracks all of the comment handlers that the client registered
184 /// with this preprocessor.
185 std::vector<CommentHandler *> CommentHandlers;
186
187 /// \brief True if we want to ignore the EOF token and continue later on
188 /// (thus avoiding tearing down the Lexer, etc.).
189 bool IncrementalProcessing;
190
191 /// \brief The code-completion handler.
192 CodeCompletionHandler *CodeComplete;
193
194 /// \brief The file that we're performing code-completion for, if any.
195 const FileEntry *CodeCompletionFile;
196
197 /// \brief The offset in file for the code-completion point.
198 unsigned CodeCompletionOffset;
199
200 /// \brief The location for the code-completion point. This gets instantiated
201 /// when the CodeCompletionFile gets \#include'ed for preprocessing.
202 SourceLocation CodeCompletionLoc;
203
204 /// \brief The start location for the file of the code-completion point.
205 ///
206 /// This gets instantiated when the CodeCompletionFile gets \#include'ed
207 /// for preprocessing.
208 SourceLocation CodeCompletionFileLoc;
209
210 /// \brief The source location of the 'import' contextual keyword we just
211 /// lexed, if any.
212 SourceLocation ModuleImportLoc;
213
214 /// \brief The module import path that we're currently processing.
215 llvm::SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2>
216 ModuleImportPath;
217
218 /// \brief Whether the module import expects an identifier next. Otherwise,
219 /// it expects a '.' or ';'.
220 bool ModuleImportExpectsIdentifier;
221
222 /// \brief The source location of the currently-active
223 /// #pragma clang arc_cf_code_audited begin.
224 SourceLocation PragmaARCCFCodeAuditedLoc;
225
226 /// \brief True if we hit the code-completion point.
227 bool CodeCompletionReached;
228
229 /// \brief The number of bytes that we will initially skip when entering the
230 /// main file, which is used when loading a precompiled preamble, along
231 /// with a flag that indicates whether skipping this number of bytes will
232 /// place the lexer at the start of a line.
233 std::pair<unsigned, bool> SkipMainFilePreamble;
234
235 /// CurLexer - This is the current top of the stack that we're lexing from if
236 /// not expanding a macro and we are lexing directly from source code.
237 /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
238 OwningPtr<Lexer> CurLexer;
239
240 /// CurPTHLexer - This is the current top of stack that we're lexing from if
241 /// not expanding from a macro and we are lexing from a PTH cache.
242 /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
243 OwningPtr<PTHLexer> CurPTHLexer;
244
245 /// CurPPLexer - This is the current top of the stack that we're lexing from
246 /// if not expanding a macro. This is an alias for either CurLexer or
247 /// CurPTHLexer.
248 PreprocessorLexer *CurPPLexer;
249
250 /// CurDirLookup - The DirectoryLookup structure used to find the current
251 /// FileEntry, if CurLexer is non-null and if applicable. This allows us to
252 /// implement \#include_next and find directory-specific properties.
253 const DirectoryLookup *CurDirLookup;
254
255 /// CurTokenLexer - This is the current macro we are expanding, if we are
256 /// expanding a macro. One of CurLexer and CurTokenLexer must be null.
257 OwningPtr<TokenLexer> CurTokenLexer;
258
259 /// \brief The kind of lexer we're currently working with.
260 enum CurLexerKind {
261 CLK_Lexer,
262 CLK_PTHLexer,
263 CLK_TokenLexer,
264 CLK_CachingLexer,
265 CLK_LexAfterModuleImport
266 } CurLexerKind;
267
268 /// IncludeMacroStack - This keeps track of the stack of files currently
269 /// \#included, and macros currently being expanded from, not counting
270 /// CurLexer/CurTokenLexer.
271 struct IncludeStackInfo {
272 enum CurLexerKind CurLexerKind;
273 Lexer *TheLexer;
274 PTHLexer *ThePTHLexer;
275 PreprocessorLexer *ThePPLexer;
276 TokenLexer *TheTokenLexer;
277 const DirectoryLookup *TheDirLookup;
278
279 IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P,
280 PreprocessorLexer* PPL,
281 TokenLexer* TL, const DirectoryLookup *D)
282 : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
283 TheTokenLexer(TL), TheDirLookup(D) {}
284 };
285 std::vector<IncludeStackInfo> IncludeMacroStack;
286
287 /// Callbacks - These are actions invoked when some preprocessor activity is
288 /// encountered (e.g. a file is \#included, etc).
289 PPCallbacks *Callbacks;
290
291 struct MacroExpandsInfo {
292 Token Tok;
293 MacroInfo *MI;
294 SourceRange Range;
295 MacroExpandsInfo(Token Tok, MacroInfo *MI, SourceRange Range)
296 : Tok(Tok), MI(MI), Range(Range) { }
297 };
298 SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks;
299
300 /// Macros - For each IdentifierInfo that was associated with a macro, we
301 /// keep a mapping to the history of all macro definitions and #undefs in
302 /// reverse order (the latest one is at the head of the list).
303 llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
304 friend class ASTReader;
305
306 /// \brief Macros that we want to warn about because they are not used by the
307 /// end of the translation unit; we store just their SourceLocations instead
308 /// of something like MacroInfo*. The benefit of this is that when we are
309 /// deserializing from a PCH, we don't need to deserialize identifiers and
310 /// macros just to report that they are unused; we simply warn using the
311 /// SourceLocations in this set (which will be filled in by the ASTReader).
312 /// We use a SmallPtrSet instead of a vector for faster removal.
313 typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
314 WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
315
316 /// MacroArgCache - This is a "freelist" of MacroArgs objects that can be
317 /// reused for quick allocation.
318 MacroArgs *MacroArgCache;
319 friend class MacroArgs;
320
321 /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
322 /// push_macro directive, we keep a MacroInfo stack used to restore
323 /// previous macro value.
324 llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
325
326 // Various statistics we track for performance analysis.
327 unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
328 unsigned NumIf, NumElse, NumEndif;
329 unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
330 unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
331 unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
332 unsigned NumSkipped;
333
334 /// Predefines - This string is the predefined macros that the preprocessor
335 /// should use from the command line, etc.
336 std::string Predefines;
337
338 /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
339 enum { TokenLexerCacheSize = 8 };
340 unsigned NumCachedTokenLexers;
341 TokenLexer *TokenLexerCache[TokenLexerCacheSize];
342
343 /// \brief Keeps macro-expanded tokens for TokenLexers.
344 ///
345 /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
346 /// going to lex into the cache, and when it finishes, the tokens are removed
347 /// from the end of the cache.
348 SmallVector<Token, 16> MacroExpandedTokens;
349 std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
350
351 /// \brief A record of the macro definitions and expansions that
352 /// occurred during preprocessing.
353 ///
354 /// This is an optional side structure that can be enabled with
355 /// \c createPreprocessingRecord() prior to preprocessing.
356 PreprocessingRecord *Record;
357
358 private: // Cached tokens state.
359 typedef SmallVector<Token, 1> CachedTokensTy;
360
361 /// CachedTokens - Cached tokens are stored here when we do backtracking or
362 /// lookahead. They are "lexed" by the CachingLex() method.
363 CachedTokensTy CachedTokens;
364
365 /// CachedLexPos - The position of the cached token that CachingLex() should
366 /// "lex" next. If it points beyond the CachedTokens vector, it means that
367 /// a normal Lex() should be invoked.
368 CachedTokensTy::size_type CachedLexPos;
369
370 /// BacktrackPositions - Stack of backtrack positions, allowing nested
371 /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
372 /// indicate where CachedLexPos should be set when the BackTrack() method is
373 /// invoked (at which point the last position is popped).
374 std::vector<CachedTokensTy::size_type> BacktrackPositions;
375
376 struct MacroInfoChain {
377 MacroInfo MI;
378 MacroInfoChain *Next;
379 MacroInfoChain *Prev;
380 };
381
382 /// MacroInfos are managed as a chain for easy disposal. This is the head
383 /// of that list.
384 MacroInfoChain *MIChainHead;
385
386 /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
387 /// allocation.
388 MacroInfoChain *MICache;
389
390 public:
391 Preprocessor(DiagnosticsEngine &diags, LangOptions &opts,
392 const TargetInfo *target,
393 SourceManager &SM, HeaderSearch &Headers,
394 ModuleLoader &TheModuleLoader,
395 IdentifierInfoLookup *IILookup = 0,
396 bool OwnsHeaderSearch = false,
397 bool DelayInitialization = false,
398 bool IncrProcessing = false);
399
400 ~Preprocessor();
401
402 /// \brief Initialize the preprocessor, if the constructor did not already
403 /// perform the initialization.
404 ///
405 /// \param Target Information about the target.
406 void Initialize(const TargetInfo &Target);
407
408 DiagnosticsEngine &getDiagnostics() const { return *Diags; }
409 void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }
410
411 const LangOptions &getLangOpts() const { return LangOpts; }
412 const TargetInfo &getTargetInfo() const { return *Target; }
413 FileManager &getFileManager() const { return FileMgr; }
414 SourceManager &getSourceManager() const { return SourceMgr; }
415 HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
416
417 IdentifierTable &getIdentifierTable() { return Identifiers; }
418 SelectorTable &getSelectorTable() { return Selectors; }
419 Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
420 llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
421
422 void setPTHManager(PTHManager* pm);
423
424 PTHManager *getPTHManager() { return PTH.get(); }
425
426 void setExternalSource(ExternalPreprocessorSource *Source) {
427 ExternalSource = Source;
428 }
429
430 ExternalPreprocessorSource *getExternalSource() const {
431 return ExternalSource;
432 }
433
434 /// \brief Retrieve the module loader associated with this preprocessor.
435 ModuleLoader &getModuleLoader() const { return TheModuleLoader; }
436
437 /// SetCommentRetentionState - Control whether or not the preprocessor retains
438 /// comments in output.
439 void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
440 this->KeepComments = KeepComments | KeepMacroComments;
441 this->KeepMacroComments = KeepMacroComments;
442 }
443
444 bool getCommentRetentionState() const { return KeepComments; }
445
446 void setPragmasEnabled(bool Enabled) { PragmasEnabled = Enabled; }
447 bool getPragmasEnabled() const { return PragmasEnabled; }
448
449 void SetSuppressIncludeNotFoundError(bool Suppress) {
450 SuppressIncludeNotFoundError = Suppress;
451 }
452
453 bool GetSuppressIncludeNotFoundError() {
454 return SuppressIncludeNotFoundError;
455 }
456
457 /// isCurrentLexer - Return true if we are lexing directly from the specified
458 /// lexer.
459 bool isCurrentLexer(const PreprocessorLexer *L) const {
460 return CurPPLexer == L;
461 }
462
463 /// getCurrentLexer - Return the current lexer being lexed from. Note
464 /// that this ignores any potentially active macro expansions and _Pragma
465 /// expansions going on at the time.
466 PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
467
468 /// getCurrentFileLexer - Return the current file lexer being lexed from.
469 /// Note that this ignores any potentially active macro expansions and _Pragma
470 /// expansions going on at the time.
471 PreprocessorLexer *getCurrentFileLexer() const;
472
473 /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
474 /// Note that this class takes ownership of any PPCallbacks object given to
475 /// it.
476 PPCallbacks *getPPCallbacks() const { return Callbacks; }
477 void addPPCallbacks(PPCallbacks *C) {
478 if (Callbacks)
479 C = new PPChainedCallbacks(C, Callbacks);
480 Callbacks = C;
481 }
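/// For instance, a hedged installation sketch (\c MyPPCallbacks is a
/// hypothetical PPCallbacks subclass, not part of this header):
/// \code
///   PP.addPPCallbacks(new MyPPCallbacks());  // Preprocessor takes ownership.
/// \endcode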
482
483 /// \brief Given an identifier, return the MacroInfo it is \#defined to
484 /// or null if it isn't \#define'd.
485 MacroInfo *getMacroInfo(IdentifierInfo *II) const {
486 if (!II->hasMacroDefinition())
487 return 0;
488
489 MacroInfo *MI = getMacroInfoHistory(II);
490 assert(MI->getUndefLoc().isInvalid() && "Macro is undefined!");
491 return MI;
492 }
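/// A usage sketch (assumes a configured Preprocessor \c PP is available):
/// \code
///   IdentifierInfo *II = PP.getIdentifierInfo("NDEBUG");
///   if (MacroInfo *MI = PP.getMacroInfo(II)) {
///     // NDEBUG is currently #defined; MI describes its active definition.
///   }
/// \endcode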
493
494 /// \brief Given an identifier, return the (probably #undef'd) MacroInfo
495 /// representing the most recent macro definition. One can iterate over all
496 /// previous macro definitions from it. This method should only be called for
497 /// identifiers that hadMacroDefinition().
498 MacroInfo *getMacroInfoHistory(IdentifierInfo *II) const;
499
500 /// \brief Specify a macro for this identifier.
501 void setMacroInfo(IdentifierInfo *II, MacroInfo *MI,
502 bool LoadedFromAST = false);
503 /// \brief Undefine a macro for this identifier.
504 void clearMacroInfo(IdentifierInfo *II);
505
506 /// macro_iterator/macro_begin/macro_end - This allows you to walk the macro
507 /// history table. Currently defined macros have
508 /// IdentifierInfo::hasMacroDefinition() set and an empty
509 /// MacroInfo::getUndefLoc() at the head of the list.
510 typedef llvm::DenseMap<IdentifierInfo*,
511 MacroInfo*>::const_iterator macro_iterator;
512 macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
513 macro_iterator macro_end(bool IncludeExternalMacros = true) const;
514
515 /// \brief Return the name of the macro defined before \p Loc that has
516 /// spelling \p Tokens. If there are multiple macros with the same spelling,
517 /// return the last one defined.
518 StringRef getLastMacroWithSpelling(SourceLocation Loc,
519 ArrayRef<TokenValue> Tokens) const;
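/// For example, a hedged sketch (\c PP and \c Loc are an assumed Preprocessor
/// and SourceLocation): find the last macro defined before \c Loc whose body
/// is spelled "__builtin_trap ( )".
/// \code
///   TokenValue Spelling[] = { PP.getIdentifierInfo("__builtin_trap"),
///                             tok::l_paren, tok::r_paren };
///   StringRef Name = PP.getLastMacroWithSpelling(Loc, Spelling);
/// \endcode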
520
521 const std::string &getPredefines() const { return Predefines; }
522 /// setPredefines - Set the predefines for this Preprocessor. These
523 /// predefines are automatically injected when parsing the main file.
524 void setPredefines(const char *P) { Predefines = P; }
525 void setPredefines(const std::string &P) { Predefines = P; }
526
527 /// getIdentifierInfo - Return information about the specified preprocessor
528 /// identifier token. Taking the name as a StringRef is preferred; it avoids
529 /// allocating and copying memory to construct a std::string when the
530 /// identifier's characters are already available as a contiguous buffer
531 /// (e.g. in the source text).
532 IdentifierInfo *getIdentifierInfo(StringRef Name) const {
533 return &Identifiers.get(Name);
534 }
535
536 /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
537 /// If 'Namespace' is non-empty, then it is a token required to exist on the
538 /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
539 void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
540 void AddPragmaHandler(PragmaHandler *Handler) {
541 AddPragmaHandler(StringRef(), Handler);
542 }
543
544 /// RemovePragmaHandler - Remove the specified pragma handler from
545 /// the preprocessor. If \p Namespace is non-empty, then it should
546 /// be the namespace that \p Handler was added to. It is an error
547 /// to remove a handler that has not been registered.
548 void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
549 void RemovePragmaHandler(PragmaHandler *Handler) {
550 RemovePragmaHandler(StringRef(), Handler);
551 }
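/// A hedged registration sketch (\c MyPragmaHandler is a hypothetical
/// PragmaHandler subclass; see clang/Lex/Pragma.h for the interface):
/// \code
///   PragmaHandler *H = new MyPragmaHandler();
///   PP.AddPragmaHandler("clang", H);     // handles "#pragma clang ..."
///   // ...
///   PP.RemovePragmaHandler("clang", H);  // unregister before destroying H
///   delete H;
/// \endcode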
552
553 /// \brief Add the specified comment handler to the preprocessor.
554 void addCommentHandler(CommentHandler *Handler);
555
556 /// \brief Remove the specified comment handler.
557 ///
558 /// It is an error to remove a handler that has not been registered.
559 void removeCommentHandler(CommentHandler *Handler);
560
561 /// \brief Set the code completion handler to the given object.
562 void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
563 CodeComplete = &Handler;
564 }
565
566 /// \brief Retrieve the current code-completion handler.
567 CodeCompletionHandler *getCodeCompletionHandler() const {
568 return CodeComplete;
569 }
570
571 /// \brief Clear out the code completion handler.
572 void clearCodeCompletionHandler() {
573 CodeComplete = 0;
574 }
575
576 /// \brief Hook used by the lexer to invoke the "natural language" code
577 /// completion point.
578 void CodeCompleteNaturalLanguage();
579
580 /// \brief Retrieve the preprocessing record, or NULL if there is no
581 /// preprocessing record.
582 PreprocessingRecord *getPreprocessingRecord() const { return Record; }
583
584 /// \brief Create a new preprocessing record, which will keep track of
585 /// all macro expansions, macro definitions, etc.
586 void createPreprocessingRecord(bool RecordConditionalDirectives);
587
588 /// EnterMainSourceFile - Enter the main source file of the translation
589 /// unit, which implicitly adds the builtin defines, etc.
590 void EnterMainSourceFile();
591
592 /// EndSourceFile - Inform the preprocessor callbacks that processing is
593 /// complete.
594 void EndSourceFile();
595
596 /// EnterSourceFile - Add a source file to the top of the include stack and
597 /// start lexing tokens from it instead of the current buffer. If an error
598 /// occurs, a diagnostic is emitted and the file is not entered.
599 void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
600 SourceLocation Loc);
601
602 /// EnterMacro - Add a Macro to the top of the include stack and start lexing
603 /// tokens from it instead of the current buffer. Args specifies the
604 /// tokens input to a function-like macro.
605 ///
606 /// ILEnd specifies the location of the ')' for a function-like macro or the
607 /// identifier for an object-like macro.
608 void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroInfo *Macro,
609 MacroArgs *Args);
610
611 /// EnterTokenStream - Add a "macro" context to the top of the include stack,
612 /// which will cause the lexer to start returning the specified tokens.
613 ///
614 /// If DisableMacroExpansion is true, tokens lexed from the token stream will
615 /// not be subject to further macro expansion. Otherwise, these tokens will
616 /// be re-macro-expanded when/if expansion is enabled.
617 ///
618 /// If OwnsTokens is false, this method assumes that the specified stream of
619 /// tokens has a permanent owner somewhere, so they do not need to be copied.
620 /// If it is true, it assumes the array of tokens is allocated with new[] and
621 /// must be freed.
622 ///
623 void EnterTokenStream(const Token *Toks, unsigned NumToks,
624 bool DisableMacroExpansion, bool OwnsTokens);
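/// A hedged sketch of re-injecting a saved token sequence (\c Toks is an
/// assumed array of \c NumToks tokens allocated with new[]):
/// \code
///   PP.EnterTokenStream(Toks, NumToks,
///                       /*DisableMacroExpansion=*/true,
///                       /*OwnsTokens=*/true);  // The preprocessor frees Toks.
/// \endcode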
625
626 /// RemoveTopOfLexerStack - Pop the current lexer/macro expansion off the top of the
627 /// lexer stack. This should only be used in situations where the current
628 /// state of the top-of-stack lexer is known.
629 void RemoveTopOfLexerStack();
630
631 /// EnableBacktrackAtThisPos - From the point that this method is called, and
632 /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
633 /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
634 /// make the Preprocessor re-lex the same tokens.
635 ///
636 /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
637 /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
638 /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
639 ///
640 /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
641 /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
642 /// tokens will continue indefinitely.
643 ///
644 void EnableBacktrackAtThisPos();
645
646 /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
647 void CommitBacktrackedTokens();
648
649 /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
650 /// EnableBacktrackAtThisPos() was previously called.
651 void Backtrack();
652
653 /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
654 /// caching of tokens is on.
655 bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
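/// A minimal sketch of the intended call pattern (\c PP is an assumed
/// Preprocessor): speculatively lex a token, then either keep or re-lex it.
/// \code
///   PP.EnableBacktrackAtThisPos();
///   Token Tok;
///   PP.Lex(Tok);
///   if (Tok.is(tok::l_paren))
///     PP.CommitBacktrackedTokens();  // Keep what was lexed.
///   else
///     PP.Backtrack();                // The same tokens will be re-lexed.
/// \endcode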
656
657 /// Lex - To lex a token from the preprocessor, just pull a token from the
658 /// current lexer or macro object.
659 void Lex(Token &Result) {
660 switch (CurLexerKind) {
661 case CLK_Lexer: CurLexer->Lex(Result); break;
662 case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
663 case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
664 case CLK_CachingLexer: CachingLex(Result); break;
665 case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
666 }
667 }
668
669 void LexAfterModuleImport(Token &Result);
670
671 /// LexNonComment - Lex a token. If it's a comment, keep lexing until we get
672 /// something not a comment. This is useful in -E -C mode where comments
673 /// would foul up preprocessor directive handling.
674 void LexNonComment(Token &Result) {
675 do
676 Lex(Result);
677 while (Result.getKind() == tok::comment);
678 }
679
680 /// LexUnexpandedToken - This is just like Lex, but this disables macro
681 /// expansion of identifier tokens.
682 void LexUnexpandedToken(Token &Result) {
683 // Disable macro expansion.
684 bool OldVal = DisableMacroExpansion;
685 DisableMacroExpansion = true;
686 // Lex the token.
687 Lex(Result);
688
689 // Reenable it.
690 DisableMacroExpansion = OldVal;
691 }
692
693 /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
694 /// expansion of identifier tokens.
695 void LexUnexpandedNonComment(Token &Result) {
696 do
697 LexUnexpandedToken(Result);
698 while (Result.getKind() == tok::comment);
699 }
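/// A hedged driver sketch (assumes EnterMainSourceFile() has already been
/// called on \c PP): pull tokens until end-of-file, the usual top-level loop
/// for a standalone preprocessing run.
/// \code
///   Token Tok;
///   do {
///     PP.Lex(Tok);
///     // ... process Tok ...
///   } while (Tok.isNot(tok::eof));
/// \endcode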
700
701 /// Disables macro expansion everywhere except for preprocessor directives.
702 void SetMacroExpansionOnlyInDirectives() {
703 DisableMacroExpansion = true;
704 MacroExpansionInDirectivesOverride = true;
705 }
706
707 /// LookAhead - This peeks ahead N tokens and returns that token without
708 /// consuming any tokens. LookAhead(0) returns the next token that would be
709 /// returned by Lex(), LookAhead(1) returns the token after it, etc. This
710 /// returns normal tokens after phase 5. As such, it is equivalent to using
711 /// 'Lex', not 'LexUnexpandedToken'.
712 const Token &LookAhead(unsigned N) {
713 if (CachedLexPos + N < CachedTokens.size())
714 return CachedTokens[CachedLexPos+N];
715 else
716 return PeekAhead(N+1);
717 }
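/// For example (a sketch; \c PP is an assumed Preprocessor), peek at the next
/// two tokens without consuming anything:
/// \code
///   if (PP.LookAhead(0).is(tok::l_paren) &&
///       PP.LookAhead(1).is(tok::identifier)) {
///     // '(' followed by an identifier is next; nothing has been consumed.
///   }
/// \endcode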
718
719 /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
720 /// this allows one to revert a specific number of tokens.
721 /// Note that the number of tokens being reverted should be up to the last
722 /// backtrack position, not more.
723 void RevertCachedTokens(unsigned N) {
724 assert(isBacktrackEnabled() &&
725 "Should only be called when tokens are cached for backtracking");
726 assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
727 && "Should revert tokens up to the last backtrack position, not more");
728 assert(signed(CachedLexPos) - signed(N) >= 0 &&
729 "Corrupted backtrack positions ?");
730 CachedLexPos -= N;
731 }
732
733 /// EnterToken - Enters a token in the token stream to be lexed next. If
734 /// BackTrack() is called afterwards, the token will remain at the insertion
735 /// point.
736 void EnterToken(const Token &Tok) {
737 EnterCachingLexMode();
738 CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
739 }
740
741 /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
742 /// tokens (because backtracking is enabled) it should replace the most recent
743 /// cached tokens with the given annotation token. This function has no effect
744 /// if backtracking is not enabled.
745 ///
746 /// Note that this function exists only as an optimization, so that the
747 /// cached tokens don't get re-parsed and re-resolved after a backtrack is
748 /// invoked.
749 void AnnotateCachedTokens(const Token &Tok) {
750 assert(Tok.isAnnotation() && "Expected annotation token");
751 if (CachedLexPos != 0 && isBacktrackEnabled())
752 AnnotatePreviousCachedTokens(Tok);
753 }
754
755 /// \brief Replace the last token with an annotation token.
756 ///
757 /// Like AnnotateCachedTokens(), this routine replaces an
758 /// already-parsed (and resolved) token with an annotation
759 /// token. However, this routine only replaces the last token with
760 /// the annotation token; it does not affect any other cached
761 /// tokens. This function has no effect if backtracking is not
762 /// enabled.
763 void ReplaceLastTokenWithAnnotation(const Token &Tok) {
764 assert(Tok.isAnnotation() && "Expected annotation token");
765 if (CachedLexPos != 0 && isBacktrackEnabled())
766 CachedTokens[CachedLexPos-1] = Tok;
767 }
768
769 /// TypoCorrectToken - Update the current token to represent the provided
770 /// identifier, in order to cache an action performed by typo correction.
771 void TypoCorrectToken(const Token &Tok) {
772 assert(Tok.getIdentifierInfo() && "Expected identifier token");
773 if (CachedLexPos != 0 && isBacktrackEnabled())
774 CachedTokens[CachedLexPos-1] = Tok;
775 }
776
777 /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/
778 /// CurTokenLexer pointers.
779 void recomputeCurLexerKind();
780
781 /// \brief Returns true if incremental processing is enabled
782 bool isIncrementalProcessingEnabled() const { return IncrementalProcessing; }
783
784 /// \brief Enable or disable incremental processing.
785 void enableIncrementalProcessing(bool value = true) {
786 IncrementalProcessing = value;
787 }
788
789 /// \brief Specify the point at which code-completion will be performed.
790 ///
791 /// \param File the file in which code completion should occur. If
792 /// this file is included multiple times, code-completion will
793 /// perform completion the first time it is included. If NULL, this
794 /// function clears out the code-completion point.
795 ///
796 /// \param Line the line at which code completion should occur
797 /// (1-based).
798 ///
799 /// \param Column the column at which code completion should occur
800 /// (1-based).
801 ///
802 /// \returns true if an error occurred, false otherwise.
803 bool SetCodeCompletionPoint(const FileEntry *File,
804 unsigned Line, unsigned Column);
805
806 /// \brief Determine if we are performing code completion.
807 bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
808
809 /// \brief Returns the location of the code-completion point.
810 /// Returns an invalid location if code-completion is not enabled or the file
811 /// containing the code-completion point has not been lexed yet.
812 SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }
813
814 /// \brief Returns the start location of the file containing the code-completion point.
815 /// Returns an invalid location if code-completion is not enabled or the file
816 /// containing the code-completion point has not been lexed yet.
817 SourceLocation getCodeCompletionFileLoc() const {
818 return CodeCompletionFileLoc;
819 }
820
821 /// \brief Returns true if code-completion is enabled and we have hit the
822 /// code-completion point.
823 bool isCodeCompletionReached() const { return CodeCompletionReached; }
824
825 /// \brief Note that we hit the code-completion point.
826 void setCodeCompletionReached() {
827 assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
828 CodeCompletionReached = true;
829 // Silence any diagnostics that occur after we hit the code-completion.
830 getDiagnostics().setSuppressAllDiagnostics(true);
831 }
832
833 /// \brief The location of the currently-active \#pragma clang
834 /// arc_cf_code_audited begin. Returns an invalid location if there
835 /// is no such pragma active.
836 SourceLocation getPragmaARCCFCodeAuditedLoc() const {
837 return PragmaARCCFCodeAuditedLoc;
838 }
839
840 /// \brief Set the location of the currently-active \#pragma clang
841 /// arc_cf_code_audited begin. An invalid location ends the pragma.
842 void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
843 PragmaARCCFCodeAuditedLoc = Loc;
844 }
845
846 /// \brief Instruct the preprocessor to skip part of the main source file.
847 ///
848 /// \param Bytes The number of bytes in the preamble to skip.
849 ///
850 /// \param StartOfLine Whether skipping these bytes puts the lexer at the
851 /// start of a line.
852 void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
853 SkipMainFilePreamble.first = Bytes;
854 SkipMainFilePreamble.second = StartOfLine;
855 }
856
857 /// Diag - Forwarding function for diagnostics. This emits a diagnostic at
858 /// the specified source location (or, for the Token overload below, at the
859 /// token's location).
860 DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
861 return Diags->Report(Loc, DiagID);
862 }
863
864 DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
865 return Diags->Report(Tok.getLocation(), DiagID);
866 }
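/// For example (a sketch; \c MyDiagID stands in for a real diagnostic ID):
/// \code
///   if (Tok.isNot(tok::semi))
///     PP.Diag(Tok, MyDiagID);  // Reported at Tok's location.
/// \endcode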
867
868 /// getSpelling() - Return the 'spelling' of the token at the given
869 /// location; does not go up to the spelling location or down to the
870 /// expansion location.
871 ///
872 /// \param buffer A buffer which will be used only if the token requires
873 /// "cleaning", e.g. if it contains trigraphs or escaped newlines
874 /// \param invalid If non-null, will be set \c true if an error occurs.
875 StringRef getSpelling(SourceLocation loc,
876 SmallVectorImpl<char> &buffer,
877 bool *invalid = 0) const {
878 return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid);
879 }
880
881 /// getSpelling() - Return the 'spelling' of the Tok token. The spelling of a
882 /// token is the characters used to represent the token in the source file
883 /// after trigraph expansion and escaped-newline folding. In particular, this
884 /// wants to get the true, uncanonicalized spelling of things like digraphs,
885 /// UCNs, etc.
886 ///
887 /// \param Invalid If non-null, will be set \c true if an error occurs.
888 std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
889 return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid);
890 }
891
892 /// getSpelling - This method is used to get the spelling of a token into a
893 /// preallocated buffer, instead of as an std::string. The caller is required
894 /// to allocate enough space for the token, which is guaranteed to be at least
895 /// Tok.getLength() bytes long. The length of the actual result is returned.
896 ///
897 /// Note that this method may do two possible things: it may either fill in
898 /// the buffer specified with characters, or it may *change the input pointer*
899 /// to point to a constant buffer with the data already in it (avoiding a
900 /// copy). The caller is not allowed to modify the returned buffer pointer
901 /// if an internal buffer is returned.
902 unsigned getSpelling(const Token &Tok, const char *&Buffer,
903 bool *Invalid = 0) const {
904 return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid);
905 }
906
907 /// getSpelling - This method is used to get the spelling of a token into a
908 /// SmallVector. Note that the returned StringRef may not point to the
909 /// supplied buffer if a copy can be avoided.
910 StringRef getSpelling(const Token &Tok,
911 SmallVectorImpl<char> &Buffer,
912 bool *Invalid = 0) const;
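/// A usage sketch for this overload (\c PP and \c Tok assumed):
/// \code
///   SmallString<64> Buffer;
///   bool Invalid = false;
///   StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);
///   if (!Invalid) {
///     // Spelling may point into Buffer or directly into the source buffer.
///   }
/// \endcode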
913
914 /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
915 /// with length 1; return the character.
916 char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
917 bool *Invalid = 0) const {
918 assert(Tok.is(tok::numeric_constant) &&
919 Tok.getLength() == 1 && "Called on unsupported token");
920 assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
921
922 // If the token is carrying a literal data pointer, just use it.
923 if (const char *D = Tok.getLiteralData())
924 return *D;
925
926 // Otherwise, fall back on getCharacterData, which is slower, but always
927 // works.
928 return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
929 }
930
931 /// \brief Retrieve the name of the immediate macro expansion.
932 ///
933 /// This routine starts from a source location, and finds the name of the macro
934 /// responsible for its immediate expansion. It looks through any intervening
935 /// macro argument expansions to compute this. It returns a StringRef which
936 /// refers to the SourceManager-owned buffer of the source where that macro
937 /// name is spelled. Thus, the result shouldn't out-live the SourceManager.
938 StringRef getImmediateMacroName(SourceLocation Loc) {
939 return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts());
940 }
941
942 /// CreateString - Plop the specified string into a scratch buffer and set the
943 /// specified token's location and length to it. If specified, the source
944 /// location provides a location of the expansion point of the token.
945 void CreateString(StringRef Str, Token &Tok,
946 SourceLocation ExpansionLocStart = SourceLocation(),
947 SourceLocation ExpansionLocEnd = SourceLocation());
948
949 /// \brief Computes the source location just past the end of the
950 /// token at this source location.
951 ///
952 /// This routine can be used to produce a source location that
953 /// points just past the end of the token referenced by \p Loc, and
954 /// is generally used when a diagnostic needs to point just after a
955 /// token where it expected something different from what it received. If
956 /// the returned source location would not be meaningful (e.g., if
957 /// it points into a macro), this routine returns an invalid
958 /// source location.
959 ///
960 /// \param Offset an offset from the end of the token, where the source
961 /// location should refer to. The default offset (0) produces a source
962 /// location pointing just past the end of the token; an offset of 1 produces
963 /// a source location pointing to the last character in the token, etc.
964 SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
965 return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts);
966 }
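/// A typical sketch: compute a location just past a token, e.g. to attach a
/// fix-it hint there (the hint mechanism itself is outside this header).
/// \code
///   SourceLocation AfterTok = PP.getLocForEndOfToken(Tok.getLocation());
///   if (AfterTok.isValid()) {
///     // Safe to point a diagnostic or an insertion at AfterTok.
///   }
/// \endcode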
967
968 /// \brief Returns true if the given MacroID location points at the first
969 /// token of the macro expansion.
970 ///
971 /// \param MacroBegin If non-null and the function returns true, it is set to
972 /// the begin location of the macro.
973 bool isAtStartOfMacroExpansion(SourceLocation loc,
974 SourceLocation *MacroBegin = 0) const {
975 return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, LangOpts,
976 MacroBegin);
977 }
978
979 /// \brief Returns true if the given MacroID location points at the last
980 /// token of the macro expansion.
981 ///
982 /// \param MacroEnd If non-null and the function returns true, it is set to
983 /// the end location of the macro.
984 bool isAtEndOfMacroExpansion(SourceLocation loc,
985 SourceLocation *MacroEnd = 0) const {
986 return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd);
987 }
988
989 /// DumpToken - Print the token to stderr, used for debugging.
990 ///
991 void DumpToken(const Token &Tok, bool DumpFlags = false) const;
992 void DumpLocation(SourceLocation Loc) const;
993 void DumpMacro(const MacroInfo &MI) const;
994
995 /// AdvanceToTokenCharacter - Given a location that specifies the start of a
996 /// token, return a new location that specifies a character within the token.
997 SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
998 unsigned Char) const {
999 return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts);
1000 }
1001
1002 /// IncrementPasteCounter - Increment the counters for the number of token
1003 /// paste operations performed. If fast was specified, this is a 'fast paste'
1004 /// case we handled.
1005 ///
1006 void IncrementPasteCounter(bool isFast) {
1007 if (isFast)
1008 ++NumFastTokenPaste;
1009 else
1010 ++NumTokenPaste;
1011 }
1012
1013 void PrintStats();
1014
1015 size_t getTotalMemory() const;
1016
1017 /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
1018 /// comment (/##/) in Microsoft mode, this method handles updating the current
1019 /// state, returning the token on the next source line.
1020 void HandleMicrosoftCommentPaste(Token &Tok);
1021
1022 //===--------------------------------------------------------------------===//
1023 // Preprocessor callback methods. These are invoked by a lexer as various
1024 // directives and events are found.
1025
1026 /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
1027 /// identifier information for the token and install it into the token,
1028 /// updating the token kind accordingly.
1029 IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
1030
1031 private:
1032 llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
1033
1034 public:
1035
1036 // SetPoisonReason - Call this function to indicate the reason for
1037 // poisoning an identifier. If that identifier is accessed while
1038 // poisoned, then this reason will be used instead of the default
1039 // "poisoned" diagnostic.
1040 void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
1041
1042 // HandlePoisonedIdentifier - Display reason for poisoned
1043 // identifier.
1044 void HandlePoisonedIdentifier(Token & Tok);
1045
1046 void MaybeHandlePoisonedIdentifier(Token &Identifier) {
1047 if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
1048 if (II->isPoisoned()) {
1049 HandlePoisonedIdentifier(Identifier);
1050 }
1051 }
1052 }
1053
1054 private:
1055 /// Identifiers used for SEH handling in Borland. These are only
1056 /// allowed in particular circumstances.
1057 // __except block
1058 IdentifierInfo *Ident__exception_code,
1059 *Ident___exception_code,
1060 *Ident_GetExceptionCode;
1061 // __except filter expression
1062 IdentifierInfo *Ident__exception_info,
1063 *Ident___exception_info,
1064 *Ident_GetExceptionInfo;
1065 // __finally
1066 IdentifierInfo *Ident__abnormal_termination,
1067 *Ident___abnormal_termination,
1068 *Ident_AbnormalTermination;
1069 public:
1070 void PoisonSEHIdentifiers(bool Poison = true); // Borland
1071
1072 /// HandleIdentifier - This callback is invoked when the lexer reads an
1073 /// identifier and has filled in the token's IdentifierInfo member. This
1074 /// callback potentially macro expands it or turns it into a named token (like
1075 /// 'for').
1076 void HandleIdentifier(Token &Identifier);
1077
1078
1079 /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
1080 /// the current file. This either returns the EOF token and returns true, or
1081 /// pops a level off the include stack and returns false, at which point the
1082 /// client should call Lex() again.
1083 bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
1084
1085 /// HandleEndOfTokenLexer - This callback is invoked when the current
1086 /// TokenLexer hits the end of its token stream.
1087 bool HandleEndOfTokenLexer(Token &Result);
1088
1089 /// HandleDirective - This callback is invoked when the lexer sees a # token
1090 /// at the start of a line. This consumes the directive, modifies the
1091 /// lexer/preprocessor state, and advances the lexer(s) so that the next token
1092 /// read is the correct one.
1093 void HandleDirective(Token &Result);
1094
1095 /// CheckEndOfDirective - Ensure that the next token is a tok::eod token. If
1096 /// not, emit a diagnostic and consume up until the eod. If EnableMacros is
1097 /// true, then we consider macros that expand to zero tokens as being ok.
1098 void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
1099
1100 /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
1101 /// current line until the tok::eod token is found.
1102 void DiscardUntilEndOfDirective();
1103
1104 /// SawDateOrTime - This returns true if the preprocessor has seen a use of
1105 /// __DATE__ or __TIME__ in the file so far.
1106 bool SawDateOrTime() const {
1107 return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
1108 }
1109 unsigned getCounterValue() const { return CounterValue; }
1110 void setCounterValue(unsigned V) { CounterValue = V; }
1111
1112 /// \brief Retrieves the module that we're currently building, if any.
1113 Module *getCurrentModule();
1114
1115 /// \brief Allocate a new MacroInfo object with the provided SourceLocation.
1116 MacroInfo *AllocateMacroInfo(SourceLocation L);
1117
1118 /// \brief Allocate a new MacroInfo object which is clone of \p MI.
1119 MacroInfo *CloneMacroInfo(const MacroInfo &MI);
1120
1121 /// \brief Turn the specified lexer token into a fully checked and spelled
1122 /// filename, e.g. as an operand of \#include.
1123 ///
1124 /// The caller is expected to provide a buffer that is large enough to hold
1125 /// the spelling of the filename, but is also expected to handle the case
1126 /// when this method decides to use a different buffer.
1127 ///
1128 /// \returns true if the input filename was in <>'s or false if it was
1129 /// in ""'s.
1130 bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename);
1131
1132 /// \brief Given a "foo" or \<foo> reference, look up the indicated file.
1133 ///
1134 /// Returns null on failure. \p isAngled indicates whether the file
1135 /// reference is for system \#include's or not (i.e. using <> instead of "").
1136 const FileEntry *LookupFile(StringRef Filename,
1137 bool isAngled, const DirectoryLookup *FromDir,
1138 const DirectoryLookup *&CurDir,
1139 SmallVectorImpl<char> *SearchPath,
1140 SmallVectorImpl<char> *RelativePath,
1141 Module **SuggestedModule,
1142 bool SkipCache = false);
1143
1144 /// GetCurDirLookup - Return the DirectoryLookup structure used to find the current
1145 /// FileEntry, if CurLexer is non-null and if applicable. This allows us to
1146 /// implement \#include_next and find directory-specific properties.
1147 const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
1148
1149 /// \brief Return true if we're in the top-level file, not in a \#include.
1150 bool isInPrimaryFile() const;
1151
1152 /// ConcatenateIncludeName - Handle cases where the \#include name is expanded
1153 /// from a macro as multiple tokens, which need to be glued together. This
1154 /// occurs for code like:
1155 /// \code
1156 /// \#define FOO <x/y.h>
1157 /// \#include FOO
1158 /// \endcode
1159 /// because in this case, "<x/y.h>" is returned as 7 tokens, not one.
1160 ///
1161 /// This code concatenates and consumes tokens up to the '>' token. It
1162 /// returns false if the > was found, otherwise it returns true if it finds
1163 /// and consumes the EOD marker.
1164 bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer,
1165 SourceLocation &End);
1166
1167 /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
1168 /// followed by EOD. Return true if the token is not a valid on-off-switch.
1169 bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
1170
1171 private:
1172
1173 void PushIncludeMacroStack() {
1174 IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
1175 CurLexer.take(),
1176 CurPTHLexer.take(),
1177 CurPPLexer,
1178 CurTokenLexer.take(),
1179 CurDirLookup));
1180 CurPPLexer = 0;
1181 }
1182
1183 void PopIncludeMacroStack() {
1184 CurLexer.reset(IncludeMacroStack.back().TheLexer);
1185 CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
1186 CurPPLexer = IncludeMacroStack.back().ThePPLexer;
1187 CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
1188 CurDirLookup = IncludeMacroStack.back().TheDirLookup;
1189 CurLexerKind = IncludeMacroStack.back().CurLexerKind;
1190 IncludeMacroStack.pop_back();
1191 }
1192
1193 /// \brief Allocate a new MacroInfo object.
1194 MacroInfo *AllocateMacroInfo();
1195
1196 /// \brief Release the specified MacroInfo for re-use.
1197 ///
1198 /// This memory will be reused for allocating new MacroInfo objects.
1199 void ReleaseMacroInfo(MacroInfo* MI);
1200
1201 /// ReadMacroName - Lex and validate a macro name, which occurs after a
1202 /// \#define or \#undef. This emits a diagnostic, sets the token kind to eod,
1203 /// and discards the rest of the macro line if the macro name is invalid.
1204 void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
1205
1206 /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
1207 /// definition has just been read. Lex the rest of the arguments and the
1208 /// closing ), updating MI with what we learn and saving in LastTok the
1209 /// last token read.
1210 /// Return true if an error occurs parsing the arg list.
1211 bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);
1212
1213 /// We just read a \#if or related directive and decided that the
1214 /// subsequent tokens are in the \#if'd out portion of the
1215 /// file. Lex the rest of the file, until we see an \#endif. If \p
1216 /// FoundNonSkipPortion is true, then we have already emitted code for part of
1217 /// this \#if directive, so \#else/\#elif blocks should never be entered. If
1218 /// \p FoundElse is false, then \#else directives are ok, if not, then we have
1219 /// already seen one so a \#else directive is a duplicate. When this returns,
1220 /// the caller can lex the first valid token.
1221 void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
1222 bool FoundNonSkipPortion, bool FoundElse,
1223 SourceLocation ElseLoc = SourceLocation());
1224
1225 /// \brief A fast PTH version of SkipExcludedConditionalBlock.
1226 void PTHSkipExcludedConditionalBlock();
1227
1228 /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
1229 /// may occur after a #if or #elif directive and return it as a bool. If the
1230 /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
1231 bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
1232
1233 /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
1234 /// \#pragma GCC poison/system_header/dependency and \#pragma once.
1235 void RegisterBuiltinPragmas();
1236
1237 /// \brief Register builtin macros such as __LINE__ with the identifier table.
1238 void RegisterBuiltinMacros();
1239
1240 /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
1241 /// be expanded as a macro, handle it and return the next token as 'Tok'. If
1242 /// the macro should not be expanded return true, otherwise return false.
1243 bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
1244
1245 /// \brief Cache macro-expanded tokens for TokenLexers.
1246 ///
1247 /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
1248 /// going to lex into the cache, and when it finishes, the tokens are removed
1249 /// from the end of the cache.
1250 Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
1251 ArrayRef<Token> tokens);
1252 void removeCachedMacroExpandedTokensOfLastLexer();
1253 friend void TokenLexer::ExpandFunctionArguments();
1254
1255 /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
1256 /// lexed is a '('. If so, consume the token and return true, if not, this
1257 /// method should have no observable side-effect on the lexed tokens.
1258 bool isNextPPTokenLParen();
1259
1260 /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
1261 /// invoked to read all of the formal arguments specified for the macro
1262 /// invocation. This returns null on error.
1263 MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
1264 SourceLocation &ExpansionEnd);
1265
1266 /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
1267 /// as a builtin macro, handle it and return the next token as 'Tok'.
1268 void ExpandBuiltinMacro(Token &Tok);
1269
1270 /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
1271 /// return the first token after the directive. The _Pragma token has just
1272 /// been read into 'Tok'.
1273 void Handle_Pragma(Token &Tok);
1274
1275 /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
1276 /// is not enclosed within a string literal.
1277 void HandleMicrosoft__pragma(Token &Tok);
1278
1279 /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
1280 /// start lexing tokens from it instead of the current buffer.
1281 void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
1282
1283 /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
1284 /// start getting tokens from it using the PTH cache.
1285 void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
1286
1287 /// IsFileLexer - Returns true if we are lexing from a file and not a
1288 /// pragma or a macro.
1289 static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
1290 return L ? !L->isPragmaLexer() : P != 0;
1291 }
1292
1293 static bool IsFileLexer(const IncludeStackInfo& I) {
1294 return IsFileLexer(I.TheLexer, I.ThePPLexer);
1295 }
1296
1297 bool IsFileLexer() const {
1298 return IsFileLexer(CurLexer.get(), CurPPLexer);
1299 }
1300
1301 //===--------------------------------------------------------------------===//
1302 // Caching stuff.
1303 void CachingLex(Token &Result);
1304 bool InCachingLexMode() const {
1305 // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
1306 // that we are past EOF, not that we are in CachingLex mode.
1307 return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
1308 !IncludeMacroStack.empty();
1309 }
1310 void EnterCachingLexMode();
1311 void ExitCachingLexMode() {
1312 if (InCachingLexMode())
1313 RemoveTopOfLexerStack();
1314 }
1315 const Token &PeekAhead(unsigned N);
1316 void AnnotatePreviousCachedTokens(const Token &Tok);
1317
1318 //===--------------------------------------------------------------------===//
1319 /// Handle*Directive - implement the various preprocessor directives. These
1320 /// should side-effect the current preprocessor object so that the next call
1321 /// to Lex() will return the appropriate token next.
1322 void HandleLineDirective(Token &Tok);
1323 void HandleDigitDirective(Token &Tok);
1324 void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
1325 void HandleIdentSCCSDirective(Token &Tok);
1326 void HandleMacroPublicDirective(Token &Tok);
1327 void HandleMacroPrivateDirective(Token &Tok);
1328
1329 // File inclusion.
1330 void HandleIncludeDirective(SourceLocation HashLoc,
1331 Token &Tok,
1332 const DirectoryLookup *LookupFrom = 0,
1333 bool isImport = false);
1334 void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
1335 void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
1336 void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
1337 void HandleMicrosoftImportDirective(Token &Tok);
1338
1339 // Macro handling.
1340 void HandleDefineDirective(Token &Tok);
1341 void HandleUndefDirective(Token &Tok);
1342
1343 // Conditional Inclusion.
1344 void HandleIfdefDirective(Token &Tok, bool isIfndef,
1345 bool ReadAnyTokensBeforeDirective);
1346 void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
1347 void HandleEndifDirective(Token &Tok);
1348 void HandleElseDirective(Token &Tok);
1349 void HandleElifDirective(Token &Tok);
1350
1351 // Pragmas.
1352 void HandlePragmaDirective(unsigned Introducer);
1353 public:
1354 void HandlePragmaOnce(Token &OnceTok);
1355 void HandlePragmaMark();
1356 void HandlePragmaPoison(Token &PoisonTok);
1357 void HandlePragmaSystemHeader(Token &SysHeaderTok);
1358 void HandlePragmaDependency(Token &DependencyTok);
1359 void HandlePragmaComment(Token &CommentTok);
1360 void HandlePragmaMessage(Token &MessageTok);
1361 void HandlePragmaPushMacro(Token &Tok);
1362 void HandlePragmaPopMacro(Token &Tok);
1363 void HandlePragmaIncludeAlias(Token &Tok);
1364 IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
1365
1366 // Return true and store the first token only if any CommentHandler
1367 // has inserted some tokens and getCommentRetentionState() is false.
1368 bool HandleComment(Token &Token, SourceRange Comment);
1369
1370 /// \brief A macro is used, update information about macros that need unused
1371 /// warnings.
1372 void markMacroAsUsed(MacroInfo *MI);
1373 };
1374
1375 /// \brief Abstract base class that describes a handler that will receive
1376 /// source ranges for each of the comments encountered in the source file.
1377 class CommentHandler {
1378 public:
1379 virtual ~CommentHandler();
1380
1381 // The handler shall return true if it has pushed any tokens
1382 // to be read using e.g. EnterToken or EnterTokenStream.
1383 virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
1384 };
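/// A minimal sketch of a concrete handler (the class name is hypothetical):
/// it merely observes comments and pushes no tokens back, so it returns false.
/// \code
///   class LoggingCommentHandler : public CommentHandler {
///   public:
///     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
///       // Inspect Comment via PP.getSourceManager() if desired.
///       return false;  // No tokens were pushed with EnterToken().
///     }
///   };
///   // Registration: PP.addCommentHandler(new LoggingCommentHandler());
/// \endcode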
1385
1386 } // end namespace clang
1387
1388 #endif