// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A support library for macro authors when defining new macros.
//!
//! This library, provided by the standard distribution, defines the types
//! consumed in the interfaces of procedurally defined macro definitions.
//! Currently the primary use of this crate is to provide the ability to define
//! new custom derive modes through `#[proc_macro_derive]`.
//!
//! Note that this crate is intentionally very bare-bones currently. The main
//! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
//! implementations, indicating that it can only go to and come from a string.
//! This functionality is intended to be expanded over time as more surface
//! area for macro authors is stabilized.
//!
//! See [the book](../book/first-edition/procedural-macros.html) for more.
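//!
//! A minimal sketch of how the current string-based surface is used from a
//! custom derive (assuming a separate crate compiled with `proc-macro = true`;
//! `MyTrait` and the input type `Foo` are hypothetical):
//!
//! ```ignore
//! extern crate proc_macro;
//! use proc_macro::TokenStream;
//!
//! #[proc_macro_derive(MyTrait)]
//! pub fn derive_my_trait(input: TokenStream) -> TokenStream {
//!     // Inspect the input via `Display` ...
//!     let source = input.to_string();
//!     assert!(source.contains("struct Foo"));
//!     // ... and build the output via `FromStr`.
//!     "impl MyTrait for Foo {}".parse().unwrap()
//! }
//! ```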

#![stable(feature = "proc_macro_lib", since = "1.15.0")]
#![deny(warnings)]
#![deny(missing_docs)]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       html_playground_url = "https://play.rust-lang.org/",
       issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
       test(no_crate_inject, attr(deny(warnings))),
       test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]

#![cfg_attr(stage0, feature(i128_type))]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(lang_items)]
#![feature(optin_builtin_traits)]

extern crate syntax;
extern crate syntax_pos;
extern crate rustc_errors;
extern crate rustc_data_structures;

mod diagnostic;

#[unstable(feature = "proc_macro", issue = "38356")]
pub use diagnostic::{Diagnostic, Level};

use std::{ascii, fmt, iter};
use rustc_data_structures::sync::Lrc;
use std::str::FromStr;

use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, token};
use syntax::symbol::Symbol;
use syntax::tokenstream;
use syntax_pos::DUMMY_SP;
use syntax_pos::{FileMap, Pos, SyntaxContext, FileName};
use syntax_pos::hygiene::Mark;

/// The main type provided by this crate, representing an abstract stream of
/// tokens.
///
/// This is both the input and output of `#[proc_macro_derive]` definitions.
/// Currently it's required to be a list of valid Rust items, but this
/// restriction may be lifted in the future.
///
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
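///
/// A sketch of the string round-trip that is available today (this only works
/// inside a procedural macro invocation, where a parse session is set up):
///
/// ```ignore
/// let stream: TokenStream = "fn answer() -> u32 { 3 }".parse().unwrap();
/// // `Display` renders the stream back as Rust source text.
/// assert!(stream.to_string().contains("answer"));
/// ```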
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone, Debug)]
pub struct TokenStream(tokenstream::TokenStream);

/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
pub struct LexError {
    _inner: (),
}

#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
    type Err = LexError;

    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        __internal::with_sess(|(sess, mark)| {
            let src = src.to_string();
            let name = FileName::ProcMacroSourceCode;
            let expn_info = mark.expn_info().unwrap();
            let call_site = expn_info.call_site;
            // notify the expansion info that it is unhygienic
            let mark = Mark::fresh(mark);
            mark.set_expn_info(expn_info);
            let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark));
            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span));
            Ok(__internal::token_stream_wrap(stream))
        })
    }
}

#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
/// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
///
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
/// To quote `$` itself, use `$$`.
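///
/// A sketch of unquoting (a hypothetical snippet; like the rest of this API it
/// can only run inside a procedural macro, since quoting needs the expansion
/// context):
///
/// ```ignore
/// let body: TokenStream = "1 + 2".parse().unwrap();
/// // `$body` splices the already-built stream into the quoted tokens.
/// let tokens: TokenStream = quote!(fn answer() -> i32 { $body });
/// ```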
#[unstable(feature = "proc_macro", issue = "38356")]
#[macro_export]
macro_rules! quote { () => {} }

#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
mod quote;

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenTree> for TokenStream {
    fn from(tree: TokenTree) -> TokenStream {
        TokenStream(tree.to_internal())
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenNode> for TokenStream {
    fn from(kind: TokenNode) -> TokenStream {
        TokenTree::from(kind).into()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
        let mut builder = tokenstream::TokenStreamBuilder::new();
        for stream in streams {
            builder.push(stream.into().0);
        }
        TokenStream(builder.build())
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    fn into_iter(self) -> TokenTreeIter {
        TokenTreeIter { cursor: self.0.trees(), stack: Vec::new() }
    }
}

impl TokenStream {
    /// Returns an empty `TokenStream`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn empty() -> TokenStream {
        TokenStream(tokenstream::TokenStream::empty())
    }

    /// Checks if this `TokenStream` is empty.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

/// A region of source code, along with macro expansion information.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Span(syntax_pos::Span);

impl Span {
    /// A span that resolves at the macro definition site.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn def_site() -> Span {
        ::__internal::with_sess(|(_, mark)| {
            let call_site = mark.expn_info().unwrap().call_site;
            Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
        })
    }
}

/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn quote_span(span: Span) -> TokenStream {
    quote::Quote::quote(span)
}

macro_rules! diagnostic_method {
    ($name:ident, $level:expr) => (
        /// Create a new `Diagnostic` with the given `message` at the span
        /// `self`.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
            Diagnostic::spanned(self, $level, message)
        }
    )
}

impl Span {
    /// The span of the invocation of the current procedural macro.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn call_site() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
    }

    /// The original source file into which this span points.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn source_file(&self) -> SourceFile {
        SourceFile {
            filemap: __internal::lookup_char_pos(self.0.lo()).file,
        }
    }

    /// The `Span` for the tokens in the previous macro expansion from which
    /// `self` was generated, if any.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn parent(&self) -> Option<Span> {
        self.0.ctxt().outer().expn_info().map(|i| Span(i.call_site))
    }

    /// The span for the original source code that `self` was generated from.
    /// If this `Span` wasn't generated from other macro expansions, the return
    /// value is the same as `*self`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn source(&self) -> Span {
        Span(self.0.source_callsite())
    }

    /// Get the starting line/column in the source file for this span.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn start(&self) -> LineColumn {
        let loc = __internal::lookup_char_pos(self.0.lo());
        LineColumn {
            line: loc.line,
            column: loc.col.to_usize()
        }
    }

    /// Get the ending line/column in the source file for this span.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn end(&self) -> LineColumn {
        let loc = __internal::lookup_char_pos(self.0.hi());
        LineColumn {
            line: loc.line,
            column: loc.col.to_usize()
        }
    }

    /// Create a new span encompassing `self` and `other`.
    ///
    /// Returns `None` if `self` and `other` are from different files.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn join(&self, other: Span) -> Option<Span> {
        let self_loc = __internal::lookup_char_pos(self.0.lo());
        let other_loc = __internal::lookup_char_pos(other.0.lo());

        if self_loc.file.name != other_loc.file.name { return None }

        Some(Span(self.0.to(other.0)))
    }

    /// Creates a new span with the same line/column information as `self` but
    /// that resolves symbols as though it were at `other`.
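    ///
    /// For example, a hypothetical macro can emit code that carries the
    /// definition-site location but resolves names at the call site:
    ///
    /// ```ignore
    /// let span = Span::def_site().resolved_at(Span::call_site());
    /// ```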
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn resolved_at(&self, other: Span) -> Span {
        Span(self.0.with_ctxt(other.0.ctxt()))
    }

    /// Creates a new span with the same name resolution behavior as `self` but
    /// with the line/column information of `other`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn located_at(&self, other: Span) -> Span {
        other.resolved_at(*self)
    }

    diagnostic_method!(error, Level::Error);
    diagnostic_method!(warning, Level::Warning);
    diagnostic_method!(note, Level::Note);
    diagnostic_method!(help, Level::Help);
}

/// A line-column pair representing the start or end of a `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineColumn {
    /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub line: usize,
    /// The 0-indexed column (in UTF-8 characters) in the source file on which
    /// the span starts or ends (inclusive).
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub column: usize
}

/// The source file of a given `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
pub struct SourceFile {
    filemap: Lrc<FileMap>,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for SourceFile {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for SourceFile {}

impl SourceFile {
    /// Get the path to this source file.
    ///
    /// ### Note
    /// If the code span associated with this `SourceFile` was generated by an external macro, this
    /// may not be an actual path on the filesystem. Use [`is_real`] to check.
    ///
    /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
    /// the command line, the path as given may not actually be valid.
    ///
    /// [`is_real`]: #method.is_real
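    ///
    /// A hypothetical sketch (`span` stands for some `Span` obtained in a macro):
    ///
    /// ```ignore
    /// let file = span.source_file();
    /// if file.is_real() {
    ///     println!("the input came from {:?}", file.path());
    /// }
    /// ```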
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn path(&self) -> &FileName {
        &self.filemap.name
    }

    /// Returns `true` if this source file is a real source file, and not generated by an external
    /// macro's expansion.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_real(&self) -> bool {
        // This is a hack until intercrate spans are implemented and we can have real source files
        // for spans generated in external macros.
        // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
        self.filemap.is_real_file()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl AsRef<FileName> for SourceFile {
    fn as_ref(&self) -> &FileName {
        self.path()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SourceFile")
            .field("path", self.path())
            .field("is_real", &self.is_real())
            .finish()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl PartialEq for SourceFile {
    fn eq(&self, other: &Self) -> bool {
        Lrc::ptr_eq(&self.filemap, &other.filemap)
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl Eq for SourceFile {}

#[unstable(feature = "proc_macro", issue = "38356")]
impl PartialEq<FileName> for SourceFile {
    fn eq(&self, other: &FileName) -> bool {
        self.as_ref() == other
    }
}

/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
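///
/// A sketch of building a single tree by hand (the `From<TokenNode>` impl
/// below fills in a definition-site span):
///
/// ```ignore
/// let plus = TokenTree::from(TokenNode::Op('+', Spacing::Alone));
/// ```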
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone, Debug)]
pub struct TokenTree {
    /// The `TokenTree`'s span
    pub span: Span,
    /// Description of the `TokenTree`
    pub kind: TokenNode,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenNode> for TokenTree {
    fn from(kind: TokenNode) -> TokenTree {
        TokenTree { span: Span::def_site(), kind: kind }
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for TokenTree {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenStream::from(self.clone()).fmt(f)
    }
}

/// Description of a `TokenTree`
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum TokenNode {
    /// A delimited tokenstream.
    Group(Delimiter, TokenStream),
    /// A Unicode identifier.
    Term(Term),
    /// A punctuation character (`+`, `,`, `$`, etc.).
    Op(char, Spacing),
    /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
    Literal(Literal),
}

/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// An implicit delimiter, e.g. `$var`, where `$var` is `...`.
    None,
}

/// An interned string.
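///
/// A sketch of interning a string and reading it back:
///
/// ```ignore
/// let term = Term::intern("foo");
/// assert_eq!(term.as_str(), "foo");
/// ```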
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Term(Symbol);

impl Term {
    /// Intern a string into a `Term`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn intern(string: &str) -> Term {
        Term(Symbol::intern(string))
    }

    /// Get a reference to the interned string.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn as_str(&self) -> &str {
        unsafe { &*(&*self.0.as_str() as *const str) }
    }
}

/// Whether an `Op` is followed immediately by another `Op` or by whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Spacing {
    /// e.g. `+` is `Alone` in `+ =`.
    Alone,
    /// e.g. `+` is `Joint` in `+=`.
    Joint,
}

/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
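///
/// A sketch of the constructors defined below (each one interns the textual
/// form of the value):
///
/// ```ignore
/// let int = Literal::u32(7);
/// let text = Literal::string("hello");
/// let tree = TokenTree::from(TokenNode::Literal(int));
/// ```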
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Literal(token::Token);

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
    }
}

macro_rules! int_literals {
    ($($int_kind:ident),*) => {$(
        /// Integer literal.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $int_kind(n: $int_kind) -> Literal {
            Literal::typed_integer(n as i128, stringify!($int_kind))
        }
    )*}
}

impl Literal {
    /// Integer literal
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn integer(n: i128) -> Literal {
        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None))
    }

    int_literals!(u8, i8, u16, i16, u32, i32, u64, i64, usize, isize);
    fn typed_integer(n: i128, kind: &'static str) -> Literal {
        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern(kind))))
    }

    /// Floating point literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn float(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None))
    }

    /// Floating point literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f32(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid f32 literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern("f32"))))
    }

    /// Floating point literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f64(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid f64 literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern("f64"))))
    }

    /// String literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn string(string: &str) -> Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_debug());
        }
        Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
    }

    /// Character literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn character(ch: char) -> Literal {
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None))
    }

    /// Byte string literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn byte_string(bytes: &[u8]) -> Literal {
        let string = bytes.iter().cloned().flat_map(ascii::escape_default)
                          .map(Into::<char>::into).collect::<String>();
        Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None))
    }
}

/// An iterator over `TokenTree`s.
#[derive(Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct TokenTreeIter {
    cursor: tokenstream::Cursor,
    stack: Vec<TokenTree>,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    fn next(&mut self) -> Option<TokenTree> {
        loop {
            let tree = self.stack.pop().or_else(|| {
                let next = self.cursor.next_as_stream()?;
                Some(TokenTree::from_internal(next, &mut self.stack))
            })?;
            if tree.span.0 == DUMMY_SP {
                if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
                    self.cursor.insert(stream.0);
                    continue
                }
            }
            return Some(tree);
        }
    }
}

impl Delimiter {
    fn from_internal(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::Paren => Delimiter::Parenthesis,
            token::Brace => Delimiter::Brace,
            token::Bracket => Delimiter::Bracket,
            token::NoDelim => Delimiter::None,
        }
    }

    fn to_internal(self) -> token::DelimToken {
        match self {
            Delimiter::Parenthesis => token::Paren,
            Delimiter::Brace => token::Brace,
            Delimiter::Bracket => token::Bracket,
            Delimiter::None => token::NoDelim,
        }
    }
}

impl TokenTree {
    fn from_internal(stream: tokenstream::TokenStream, stack: &mut Vec<TokenTree>)
                     -> TokenTree {
        use syntax::parse::token::*;

        let (tree, is_joint) = stream.as_tree();
        let (span, token) = match tree {
            tokenstream::TokenTree::Token(span, token) => (span, token),
            tokenstream::TokenTree::Delimited(span, delimed) => {
                let delimiter = Delimiter::from_internal(delimed.delim);
                return TokenTree {
                    span: Span(span),
                    kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())),
                };
            }
        };

        let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
        macro_rules! tt {
            ($e:expr) => (TokenTree { span: Span(span), kind: $e })
        }
        macro_rules! op {
            ($a:expr) => (TokenNode::Op($a, op_kind));
            ($a:expr, $b:expr) => ({
                stack.push(tt!(TokenNode::Op($b, op_kind).into()));
                TokenNode::Op($a, Spacing::Joint)
            });
            ($a:expr, $b:expr, $c:expr) => ({
                stack.push(tt!(TokenNode::Op($c, op_kind)));
                stack.push(tt!(TokenNode::Op($b, Spacing::Joint)));
                TokenNode::Op($a, Spacing::Joint)
            })
        }

        let kind = match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => op!('<', '='),
            EqEq => op!('=', '='),
            Ne => op!('!', '='),
            Ge => op!('>', '='),
            Gt => op!('>'),
            AndAnd => op!('&', '&'),
            OrOr => op!('|', '|'),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => op!('<', '<'),
            BinOp(Shr) => op!('>', '>'),
            BinOpEq(Plus) => op!('+', '='),
            BinOpEq(Minus) => op!('-', '='),
            BinOpEq(Star) => op!('*', '='),
            BinOpEq(Slash) => op!('/', '='),
            BinOpEq(Percent) => op!('%', '='),
            BinOpEq(Caret) => op!('^', '='),
            BinOpEq(And) => op!('&', '='),
            BinOpEq(Or) => op!('|', '='),
            BinOpEq(Shl) => op!('<', '<', '='),
            BinOpEq(Shr) => op!('>', '>', '='),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => op!('.', '.'),
            DotDotDot => op!('.', '.', '.'),
            DotDotEq => op!('.', '.', '='),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => op!(':', ':'),
            RArrow => op!('-', '>'),
            LArrow => op!('<', '-'),
            FatArrow => op!('=', '>'),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),

            Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
            Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
            Literal(..) => TokenNode::Literal(self::Literal(token)),
            DocComment(c) => {
                let stream = vec![
                    tt!(TokenNode::Term(Term::intern("doc"))),
                    tt!(op!('=')),
                    tt!(TokenNode::Literal(self::Literal(Literal(Lit::Str_(c), None)))),
                ].into_iter().collect();
                stack.push(tt!(TokenNode::Group(Delimiter::Bracket, stream)));
                op!('#')
            }

            Interpolated(_) => {
                __internal::with_sess(|(sess, _)| {
                    let tts = token.interpolated_to_tokenstream(sess, span);
                    TokenNode::Group(Delimiter::None, TokenStream(tts))
                })
            }

            DotEq => op!('.', '='),
            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        };

        TokenTree { span: Span(span), kind: kind }
    }

    fn to_internal(self) -> tokenstream::TokenStream {
        use syntax::parse::token::*;
        use syntax::tokenstream::{TokenTree, Delimited};

        let (op, kind) = match self.kind {
            TokenNode::Op(op, kind) => (op, kind),
            TokenNode::Group(delimiter, tokens) => {
                return TokenTree::Delimited(self.span.0, Delimited {
                    delim: delimiter.to_internal(),
                    tts: tokens.0.into(),
                }).into();
            },
            TokenNode::Term(symbol) => {
                let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
                let sym_str = symbol.0.as_str();
                let token =
                    if sym_str.starts_with("'") { Lifetime(ident) }
                    else if sym_str.starts_with("r#") {
                        let name = Symbol::intern(&sym_str[2..]);
                        let ident = ast::Ident { name, ctxt: self.span.0.ctxt() };
                        Ident(ident, true)
                    } else { Ident(ident, false) };
                return TokenTree::Token(self.span.0, token).into();
            }
            TokenNode::Literal(self::Literal(Literal(Lit::Integer(ref a), b)))
                if a.as_str().starts_with("-") =>
            {
                let minus = BinOp(BinOpToken::Minus);
                let integer = Symbol::intern(&a.as_str()[1..]);
                let integer = Literal(Lit::Integer(integer), b);
                let a = TokenTree::Token(self.span.0, minus);
                let b = TokenTree::Token(self.span.0, integer);
                return vec![a, b].into_iter().collect()
            }
            TokenNode::Literal(self::Literal(Literal(Lit::Float(ref a), b)))
                if a.as_str().starts_with("-") =>
            {
                let minus = BinOp(BinOpToken::Minus);
                let float = Symbol::intern(&a.as_str()[1..]);
                let float = Literal(Lit::Float(float), b);
                let a = TokenTree::Token(self.span.0, minus);
                let b = TokenTree::Token(self.span.0, float);
                return vec![a, b].into_iter().collect()
            }
            TokenNode::Literal(token) => {
                return TokenTree::Token(self.span.0, token.0).into()
            }
        };

        let token = match op {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            _ => panic!("unsupported character {}", op),
        };

        let tree = TokenTree::Token(self.span.0, token);
        match kind {
            Spacing::Alone => tree.into(),
            Spacing::Joint => tree.joint(),
        }
    }
}

/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
/// These methods are used by the rest of the compiler to generate instances of
/// `TokenStream` to hand to macro definitions, as well as consume the output.
///
/// Note that this module is also intentionally separate from the rest of the
/// crate. This allows the `#[unstable]` directive below to naturally apply to
/// all of the contents.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
pub mod __internal {
    pub use quote::{LiteralKind, Quoter, unquote};

    use std::cell::Cell;

    use syntax::ast;
    use syntax::ext::base::ExtCtxt;
    use syntax::ext::hygiene::Mark;
    use syntax::ptr::P;
    use syntax::parse::{self, ParseSess};
    use syntax::parse::token::{self, Token};
    use syntax::tokenstream;
    use syntax_pos::{BytePos, Loc, DUMMY_SP};

    use super::{TokenStream, LexError};

    pub fn lookup_char_pos(pos: BytePos) -> Loc {
        with_sess(|(sess, _)| sess.codemap().lookup_char_pos(pos))
    }

    pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
        let token = Token::interpolated(token::NtItem(item));
        TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
    }

    pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
        TokenStream(inner)
    }

    pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
        with_sess(move |(sess, _)| {
            let mut parser = parse::stream_to_parser(sess, stream.0);
            let mut items = Vec::new();

            while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
                items.push(item)
            }

            Ok(items)
        })
    }

    pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
        stream.0
    }

    pub trait Registry {
        fn register_custom_derive(&mut self,
                                  trait_name: &str,
                                  expand: fn(TokenStream) -> TokenStream,
                                  attributes: &[&'static str]);

        fn register_attr_proc_macro(&mut self,
                                    name: &str,
                                    expand: fn(TokenStream, TokenStream) -> TokenStream);

        fn register_bang_proc_macro(&mut self,
                                    name: &str,
                                    expand: fn(TokenStream) -> TokenStream);
    }

    // Emulate scoped_thread_local!() here essentially
    thread_local! {
        static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
            Cell::new((0 as *const _, Mark::root()));
    }

    pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
        where F: FnOnce() -> R
    {
        struct Reset { prev: (*const ParseSess, Mark) }

        impl Drop for Reset {
            fn drop(&mut self) {
                CURRENT_SESS.with(|p| p.set(self.prev));
            }
        }

        CURRENT_SESS.with(|p| {
            let _reset = Reset { prev: p.get() };
            p.set((cx.parse_sess, cx.current_expansion.mark));
            f()
        })
    }

    pub fn in_sess() -> bool
    {
        let p = CURRENT_SESS.with(|p| p.get());
        !p.0.is_null()
    }

    pub fn with_sess<F, R>(f: F) -> R
        where F: FnOnce((&ParseSess, Mark)) -> R
    {
        let p = CURRENT_SESS.with(|p| p.get());
        assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
                                 before set_parse_sess()!");
        f(unsafe { (&*p.0, p.1) })
    }
}

fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
    err.cancel();
    LexError { _inner: () }
}