// Vendored snapshot of src/libproc_macro/lib.rs from rustc 1.21.0
// (upstream version 1.21.0+dfsg1).
1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A support library for macro authors when defining new macros.
12 //!
13 //! This library, provided by the standard distribution, provides the types
14 //! consumed in the interfaces of procedurally defined macro definitions.
15 //! Currently the primary use of this crate is to provide the ability to define
16 //! new custom derive modes through `#[proc_macro_derive]`.
17 //!
18 //! Note that this crate is intentionally very bare-bones currently. The main
19 //! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
20 //! implementations, indicating that it can only go to and come from a string.
21 //! This functionality is intended to be expanded over time as more surface
22 //! area for macro authors is stabilized.
23 //!
24 //! See [the book](../book/first-edition/procedural-macros.html) for more.
25
26 #![stable(feature = "proc_macro_lib", since = "1.15.0")]
27 #![deny(warnings)]
28 #![deny(missing_docs)]
29 #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
30 html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
31 html_root_url = "https://doc.rust-lang.org/nightly/",
32 html_playground_url = "https://play.rust-lang.org/",
33 issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
34 test(no_crate_inject, attr(deny(warnings))),
35 test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
36
37 #![feature(i128_type)]
38 #![feature(rustc_private)]
39 #![feature(staged_api)]
40 #![feature(lang_items)]
41
42 #[macro_use]
43 extern crate syntax;
44 extern crate syntax_pos;
45
46 use std::{ascii, fmt, iter};
47 use std::str::FromStr;
48
49 use syntax::ast;
50 use syntax::errors::DiagnosticBuilder;
51 use syntax::parse::{self, token, parse_stream_from_source_str};
52 use syntax::print::pprust;
53 use syntax::symbol::Symbol;
54 use syntax::tokenstream;
55 use syntax_pos::DUMMY_SP;
56 use syntax_pos::SyntaxContext;
57 use syntax_pos::hygiene::Mark;
58
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
///
/// This is both the input and output of `#[proc_macro_derive]` definitions.
/// Currently it's required to be a list of valid Rust items, but this
/// restriction may be lifted in the future.
///
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone, Debug)]
// Newtype over the compiler-internal `tokenstream::TokenStream`; the wrapper
// keeps the unstable internal representation out of the stable public API.
pub struct TokenStream(tokenstream::TokenStream);

/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
pub struct LexError {
    // Private zero-sized field: prevents construction outside this crate and
    // leaves room to add error details later without breaking the API.
    _inner: (),
}
78
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
    type Err = LexError;

    /// Lexes and parses `src` into a `TokenStream` using the parse session and
    /// expansion mark of the proc-macro invocation currently in scope (see
    /// `__internal::with_sess`).
    ///
    /// NOTE(review): despite the `Result` return type, this closure always
    /// returns `Ok` — lex/parse diagnostics go through the session instead.
    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        __internal::with_sess(|(sess, mark)| {
            let src = src.to_string();
            let name = "<proc-macro source code>".to_string();
            let expn_info = mark.expn_info().unwrap();
            let call_site = expn_info.call_site;
            // notify the expansion info that it is unhygienic
            let mark = Mark::fresh(mark);
            mark.set_expn_info(expn_info);
            // Reuse the call-site location but attach the fresh mark's syntax
            // context so the parsed tokens carry the right hygiene info.
            let span = syntax_pos::Span {
                ctxt: SyntaxContext::empty().apply_mark(mark),
                ..call_site
            };
            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span));
            Ok(__internal::token_stream_wrap(stream))
        })
    }
}
101
102 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
103 impl fmt::Display for TokenStream {
104 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
105 self.0.fmt(f)
106 }
107 }
108
/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
/// For example, `quote!(a + b)` will produce an expression, that, when evaluated, constructs
/// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
///
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
/// To quote `$` itself, use `$$`.
///
/// NOTE(review): the empty expansion below is only what rustdoc sees;
/// presumably the compiler substitutes the real quoter (see the hidden
/// `quote` module) — confirm against the expansion machinery.
#[unstable(feature = "proc_macro", issue = "38356")]
#[macro_export]
macro_rules! quote { () => {} }

// Internal quoter implementation; hidden because it is permanently unstable.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
mod quote;
122
123 #[unstable(feature = "proc_macro", issue = "38356")]
124 impl From<TokenTree> for TokenStream {
125 fn from(tree: TokenTree) -> TokenStream {
126 TokenStream(tree.to_internal())
127 }
128 }
129
130 #[unstable(feature = "proc_macro", issue = "38356")]
131 impl From<TokenNode> for TokenStream {
132 fn from(kind: TokenNode) -> TokenStream {
133 TokenTree::from(kind).into()
134 }
135 }
136
137 #[unstable(feature = "proc_macro", issue = "38356")]
138 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
139 fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
140 let mut builder = tokenstream::TokenStreamBuilder::new();
141 for stream in streams {
142 builder.push(stream.into().0);
143 }
144 TokenStream(builder.build())
145 }
146 }
147
#[unstable(feature = "proc_macro", issue = "38356")]
impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    /// Creates an iterator over the top-level `TokenTree`s of this stream.
    fn into_iter(self) -> TokenTreeIter {
        // `next: None` — no split-off token is buffered yet; see
        // `TokenTreeIter::next` for how that buffer is used.
        TokenTreeIter { cursor: self.0.trees(), next: None }
    }
}
157
impl TokenStream {
    /// Returns an empty `TokenStream`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn empty() -> TokenStream {
        TokenStream(tokenstream::TokenStream::empty())
    }

    /// Checks if this `TokenStream` is empty.
    ///
    /// Delegates to the internal stream's emptiness check.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
171
/// A region of source code, along with macro expansion information.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug)]
pub struct Span(syntax_pos::Span);

#[unstable(feature = "proc_macro", issue = "38356")]
impl Default for Span {
    /// Returns a span at the current invocation's call-site location, but
    /// carrying the current expansion mark in its syntax context so that
    /// tokens built with it participate in this expansion's hygiene.
    fn default() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span {
            ctxt: SyntaxContext::empty().apply_mark(mark),
            // Copy location fields from the call site of the expansion.
            ..mark.expn_info().unwrap().call_site
        }))
    }
}
186
/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn quote_span(span: Span) -> TokenStream {
    // Delegates to the internal `Quote` impl for the compiler's span type.
    TokenStream(quote::Quote::quote(&span.0))
}
193
impl Span {
    /// The span of the invocation of the current procedural macro.
    ///
    /// Panics (via `unwrap`) if the current expansion mark carries no
    /// expansion info; `with_sess` already asserts that a session is in scope.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn call_site() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
    }
}
201
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone, Debug)]
pub struct TokenTree {
    /// The `TokenTree`'s span.
    pub span: Span,
    /// Description of the `TokenTree`: group, term, operator, or literal.
    pub kind: TokenNode,
}
211
212 #[unstable(feature = "proc_macro", issue = "38356")]
213 impl From<TokenNode> for TokenTree {
214 fn from(kind: TokenNode) -> TokenTree {
215 TokenTree { span: Span::default(), kind: kind }
216 }
217 }
218
219 #[unstable(feature = "proc_macro", issue = "38356")]
220 impl fmt::Display for TokenTree {
221 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
222 TokenStream::from(self.clone()).fmt(f)
223 }
224 }
225
/// Description of a `TokenTree`.
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum TokenNode {
    /// A delimited tokenstream, e.g. the contents of `( ... )` or `{ ... }`.
    Group(Delimiter, TokenStream),
    /// A unicode identifier (also used for lifetimes; see `to_internal`).
    Term(Term),
    /// A punctuation character (`+`, `,`, `$`, etc.), with spacing info.
    Op(char, Spacing),
    /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
    Literal(Literal),
}
239
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// An implicit delimiter, e.g. `$var`, where $var is `...`.
    None,
}
253
/// An interned string.
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Term(Symbol);

impl Term {
    /// Intern a string into a `Term`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn intern(string: &str) -> Term {
        Term(Symbol::intern(string))
    }

    /// Get a reference to the interned string.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn as_str(&self) -> &str {
        // SAFETY(review): this extends the lifetime of the `&str` produced by
        // `Symbol::as_str()` past its guard, presumably because interned
        // symbol data lives for the whole session — confirm against the
        // `syntax::symbol` interner's guarantees.
        unsafe { &*(&*self.0.as_str() as *const str) }
    }
}
272
/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Spacing {
    /// e.g. `+` is `Alone` in `+ =`.
    Alone,
    /// e.g. `+` is `Joint` in `+=`.
    Joint,
}
282
/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
#[derive(Clone, Debug)]
// Newtype over the compiler's token type; constructors below build only
// literal-carrying tokens.
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Literal(token::Token);

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render by wrapping in a `TokenTree` (with a dummy span) and reusing
        // its `Display` impl, which pretty-prints via a `TokenStream`.
        TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
    }
}
294
// Generates one suffixed-integer constructor per listed primitive type
// (e.g. `Literal::u8(n)`), each delegating to `Literal::typed_integer`.
macro_rules! int_literals {
    ($($int_kind:ident),*) => {$(
        /// Integer literal.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $int_kind(n: $int_kind) -> Literal {
            // The widening cast to i128 is lossless for every listed type.
            Literal::typed_integer(n as i128, stringify!($int_kind))
        }
    )*}
}
304
impl Literal {
    /// Unsuffixed integer literal (the `None` means no type suffix).
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn integer(n: i128) -> Literal {
        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None))
    }

    // Expands to `u8`..`isize` constructors producing suffixed literals.
    int_literals!(u8, i8, u16, i16, u32, i32, u64, i64, usize, isize);

    // Shared helper for the suffixed constructors above; `kind` is the suffix
    // text (e.g. "u8") interned alongside the digits.
    fn typed_integer(n: i128, kind: &'static str) -> Literal {
        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern(kind))))
    }

    /// Floating point literal (unsuffixed).
    ///
    /// Panics if `n` is NaN or infinite, since those have no literal form.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn float(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None))
    }

    /// Floating point literal with an `f32` suffix.
    ///
    /// Panics if `n` is NaN or infinite.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f32(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid f32 literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern("f32"))))
    }

    /// Floating point literal with an `f64` suffix.
    ///
    /// Panics if `n` is NaN or infinite.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f64(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid f64 literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern("f64"))))
    }

    /// String literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn string(string: &str) -> Literal {
        // Every char is stored in `\u{...}` escaped form, so the interned
        // text is always valid literal content regardless of input.
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_unicode());
        }
        Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
    }

    /// Character literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn character(ch: char) -> Literal {
        // Stored in `\u{...}` escaped form, as with `string` above.
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None))
    }

    /// Byte string literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn byte_string(bytes: &[u8]) -> Literal {
        // ASCII-escape each byte (e.g. `\n`, `\xff`) into the interned text.
        let string = bytes.iter().cloned().flat_map(ascii::escape_default)
                          .map(Into::<char>::into).collect::<String>();
        Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None))
    }
}
373
/// An iterator over `TokenTree`s.
#[derive(Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct TokenTreeIter {
    // Cursor over the underlying internal token stream.
    cursor: tokenstream::Cursor,
    // Buffers the remainder of a multi-character token that
    // `TokenTree::from_internal` split in two (e.g. `+=` yielded as `+` then
    // `=`); consumed before the cursor is advanced again.
    next: Option<tokenstream::TokenStream>,
}
381
#[unstable(feature = "proc_macro", issue = "38356")]
impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    fn next(&mut self) -> Option<TokenTree> {
        loop {
            // Prefer the buffered remainder of a previously split operator;
            // otherwise pull the next tree from the cursor.
            let next =
                unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
            let tree = TokenTree::from_internal(next, &mut self.next);
            if tree.span.0 == DUMMY_SP {
                // A no-delimiter group with a dummy span is flattened into the
                // cursor instead of being yielded, so callers see its contents
                // as plain tokens.
                if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
                    self.cursor.insert(stream.0);
                    continue
                }
            }
            return Some(tree);
        }
    }
}
401
impl Delimiter {
    /// Maps the compiler's `DelimToken` to the public `Delimiter`.
    fn from_internal(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::Paren => Delimiter::Parenthesis,
            token::Brace => Delimiter::Brace,
            token::Bracket => Delimiter::Bracket,
            token::NoDelim => Delimiter::None,
        }
    }

    /// Inverse of `from_internal`.
    fn to_internal(self) -> token::DelimToken {
        match self {
            Delimiter::Parenthesis => token::Paren,
            Delimiter::Brace => token::Brace,
            Delimiter::Bracket => token::Bracket,
            Delimiter::None => token::NoDelim,
        }
    }
}
421
impl TokenTree {
    // Converts one internal token tree (wrapped as a one-tree stream) into a
    // public `TokenTree`. Multi-character operators (`+=`, `::`, …) are split:
    // the first character is returned now and the remainder is stored in
    // `next` for the iterator to yield on its following call.
    fn from_internal(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>)
                     -> TokenTree {
        use syntax::parse::token::*;

        let (tree, is_joint) = stream.as_tree();
        let (mut span, token) = match tree {
            tokenstream::TokenTree::Token(span, token) => (span, token),
            // Delimited groups convert directly; no splitting needed.
            tokenstream::TokenTree::Delimited(span, delimed) => {
                let delimiter = Delimiter::from_internal(delimed.delim);
                return TokenTree {
                    span: Span(span),
                    kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())),
                };
            }
        };

        let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
        // Single-character operator with the stream's own spacing.
        macro_rules! op {
            ($op:expr) => { TokenNode::Op($op, op_kind) }
        }

        // Multi-character operator: yield the first char now, buffer the rest.
        macro_rules! joint {
            ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) }
        }

        fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span,
                 next: &mut Option<tokenstream::TokenStream>)
                 -> TokenNode {
            // NOTE(review): both halves currently reuse the same span; the
            // reassignment below is a no-op, presumably a vestige of span
            // splitting — confirm against upstream history.
            let (first_span, rest_span) = (*span, *span);
            *span = first_span;
            let tree = tokenstream::TokenTree::Token(rest_span, rest);
            // The buffered remainder keeps the original jointness.
            *next = Some(if is_joint { tree.joint() } else { tree.into() });
            // The first char is always Joint: it touches the buffered rest.
            TokenNode::Op(first, Spacing::Joint)
        }

        let kind = match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => joint!('<', Eq),
            EqEq => joint!('=', Eq),
            Ne => joint!('!', Eq),
            Ge => joint!('>', Eq),
            Gt => op!('>'),
            AndAnd => joint!('&', BinOp(And)),
            OrOr => joint!('|', BinOp(Or)),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => joint!('<', Lt),
            BinOp(Shr) => joint!('>', Gt),
            BinOpEq(Plus) => joint!('+', Eq),
            BinOpEq(Minus) => joint!('-', Eq),
            BinOpEq(Star) => joint!('*', Eq),
            BinOpEq(Slash) => joint!('/', Eq),
            BinOpEq(Percent) => joint!('%', Eq),
            BinOpEq(Caret) => joint!('^', Eq),
            BinOpEq(And) => joint!('&', Eq),
            BinOpEq(Or) => joint!('|', Eq),
            // `<<=` splits as `<` + `<=`; `>>=` as `>` + `>=`.
            BinOpEq(Shl) => joint!('<', Le),
            BinOpEq(Shr) => joint!('>', Ge),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => joint!('.', Dot),
            DotDotDot => joint!('.', DotDot),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => joint!(':', Colon),
            RArrow => joint!('-', Gt),
            LArrow => joint!('<', BinOp(Minus)),
            FatArrow => joint!('=', Gt),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),
            Underscore => op!('_'),

            // Lifetimes share `Term` with identifiers; `to_internal` tells
            // them apart by the leading `'`.
            Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
            Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),

            Interpolated(ref nt) => {
                // An `Interpolated` token means that we have a `Nonterminal`
                // which is often a parsed AST item. At this point we now need
                // to convert the parsed AST to an actual token stream, e.g.
                // un-parse it basically.
                //
                // Unfortunately there's not really a great way to do that in a
                // guaranteed lossless fashion right now. The fallback here is
                // to just stringify the AST node and reparse it, but this loses
                // all span information.
                //
                // As a result, some AST nodes are annotated with the token
                // stream they came from. Attempt to extract these lossless
                // token streams before we fall back to the stringification.
                let mut tokens = None;

                match nt.0 {
                    Nonterminal::NtItem(ref item) => {
                        tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
                    }
                    Nonterminal::NtTraitItem(ref item) => {
                        tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
                    }
                    Nonterminal::NtImplItem(ref item) => {
                        tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
                    }
                    _ => {}
                }

                // Either way the result is exposed as an implicit
                // (no-delimiter) group, which the iterator flattens.
                tokens.map(|tokens| {
                    TokenNode::Group(Delimiter::None,
                                     TokenStream(tokens.clone()))
                }).unwrap_or_else(|| {
                    __internal::with_sess(|(sess, _)| {
                        TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| {
                            // FIXME(jseyfried): Avoid this pretty-print + reparse hack
                            let name = "<macro expansion>".to_owned();
                            let source = pprust::token_to_string(&token);
                            parse_stream_from_source_str(name, source, sess, Some(span))
                        })))
                    })
                })
            }

            // Delimiters arrive as `Delimited` trees, never as raw tokens, and
            // trivia never reaches token streams.
            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        };

        TokenTree { span: Span(span), kind: kind }
    }

    // Converts this public tree back into an internal token stream.
    fn to_internal(self) -> tokenstream::TokenStream {
        use syntax::parse::token::*;
        use syntax::tokenstream::{TokenTree, Delimited};

        // Groups, terms and literals convert directly and return early; only
        // `Op` falls through to the single-character match below.
        let (op, kind) = match self.kind {
            TokenNode::Op(op, kind) => (op, kind),
            TokenNode::Group(delimiter, tokens) => {
                return TokenTree::Delimited(self.span.0, Delimited {
                    delim: delimiter.to_internal(),
                    tts: tokens.0.into(),
                }).into();
            },
            TokenNode::Term(symbol) => {
                let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt };
                // A leading quote distinguishes lifetimes from identifiers.
                let token =
                    if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
                return TokenTree::Token(self.span.0, token).into();
            }
            TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
        };

        let token = match op {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            '_' => Underscore,
            _ => panic!("unsupported character {}", op),
        };

        // Spacing is re-encoded as jointness on the internal tree.
        let tree = TokenTree::Token(self.span.0, token);
        match kind {
            Spacing::Alone => tree.into(),
            Spacing::Joint => tree.joint(),
        }
    }
}
614
// Builds a token stream consisting of `attrs` (re-parsed from their
// pretty-printed form) followed by `tokens`. Returns `None` when no cached
// tokens are available, letting the caller fall back to stringifying the
// whole AST node.
fn prepend_attrs(attrs: &[ast::Attribute],
                 tokens: Option<&tokenstream::TokenStream>,
                 span: syntax_pos::Span)
                 -> Option<tokenstream::TokenStream>
{
    let tokens = match tokens {
        Some(tokens) => tokens,
        None => return None,
    };
    if attrs.len() == 0 {
        return Some(tokens.clone())
    }
    let mut builder = tokenstream::TokenStreamBuilder::new();
    for attr in attrs {
        assert_eq!(attr.style, ast::AttrStyle::Outer,
                   "inner attributes should prevent cached tokens from existing");
        let stream = __internal::with_sess(|(sess, _)| {
            // FIXME: Avoid this pretty-print + reparse hack as above
            let name = "<macro expansion>".to_owned();
            let source = pprust::attr_to_string(attr);
            parse_stream_from_source_str(name, source, sess, Some(span))
        });
        builder.push(stream);
    }
    builder.push(tokens.clone());
    Some(builder.build())
}
642
/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
/// These methods are used by the rest of the compiler to generate instances of
/// `TokenStream` to hand to macro definitions, as well as consume the output.
///
/// Note that this module is also intentionally separate from the rest of the
/// crate. This allows the `#[unstable]` directive below to naturally apply to
/// all of the contents.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
pub mod __internal {
    pub use quote::{Quoter, __rt};

    use std::cell::Cell;

    use syntax::ast;
    use syntax::ext::base::ExtCtxt;
    use syntax::ext::hygiene::Mark;
    use syntax::ptr::P;
    use syntax::parse::{self, ParseSess};
    use syntax::parse::token::{self, Token};
    use syntax::tokenstream;
    use syntax_pos::DUMMY_SP;

    use super::{TokenStream, LexError};

    /// Wraps a parsed item as a single interpolated token in a `TokenStream`.
    pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
        let token = Token::interpolated(token::NtItem(item));
        TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
    }

    /// Wraps an internal token stream in the public `TokenStream` newtype.
    pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
        TokenStream(inner)
    }

    /// Parses `stream` as a sequence of items using the session installed by
    /// `set_sess`. Parse errors are cancelled and surfaced as `LexError`.
    pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
        with_sess(move |(sess, _)| {
            let mut parser = parse::stream_to_parser(sess, stream.0);
            let mut items = Vec::new();

            while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
                items.push(item)
            }

            Ok(items)
        })
    }

    /// Unwraps the public `TokenStream` newtype.
    pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
        stream.0
    }

    /// Hooks the compiler uses to register the three kinds of procedural
    /// macros exported by a proc-macro crate.
    pub trait Registry {
        fn register_custom_derive(&mut self,
                                  trait_name: &str,
                                  expand: fn(TokenStream) -> TokenStream,
                                  attributes: &[&'static str]);

        fn register_attr_proc_macro(&mut self,
                                    name: &str,
                                    expand: fn(TokenStream, TokenStream) -> TokenStream);

        fn register_bang_proc_macro(&mut self,
                                    name: &str,
                                    expand: fn(TokenStream) -> TokenStream);
    }

    // Emulate scoped_thread_local!() here essentially. A null pointer marks
    // "no session installed"; `with_sess` asserts against it below.
    thread_local! {
        static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
            Cell::new((0 as *const _, Mark::root()));
    }

    /// Installs `cx`'s parse session and expansion mark as the thread-current
    /// session for the duration of `f`, restoring the previous value on exit
    /// — including on panic, via the `Reset` drop guard.
    pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
        where F: FnOnce() -> R
    {
        struct Reset { prev: (*const ParseSess, Mark) }

        impl Drop for Reset {
            fn drop(&mut self) {
                CURRENT_SESS.with(|p| p.set(self.prev));
            }
        }

        CURRENT_SESS.with(|p| {
            let _reset = Reset { prev: p.get() };
            p.set((cx.parse_sess, cx.current_expansion.mark));
            f()
        })
    }

    /// Runs `f` with the thread-current parse session and mark.
    ///
    /// Panics if called outside `set_sess` (i.e. outside a macro expansion).
    pub fn with_sess<F, R>(f: F) -> R
        where F: FnOnce((&ParseSess, Mark)) -> R
    {
        let p = CURRENT_SESS.with(|p| p.get());
        assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
                                 before set_parse_sess()!");
        // SAFETY(review): non-null implies a `set_sess` frame is on the stack,
        // whose session presumably outlives this call — confirm that nothing
        // stashes the pointer past that frame.
        f(unsafe { (&*p.0, p.1) })
    }
}
744
// Converts a parser diagnostic into the opaque `LexError`, cancelling the
// diagnostic so it is not emitted to the user.
fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
    err.cancel();
    LexError { _inner: () }
}