1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A support library for macro authors when defining new macros.
13 //! This library, provided by the standard distribution, provides the types
14 //! consumed in the interfaces of procedurally defined macro definitions.
15 //! Currently the primary use of this crate is to provide the ability to define
16 //! new custom derive modes through `#[proc_macro_derive]`.
18 //! Note that this crate is intentionally very bare-bones currently. The main
19 //! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
20 //! implementations, indicating that it can only go to and come from a string.
21 //! This functionality is intended to be expanded over time as more surface
22 //! area for macro authors is stabilized.
24 //! See [the book](../book/first-edition/procedural-macros.html) for more.
26 #![stable(feature = "proc_macro_lib", since = "1.15.0")]
28 #![deny(missing_docs)]
29 #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
30 html_favicon_url
= "https://doc.rust-lang.org/favicon.ico",
31 html_root_url
= "https://doc.rust-lang.org/nightly/",
32 html_playground_url
= "https://play.rust-lang.org/",
33 issue_tracker_base_url
= "https://github.com/rust-lang/rust/issues/",
34 test(no_crate_inject
, attr(deny(warnings
))),
35 test(attr(allow(dead_code
, deprecated
, unused_variables
, unused_mut
))))]
37 #![feature(i128_type)]
38 #![feature(rustc_private)]
39 #![feature(staged_api)]
40 #![feature(lang_items)]
44 extern crate syntax_pos
;
46 use std
::{ascii, fmt, iter}
;
47 use std
::str::FromStr
;
50 use syntax
::errors
::DiagnosticBuilder
;
51 use syntax
::parse
::{self, token, parse_stream_from_source_str}
;
52 use syntax
::print
::pprust
;
53 use syntax
::symbol
::Symbol
;
54 use syntax
::tokenstream
;
55 use syntax_pos
::DUMMY_SP
;
56 use syntax_pos
::SyntaxContext
;
57 use syntax_pos
::hygiene
::Mark
;
59 /// The main type provided by this crate, representing an abstract stream of
62 /// This is both the input and output of `#[proc_macro_derive]` definitions.
63 /// Currently it's required to be a list of valid Rust items, but this
64 /// restriction may be lifted in the future.
66 /// The API of this type is intentionally bare-bones, but it'll be expanded over
68 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
69 #[derive(Clone, Debug)]
70 pub struct TokenStream(tokenstream
::TokenStream
);
72 /// Error returned from `TokenStream::from_str`.
73 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
// Lexes/parses a source string into a `TokenStream` inside the current
// expansion session. NOTE(review): this extract is line-garbled; the
// original lines 81-82 (`type Err = LexError;`) and the closing
// braces/struct-update tail (orig. 94-95, 98-101) appear to be elided.
79 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
80 impl FromStr
for TokenStream
{
83 fn from_str(src
: &str) -> Result
<TokenStream
, LexError
> {
// Run inside the thread-local parse session; `mark` is the current
// expansion mark (see `__internal::with_sess`).
84 __internal
::with_sess(|(sess
, mark
)| {
85 let src
= src
.to_string();
// Synthetic file name used for diagnostics on the parsed snippet.
86 let name
= "<proc-macro source code>".to_string();
87 let expn_info
= mark
.expn_info().unwrap();
88 let call_site
= expn_info
.call_site
;
89 // notify the expansion info that it is unhygienic
90 let mark
= Mark
::fresh(mark
);
91 mark
.set_expn_info(expn_info
);
// Build a span carrying the fresh (unhygienic) mark so parsed tokens
// resolve at the call site.
92 let span
= syntax_pos
::Span
{
93 ctxt
: SyntaxContext
::empty().apply_mark(mark
),
96 let stream
= parse
::parse_stream_from_source_str(name
, src
, sess
, Some(span
));
// Wrap the internal stream into the public `TokenStream` newtype.
97 Ok(__internal
::token_stream_wrap(stream
))
// Pretty-prints the token stream back to Rust source text.
// NOTE(review): the body (orig. lines 105-107) is elided in this extract;
// presumably it delegates to the internal pretty-printer — confirm against
// the full file.
102 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
103 impl fmt
::Display
for TokenStream
{
104 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
109 /// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
110 /// For example, `quote!(a + b)` will produce a expression, that, when evaluated, constructs
111 /// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
113 /// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
114 /// To quote `$` itself, use `$$`.
115 #[unstable(feature = "proc_macro", issue = "38356")]
// NOTE(review): the macro body shown here is an empty stub; the real
// expansion is implemented by the compiler (quoter) — the definition below
// only reserves the name for documentation purposes. Confirm against the
// full file, as intervening original lines are elided.
117 macro_rules
! quote { () => {}
}
119 #[unstable(feature = "proc_macro_internals", issue = "27812")]
// Converts a single `TokenTree` into a one-element `TokenStream` via the
// tree's internal representation. NOTE(review): closing braces (orig.
// 127-129) are elided in this extract.
123 #[unstable(feature = "proc_macro", issue = "38356")]
124 impl From
<TokenTree
> for TokenStream
{
125 fn from(tree
: TokenTree
) -> TokenStream
{
126 TokenStream(tree
.to_internal())
// Converts a bare `TokenNode` into a `TokenStream` by first wrapping it in
// a `TokenTree` (which attaches a default span), then reusing the
// `From<TokenTree>` impl above.
130 #[unstable(feature = "proc_macro", issue = "38356")]
131 impl From
<TokenNode
> for TokenStream
{
132 fn from(kind
: TokenNode
) -> TokenStream
{
133 TokenTree
::from(kind
).into()
// Collects anything convertible into `TokenStream`s by concatenating them
// with the internal `TokenStreamBuilder`.
137 #[unstable(feature = "proc_macro", issue = "38356")]
138 impl<T
: Into
<TokenStream
>> iter
::FromIterator
<T
> for TokenStream
{
139 fn from_iter
<I
: IntoIterator
<Item
= T
>>(streams
: I
) -> Self {
140 let mut builder
= tokenstream
::TokenStreamBuilder
::new();
141 for stream
in streams
{
// `.0` unwraps the public newtype to push the internal stream.
142 builder
.push(stream
.into().0);
144 TokenStream(builder
.build())
// Iterating a `TokenStream` yields public `TokenTree`s, driven by an
// internal cursor (see `TokenTreeIter` below).
148 #[unstable(feature = "proc_macro", issue = "38356")]
149 impl IntoIterator
for TokenStream
{
150 type Item
= TokenTree
;
151 type IntoIter
= TokenTreeIter
;
153 fn into_iter(self) -> TokenTreeIter
{
// `next: None` is the one-stream lookahead used when splitting joint ops.
154 TokenTreeIter { cursor: self.0.trees(), next: None }
// NOTE(review): these are methods of an `impl TokenStream` whose header
// (orig. line 158) is elided in this extract.
159 /// Returns an empty `TokenStream`.
160 #[unstable(feature = "proc_macro", issue = "38356")]
161 pub fn empty() -> TokenStream
{
162 TokenStream(tokenstream
::TokenStream
::empty())
165 /// Checks if this `TokenStream` is empty.
166 #[unstable(feature = "proc_macro", issue = "38356")]
// NOTE(review): the body (orig. line 168) is elided; presumably it
// delegates to the internal stream's emptiness check — confirm.
167 pub fn is_empty(&self) -> bool
{
172 /// A region of source code, along with macro expansion information.
173 #[unstable(feature = "proc_macro", issue = "38356")]
174 #[derive(Copy, Clone, Debug)]
175 pub struct Span(syntax_pos
::Span
);
// The default span is the current expansion's call site, re-marked with the
// session's expansion mark. NOTE(review): closing delimiters (orig.
// 183-185) are elided in this extract.
177 #[unstable(feature = "proc_macro", issue = "38356")]
178 impl Default
for Span
{
179 fn default() -> Span
{
180 ::__internal
::with_sess(|(_
, mark
)| Span(syntax_pos
::Span
{
181 ctxt
: SyntaxContext
::empty().apply_mark(mark
),
// Struct-update: take the remaining fields from the call-site span.
182 ..mark
.expn_info().unwrap().call_site
// NOTE(review): free-standing `Span` helpers; the enclosing `impl Span`
// header (if any) is elided in this extract, as are the closing braces.
187 /// Quote a `Span` into a `TokenStream`.
188 /// This is needed to implement a custom quoter.
189 #[unstable(feature = "proc_macro", issue = "38356")]
190 pub fn quote_span(span
: Span
) -> TokenStream
{
191 TokenStream(quote
::Quote
::quote(&span
.0))
195 /// The span of the invocation of the current procedural macro.
196 #[unstable(feature = "proc_macro", issue = "38356")]
197 pub fn call_site() -> Span
{
// Read the call-site span recorded on the current expansion mark.
198 ::__internal
::with_sess(|(_
, mark
)| Span(mark
.expn_info().unwrap().call_site
))
202 /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
203 #[unstable(feature = "proc_macro", issue = "38356")]
204 #[derive(Clone, Debug)]
205 pub struct TokenTree
{
// NOTE(review): the field declarations themselves (orig. 207, 209-210,
// i.e. `pub span: Span,` / `pub kind: TokenNode,` and the closing brace)
// are elided in this extract; only their doc comments remain.
206 /// The `TokenTree`'s span
208 /// Description of the `TokenTree`
// Wraps a `TokenNode` in a `TokenTree`, attaching `Span::default()`
// (the current call site) as its span.
212 #[unstable(feature = "proc_macro", issue = "38356")]
213 impl From
<TokenNode
> for TokenTree
{
214 fn from(kind
: TokenNode
) -> TokenTree
{
215 TokenTree { span: Span::default(), kind: kind }
// Displays a single tree by promoting it to a one-element `TokenStream`
// and reusing that type's `Display` impl (clone is needed because `fmt`
// only has `&self`).
219 #[unstable(feature = "proc_macro", issue = "38356")]
220 impl fmt
::Display
for TokenTree
{
221 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
222 TokenStream
::from(self.clone()).fmt(f
)
// NOTE(review): fragments of two enums. The `enum TokenNode { ... }` and
// `enum Delimiter { ... }` headers and most variant lines are elided in
// this extract; only `Group(..)` and assorted doc comments survive.
226 /// Description of a `TokenTree`
227 #[derive(Clone, Debug)]
228 #[unstable(feature = "proc_macro", issue = "38356")]
230 /// A delimited tokenstream.
231 Group(Delimiter
, TokenStream
),
232 /// A unicode identifier.
234 /// A punctuation character (`+`, `,`, `$`, etc.).
236 /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
240 /// Describes how a sequence of token trees is delimited.
241 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
242 #[unstable(feature = "proc_macro", issue = "38356")]
250 /// An implicit delimiter, e.g. `$var`, where $var is `...`.
254 /// An interned string.
255 #[derive(Copy, Clone, Debug)]
256 #[unstable(feature = "proc_macro", issue = "38356")]
257 pub struct Term(Symbol
);
// NOTE(review): methods of an `impl Term` whose header is elided in this
// extract.
260 /// Intern a string into a `Term`.
261 #[unstable(feature = "proc_macro", issue = "38356")]
262 pub fn intern(string
: &str) -> Term
{
263 Term(Symbol
::intern(string
))
266 /// Get a reference to the interned string.
267 #[unstable(feature = "proc_macro", issue = "38356")]
268 pub fn as_str(&self) -> &str {
// SAFETY-NOTE(review): this casts away the lifetime of the interner's
// string via a raw-pointer round trip; it relies on interned symbols
// living for the whole session — confirm that invariant still holds.
269 unsafe { &*(&*self.0.as_str() as *const str) }
// NOTE(review): fragment of `enum Spacing`; the enum header and variant
// lines (`Alone`, `Joint`) are elided in this extract — only the doc
// comments and attributes remain.
273 /// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
274 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
275 #[unstable(feature = "proc_macro", issue = "38356")]
277 /// e.g. `+` is `Alone` in `+ =`.
279 /// e.g. `+` is `Joint` in `+=`.
283 /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
284 #[derive(Clone, Debug)]
285 #[unstable(feature = "proc_macro", issue = "38356")]
286 pub struct Literal(token
::Token
);
// Displays a literal by wrapping it into a `TokenTree` with a dummy span
// and reusing that type's `Display` impl.
288 #[unstable(feature = "proc_macro", issue = "38356")]
289 impl fmt
::Display
for Literal
{
290 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
291 TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }
.fmt(f
)
// Constructors for `Literal` (methods of an `impl Literal` whose header is
// elided in this extract). NOTE(review): many closing braces and some guard
// lines (e.g. the finiteness checks before the `panic!`s in the float
// constructors) are elided by the extraction.
// Generates one suffixed-integer constructor per named integer type,
// widening through `i128` and recording the type name as the suffix.
295 macro_rules
! int_literals
{
296 ($
($int_kind
:ident
),*) => {$
(
298 #[unstable(feature = "proc_macro", issue = "38356")]
299 pub fn $
int_kind(n
: $int_kind
) -> Literal
{
300 Literal
::typed_integer(n
as i128
, stringify
!($int_kind
))
// Unsuffixed integer literal (`None` = no type suffix).
307 #[unstable(feature = "proc_macro", issue = "38356")]
308 pub fn integer(n
: i128
) -> Literal
{
309 Literal(token
::Literal(token
::Lit
::Integer(Symbol
::intern(&n
.to_string())), None
))
312 int_literals
!(u8, i8, u16, i16, u32, i32, u64, i64, usize, isize);
// Shared helper: integer literal carrying an explicit type suffix.
313 fn typed_integer(n
: i128
, kind
: &'
static str) -> Literal
{
314 Literal(token
::Literal(token
::Lit
::Integer(Symbol
::intern(&n
.to_string())),
315 Some(Symbol
::intern(kind
))))
318 /// Floating point literal.
319 #[unstable(feature = "proc_macro", issue = "38356")]
320 pub fn float(n
: f64) -> Literal
{
// NOTE(review): the guard condition preceding this panic (orig. line 321)
// is elided; presumably it rejects NaN/infinite values — confirm.
322 panic
!("Invalid float literal {}", n
);
324 Literal(token
::Literal(token
::Lit
::Float(Symbol
::intern(&n
.to_string())), None
))
327 /// Floating point literal.
328 #[unstable(feature = "proc_macro", issue = "38356")]
329 pub fn f32(n
: f32) -> Literal
{
331 panic
!("Invalid f32 literal {}", n
);
333 Literal(token
::Literal(token
::Lit
::Float(Symbol
::intern(&n
.to_string())),
334 Some(Symbol
::intern("f32"))))
337 /// Floating point literal.
338 #[unstable(feature = "proc_macro", issue = "38356")]
339 pub fn f64(n
: f64) -> Literal
{
341 panic
!("Invalid f64 literal {}", n
);
343 Literal(token
::Literal(token
::Lit
::Float(Symbol
::intern(&n
.to_string())),
344 Some(Symbol
::intern("f64"))))
// String literal: every char is stored in `\u{...}` escaped form.
348 #[unstable(feature = "proc_macro", issue = "38356")]
349 pub fn string(string
: &str) -> Literal
{
350 let mut escaped
= String
::new();
351 for ch
in string
.chars() {
352 escaped
.extend(ch
.escape_unicode());
354 Literal(token
::Literal(token
::Lit
::Str_(Symbol
::intern(&escaped
)), None
))
357 /// Character literal.
358 #[unstable(feature = "proc_macro", issue = "38356")]
359 pub fn character(ch
: char) -> Literal
{
360 let mut escaped
= String
::new();
361 escaped
.extend(ch
.escape_unicode());
362 Literal(token
::Literal(token
::Lit
::Char(Symbol
::intern(&escaped
)), None
))
365 /// Byte string literal.
366 #[unstable(feature = "proc_macro", issue = "38356")]
367 pub fn byte_string(bytes
: &[u8]) -> Literal
{
// Bytes are rendered through `ascii::escape_default` so non-printable
// bytes appear as escapes in the stored literal text.
368 let string
= bytes
.iter().cloned().flat_map(ascii
::escape_default
)
369 .map(Into
::<char>::into
).collect
::<String
>();
370 Literal(token
::Literal(token
::Lit
::ByteStr(Symbol
::intern(&string
)), None
))
374 /// An iterator over `TokenTree`s.
376 #[unstable(feature = "proc_macro", issue = "38356")]
377 pub struct TokenTreeIter
{
// Cursor into the underlying internal token stream.
378 cursor
: tokenstream
::Cursor
,
// One-stream lookahead: holds the remainder when a compound token
// (e.g. `+=`) is split into two `Op`s by `from_internal`.
379 next
: Option
<tokenstream
::TokenStream
>,
// Pulls the next internal stream (preferring the `next` lookahead),
// converts it to a public `TokenTree`, and flattens spanless `None`-
// delimited groups back into the cursor. NOTE(review): loop header and
// closing braces (orig. 385, 387-388, 394-400) are elided in this extract.
382 #[unstable(feature = "proc_macro", issue = "38356")]
383 impl Iterator
for TokenTreeIter
{
384 type Item
= TokenTree
;
386 fn next(&mut self) -> Option
<TokenTree
> {
// Take the stashed lookahead first, else advance the cursor; end of
// stream terminates iteration.
389 unwrap_or
!(self.next
.take().or_else(|| self.cursor
.next_as_stream()), return None
);
390 let tree
= TokenTree
::from_internal(next
, &mut self.next
);
// A dummy-spanned implicit group carries no user tokens of its own:
// splice its contents into the cursor instead of yielding it.
391 if tree
.span
.0 == DUMMY_SP
{
392 if let TokenNode
::Group(Delimiter
::None
, stream
) = tree
.kind
{
393 self.cursor
.insert(stream
.0);
// Bijective mapping between the public `Delimiter` and the compiler's
// `token::DelimToken`. NOTE(review): the `match` headers and closing
// braces (orig. 404, 409-411, 413, 418-419) are elided in this extract;
// these appear to be methods of an elided `impl Delimiter`.
403 fn from_internal(delim
: token
::DelimToken
) -> Delimiter
{
405 token
::Paren
=> Delimiter
::Parenthesis
,
406 token
::Brace
=> Delimiter
::Brace
,
407 token
::Bracket
=> Delimiter
::Bracket
,
408 token
::NoDelim
=> Delimiter
::None
,
// Inverse of `from_internal`.
412 fn to_internal(self) -> token
::DelimToken
{
414 Delimiter
::Parenthesis
=> token
::Paren
,
415 Delimiter
::Brace
=> token
::Brace
,
416 Delimiter
::Bracket
=> token
::Bracket
,
417 Delimiter
::None
=> token
::NoDelim
,
// Converts one internal token (or delimited group) into a public
// `TokenTree`. Compound punctuation (`+=`, `&&`, `..`, `::`, `->`, …) is
// split into single-char `Op`s: the first char is yielded now and the
// remainder is stashed in `next` for the iterator's following call.
// NOTE(review): this extract elides many structural lines (return type,
// match headers, macro wrappers and closing braces), so the transcription
// below is read-only context, not compilable on its own.
423 fn from_internal(stream
: tokenstream
::TokenStream
, next
: &mut Option
<tokenstream
::TokenStream
>)
425 use syntax
::parse
::token
::*;
427 let (tree
, is_joint
) = stream
.as_tree();
428 let (mut span
, token
) = match tree
{
429 tokenstream
::TokenTree
::Token(span
, token
) => (span
, token
),
// Delimited groups map straight to `TokenNode::Group`.
430 tokenstream
::TokenTree
::Delimited(span
, delimed
) => {
431 let delimiter
= Delimiter
::from_internal(delimed
.delim
);
434 kind
: TokenNode
::Group(delimiter
, TokenStream(delimed
.tts
.into())),
// Spacing of a single-char op comes from the internal jointness flag.
439 let op_kind
= if is_joint { Spacing::Joint }
else { Spacing::Alone }
;
441 ($op
:expr
) => { TokenNode::Op($op, op_kind) }
445 ($first
:expr
, $rest
:expr
) => { joint($first, $rest, is_joint, &mut span, next) }
// Helper used by the `joint!` macro: emit `first` as a Joint op and park
// the remaining token in `*next` so it is produced on the next iteration.
448 fn joint(first
: char, rest
: Token
, is_joint
: bool
, span
: &mut syntax_pos
::Span
,
449 next
: &mut Option
<tokenstream
::TokenStream
>)
451 let (first_span
, rest_span
) = (*span
, *span
);
453 let tree
= tokenstream
::TokenTree
::Token(rest_span
, rest
);
454 *next
= Some(if is_joint { tree.joint() }
else { tree.into() }
);
455 TokenNode
::Op(first
, Spacing
::Joint
)
458 let kind
= match token
{
461 Le
=> joint
!('
<'
, Eq
),
462 EqEq
=> joint
!('
='
, Eq
),
463 Ne
=> joint
!('
!'
, Eq
),
464 Ge
=> joint
!('
>'
, Eq
),
466 AndAnd
=> joint
!('
&'
, BinOp(And
)),
467 OrOr
=> joint
!('
|'
, BinOp(Or
)),
470 BinOp(Plus
) => op
!('
+'
),
471 BinOp(Minus
) => op
!('
-'
),
472 BinOp(Star
) => op
!('
*'
),
473 BinOp(Slash
) => op
!('
/'
),
474 BinOp(Percent
) => op
!('
%'
),
475 BinOp(Caret
) => op
!('
^'),
476 BinOp(And
) => op
!('
&'
),
477 BinOp(Or
) => op
!('
|'
),
478 BinOp(Shl
) => joint
!('
<'
, Lt
),
479 BinOp(Shr
) => joint
!('
>'
, Gt
),
480 BinOpEq(Plus
) => joint
!('
+'
, Eq
),
481 BinOpEq(Minus
) => joint
!('
-'
, Eq
),
482 BinOpEq(Star
) => joint
!('
*'
, Eq
),
483 BinOpEq(Slash
) => joint
!('
/'
, Eq
),
484 BinOpEq(Percent
) => joint
!('
%'
, Eq
),
485 BinOpEq(Caret
) => joint
!('
^', Eq
),
486 BinOpEq(And
) => joint
!('
&'
, Eq
),
487 BinOpEq(Or
) => joint
!('
|'
, Eq
),
488 BinOpEq(Shl
) => joint
!('
<'
, Le
),
489 BinOpEq(Shr
) => joint
!('
>'
, Ge
),
492 DotDot
=> joint
!('
.'
, Dot
),
493 DotDotDot
=> joint
!('
.'
, DotDot
),
497 ModSep
=> joint
!('
:'
, Colon
),
498 RArrow
=> joint
!('
-'
, Gt
),
499 LArrow
=> joint
!('
<'
, BinOp(Minus
)),
500 FatArrow
=> joint
!('
='
, Gt
),
503 Question
=> op
!('?'
),
504 Underscore
=> op
!('_'
),
// Identifiers and lifetimes both become interned `Term`s.
506 Ident(ident
) | Lifetime(ident
) => TokenNode
::Term(Term(ident
.name
)),
507 Literal(..) | DocComment(..) => TokenNode
::Literal(self::Literal(token
)),
509 Interpolated(ref nt
) => {
510 // An `Interpolated` token means that we have a `Nonterminal`
511 // which is often a parsed AST item. At this point we now need
512 // to convert the parsed AST to an actual token stream, e.g.
513 // un-parse it basically.
515 // Unfortunately there's not really a great way to do that in a
516 // guaranteed lossless fashion right now. The fallback here is
517 // to just stringify the AST node and reparse it, but this loses
518 // all span information.
520 // As a result, some AST nodes are annotated with the token
521 // stream they came from. Attempt to extract these lossless
522 // token streams before we fall back to the stringification.
523 let mut tokens
= None
;
526 Nonterminal
::NtItem(ref item
) => {
527 tokens
= prepend_attrs(&item
.attrs
, item
.tokens
.as_ref(), span
);
529 Nonterminal
::NtTraitItem(ref item
) => {
530 tokens
= prepend_attrs(&item
.attrs
, item
.tokens
.as_ref(), span
);
532 Nonterminal
::NtImplItem(ref item
) => {
533 tokens
= prepend_attrs(&item
.attrs
, item
.tokens
.as_ref(), span
);
// Lossless path: wrap the cached tokens in an implicit group.
538 tokens
.map(|tokens
| {
539 TokenNode
::Group(Delimiter
::None
,
540 TokenStream(tokens
.clone()))
// Fallback path: stringify + reparse (loses span information).
541 }).unwrap_or_else(|| {
542 __internal
::with_sess(|(sess
, _
)| {
543 TokenNode
::Group(Delimiter
::None
, TokenStream(nt
.1.force
(|| {
544 // FIXME(jseyfried): Avoid this pretty-print + reparse hack
545 let name
= "<macro expansion>".to_owned();
546 let source
= pprust
::token_to_string(&token
);
547 parse_stream_from_source_str(name
, source
, sess
, Some(span
))
// Delimiters are handled by the `Delimited` arm above; lexer-internal
// tokens never reach a finished stream.
553 OpenDelim(..) | CloseDelim(..) => unreachable
!(),
554 Whitespace
| Comment
| Shebang(..) | Eof
=> unreachable
!(),
557 TokenTree { span: Span(span), kind: kind }
// Inverse of `from_internal`: lowers a public `TokenTree` to the
// compiler's internal stream. NOTE(review): most arms of the final
// single-char `op` match (orig. 582-603) and several closing braces are
// elided in this extract.
560 fn to_internal(self) -> tokenstream
::TokenStream
{
561 use syntax
::parse
::token
::*;
562 use syntax
::tokenstream
::{TokenTree, Delimited}
;
564 let (op
, kind
) = match self.kind
{
565 TokenNode
::Op(op
, kind
) => (op
, kind
),
566 TokenNode
::Group(delimiter
, tokens
) => {
567 return TokenTree
::Delimited(self.span
.0, Delimited
{
568 delim
: delimiter
.to_internal(),
569 tts
: tokens
.0.into
(),
572 TokenNode
::Term(symbol
) => {
// Rebuild an identifier carrying this tree's hygiene context.
573 let ident
= ast
::Ident { name: symbol.0, ctxt: self.span.0.ctxt }
;
// A leading quote distinguishes lifetimes from plain identifiers.
575 if symbol
.0.as_str().starts_with("'") { Lifetime(ident) }
else { Ident(ident) }
;
576 return TokenTree
::Token(self.span
.0, token
).into();
578 TokenNode
::Literal(token
) => return TokenTree
::Token(self.span
.0, token
.0).into(),
// Map the single-char op back to a token; unknown chars are a caller bug.
581 let token
= match op
{
591 '
%'
=> BinOp(Percent
),
604 _
=> panic
!("unsupported character {}", op
),
607 let tree
= TokenTree
::Token(self.span
.0, token
);
// Preserve the spacing flag on the produced internal tree.
609 Spacing
::Alone
=> tree
.into(),
610 Spacing
::Joint
=> tree
.joint(),
// Rebuilds an item's full token stream by re-parsing each outer attribute
// and prepending it to the item's cached tokens; returns `None` when no
// cached tokens exist. NOTE(review): the `None` arm of the first match and
// the attribute loop header (orig. 622-623, 628) are elided in this
// extract.
615 fn prepend_attrs(attrs
: &[ast
::Attribute
],
616 tokens
: Option
<&tokenstream
::TokenStream
>,
617 span
: syntax_pos
::Span
)
618 -> Option
<tokenstream
::TokenStream
>
620 let tokens
= match tokens
{
621 Some(tokens
) => tokens
,
// Fast path: nothing to prepend.
624 if attrs
.len() == 0 {
625 return Some(tokens
.clone())
627 let mut builder
= tokenstream
::TokenStreamBuilder
::new();
629 assert_eq
!(attr
.style
, ast
::AttrStyle
::Outer
,
630 "inner attributes should prevent cached tokens from existing");
631 let stream
= __internal
::with_sess(|(sess
, _
)| {
632 // FIXME: Avoid this pretty-print + reparse hack as above
633 let name
= "<macro expansion>".to_owned();
634 let source
= pprust
::attr_to_string(attr
);
635 parse_stream_from_source_str(name
, source
, sess
, Some(span
))
637 builder
.push(stream
);
639 builder
.push(tokens
.clone());
640 Some(builder
.build())
643 /// Permanently unstable internal implementation details of this crate. This
644 /// should not be used.
646 /// These methods are used by the rest of the compiler to generate instances of
647 /// `TokenStream` to hand to macro definitions, as well as consume the output.
649 /// Note that this module is also intentionally separate from the rest of the
650 /// crate. This allows the `#[unstable]` directive below to naturally apply to
651 /// all of the contents.
652 #[unstable(feature = "proc_macro_internals", issue = "27812")]
655 pub use quote
::{Quoter, __rt}
;
660 use syntax
::ext
::base
::ExtCtxt
;
661 use syntax
::ext
::hygiene
::Mark
;
663 use syntax
::parse
::{self, ParseSess}
;
664 use syntax
::parse
::token
::{self, Token}
;
665 use syntax
::tokenstream
;
666 use syntax_pos
::DUMMY_SP
;
668 use super::{TokenStream, LexError}
;
// Compiler-facing helpers (inside the `__internal` module, whose opening is
// elided in this extract) for constructing and deconstructing the public
// `TokenStream`. NOTE(review): several bodies and closing braces (orig.
// 673-678, 685-691, 693-694) are elided.
670 pub fn new_token_stream(item
: P
<ast
::Item
>) -> TokenStream
{
// Wrap a parsed item as an interpolated nonterminal token.
671 let token
= Token
::interpolated(token
::NtItem(item
));
672 TokenStream(tokenstream
::TokenTree
::Token(DUMMY_SP
, token
).into())
// Wrap an internal stream into the public newtype without modification.
675 pub fn token_stream_wrap(inner
: tokenstream
::TokenStream
) -> TokenStream
{
// Parse the stream as a sequence of items, mapping parse errors to the
// opaque `LexError`.
679 pub fn token_stream_parse_items(stream
: TokenStream
) -> Result
<Vec
<P
<ast
::Item
>>, LexError
> {
680 with_sess(move |(sess
, _
)| {
681 let mut parser
= parse
::stream_to_parser(sess
, stream
.0);
682 let mut items
= Vec
::new();
684 while let Some(item
) = try
!(parser
.parse_item().map_err(super::parse_to_lex_err
)) {
// Inverse of `token_stream_wrap`: expose the internal stream.
692 pub fn token_stream_inner(stream
: TokenStream
) -> tokenstream
::TokenStream
{
// Registration hooks implemented by the compiler so generated proc-macro
// crates can hand over their expansion functions. NOTE(review): the
// enclosing `pub trait Registry` header and some parameter lines (orig.
// 698, 703, 707 — presumably the `trait_name`/`name` parameters) are
// elided in this extract.
697 fn register_custom_derive(&mut self,
699 expand
: fn(TokenStream
) -> TokenStream
,
700 attributes
: &[&'
static str]);
702 fn register_attr_proc_macro(&mut self,
704 expand
: fn(TokenStream
, TokenStream
) -> TokenStream
);
706 fn register_bang_proc_macro(&mut self,
708 expand
: fn(TokenStream
) -> TokenStream
);
711 // Emulate scoped_thread_local!() here essentially
// Thread-local (session pointer, expansion mark) pair; the null pointer
// means "no session installed". NOTE(review): the `thread_local!` wrapper
// (orig. 712, 715) is elided in this extract.
713 static CURRENT_SESS
: Cell
<(*const ParseSess
, Mark
)> =
714 Cell
::new((0 as *const _
, Mark
::root()));
// Installs `cx`'s parse session + mark for the duration of `f`, restoring
// the previous pair afterwards via the `Reset` drop guard (so the old
// value is restored even if `f` panics).
717 pub fn set_sess
<F
, R
>(cx
: &ExtCtxt
, f
: F
) -> R
718 where F
: FnOnce() -> R
720 struct Reset { prev: (*const ParseSess, Mark) }
722 impl Drop
for Reset
{
724 CURRENT_SESS
.with(|p
| p
.set(self.prev
));
728 CURRENT_SESS
.with(|p
| {
729 let _reset
= Reset { prev: p.get() }
;
730 p
.set((cx
.parse_sess
, cx
.current_expansion
.mark
));
// Runs `f` with the currently installed session; panics if `set_sess`
// has not been called on this thread.
735 pub fn with_sess
<F
, R
>(f
: F
) -> R
736 where F
: FnOnce((&ParseSess
, Mark
)) -> R
738 let p
= CURRENT_SESS
.with(|p
| p
.get());
739 assert
!(!p
.0.is_null
(), "proc_macro::__internal::with_sess() called \
740 before set_parse_sess()!");
// SAFETY-NOTE(review): dereferencing the raw session pointer relies on
// `set_sess` keeping the `ExtCtxt` alive for the whole closure — confirm.
741 f(unsafe { (&*p.0, p.1) }
)
// Collapses a parser diagnostic into the opaque public `LexError`.
// NOTE(review): the diagnostic is presumably cancelled/emitted on an
// elided line (orig. 746) before being discarded — confirm against the
// full file.
745 fn parse_to_lex_err(mut err
: DiagnosticBuilder
) -> LexError
{
747 LexError { _inner: () }