// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! TokenTrees are syntactic forms for dealing with tokens. The description below is
//! more complete; in short a TokenTree is a single token, a delimited sequence of token
//! trees, or a sequence with repetition for list splicing as part of macro expansion.
use ast::AttrStyle;
use codemap::Span;
use ext::base;
use ext::tt::macro_parser;
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::lexer;
use parse::token;
24 /// A delimited sequence of token trees
25 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
26 pub struct Delimited
{
27 /// The type of delimiter
28 pub delim
: token
::DelimToken
,
29 /// The span covering the opening delimiter
31 /// The delimited sequence of token trees
32 pub tts
: Vec
<TokenTree
>,
33 /// The span covering the closing delimiter
38 /// Returns the opening delimiter as a token.
39 pub fn open_token(&self) -> token
::Token
{
40 token
::OpenDelim(self.delim
)
43 /// Returns the closing delimiter as a token.
44 pub fn close_token(&self) -> token
::Token
{
45 token
::CloseDelim(self.delim
)
48 /// Returns the opening delimiter as a token tree.
49 pub fn open_tt(&self) -> TokenTree
{
50 TokenTree
::Token(self.open_span
, self.open_token())
53 /// Returns the closing delimiter as a token tree.
54 pub fn close_tt(&self) -> TokenTree
{
55 TokenTree
::Token(self.close_span
, self.close_token())
59 /// A sequence of token trees
60 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
61 pub struct SequenceRepetition
{
62 /// The sequence of token trees
63 pub tts
: Vec
<TokenTree
>,
64 /// The optional separator
65 pub separator
: Option
<token
::Token
>,
66 /// Whether the sequence can be repeated zero (*), or one or more times (+)
68 /// The number of `MatchNt`s that appear in the sequence (and subsequences)
69 pub num_captures
: usize,
72 /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
73 /// for token sequences.
74 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
80 /// When the main rust parser encounters a syntax-extension invocation, it
81 /// parses the arguments to the invocation as a token-tree. This is a very
82 /// loose structure, such that all sorts of different AST-fragments can
83 /// be passed to syntax extensions using a uniform type.
85 /// If the syntax extension is an MBE macro, it will attempt to match its
86 /// LHS token tree against the provided token tree, and if it finds a
87 /// match, will transcribe the RHS token tree, splicing in any captured
88 /// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
90 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
91 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
92 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
95 Token(Span
, token
::Token
),
96 /// A delimited sequence of token trees
97 Delimited(Span
, Delimited
),
99 // This only makes sense in MBE macros.
101 /// A kleene-style repetition sequence with a span
102 // FIXME(eddyb) #12938 Use DST.
103 Sequence(Span
, SequenceRepetition
),
107 pub fn len(&self) -> usize {
109 TokenTree
::Token(_
, token
::DocComment(name
)) => {
110 match doc_comment_style(&name
.as_str()) {
111 AttrStyle
::Outer
=> 2,
112 AttrStyle
::Inner
=> 3
115 TokenTree
::Token(_
, token
::SpecialVarNt(..)) => 2,
116 TokenTree
::Token(_
, token
::MatchNt(..)) => 3,
117 TokenTree
::Delimited(_
, ref delimed
) => {
118 delimed
.tts
.len() + 2
120 TokenTree
::Sequence(_
, ref seq
) => {
123 TokenTree
::Token(..) => 0
127 pub fn get_tt(&self, index
: usize) -> TokenTree
{
128 match (self, index
) {
129 (&TokenTree
::Token(sp
, token
::DocComment(_
)), 0) => {
130 TokenTree
::Token(sp
, token
::Pound
)
132 (&TokenTree
::Token(sp
, token
::DocComment(name
)), 1)
133 if doc_comment_style(&name
.as_str()) == AttrStyle
::Inner
=> {
134 TokenTree
::Token(sp
, token
::Not
)
136 (&TokenTree
::Token(sp
, token
::DocComment(name
)), _
) => {
137 let stripped
= strip_doc_comment_decoration(&name
.as_str());
139 // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
140 // required to wrap the text.
141 let num_of_hashes
= stripped
.chars().scan(0, |cnt
, x
| {
144 } else if *cnt != 0 && x == '#' {
150 }).max().unwrap_or(0);
152 TokenTree::Delimited(sp, Delimited {
153 delim: token::Bracket,
155 tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc
"))),
156 TokenTree::Token(sp, token::Eq),
157 TokenTree::Token(sp, token::Literal(
158 token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
162 (&TokenTree::Delimited(_, ref delimed), _) => {
164 return delimed.open_tt();
166 if index == delimed.tts.len() + 1 {
167 return delimed.close_tt();
169 delimed.tts[index - 1].clone()
171 (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
172 let v = [TokenTree::Token(sp, token::Dollar),
173 TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
176 (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
177 let v = [TokenTree::Token(sp, token::SubstNt(name)),
178 TokenTree::Token(sp, token::Colon),
179 TokenTree::Token(sp, token::Ident(kind))];
182 (&TokenTree::Sequence(_, ref seq), _) => {
183 seq.tts[index].clone()
185 _ => panic!("Cannot expand a token tree
")
189 /// Returns the `Span` corresponding to this token tree.
190 pub fn get_span(&self) -> Span {
192 TokenTree::Token(span, _) => span,
193 TokenTree::Delimited(span, _) => span,
194 TokenTree::Sequence(span, _) => span,
198 /// Use this token tree as a matcher to parse given tts.
199 pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
200 -> macro_parser::NamedParseResult {
201 // `None` is because we're not interpolating
202 let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
205 tts.iter().cloned().collect(),
207 macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)