// Imported from rustc.git (src/libsyntax/tokenstream.rs), upstream version 1.11.0+dfsg1,
// via the git.proxmox.com mirror.
1 // Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! # Token Trees
12 //! TokenTrees are syntactic forms for dealing with tokens. The description below is
13 //! more complete; in short a TokenTree is a single token, a delimited sequence of token
14 //! trees, or a sequence with repetition for list splicing as part of macro expansion.
15
16 use ast::{AttrStyle};
17 use codemap::{Span};
18 use ext::base;
19 use ext::tt::macro_parser;
20 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
21 use parse::lexer;
22 use parse::token;
23
24 /// A delimited sequence of token trees
/// A delimited sequence of token trees: the trees between a matched pair of
/// delimiters (e.g. parentheses, brackets, or braces), together with the
/// delimiter kind and the spans of both delimiter tokens.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
    /// The type of delimiter (paren, bracket, or brace)
    pub delim: token::DelimToken,
    /// The span covering the opening delimiter
    pub open_span: Span,
    /// The delimited sequence of token trees (delimiters not included)
    pub tts: Vec<TokenTree>,
    /// The span covering the closing delimiter
    pub close_span: Span,
}
36
37 impl Delimited {
38 /// Returns the opening delimiter as a token.
39 pub fn open_token(&self) -> token::Token {
40 token::OpenDelim(self.delim)
41 }
42
43 /// Returns the closing delimiter as a token.
44 pub fn close_token(&self) -> token::Token {
45 token::CloseDelim(self.delim)
46 }
47
48 /// Returns the opening delimiter as a token tree.
49 pub fn open_tt(&self) -> TokenTree {
50 TokenTree::Token(self.open_span, self.open_token())
51 }
52
53 /// Returns the closing delimiter as a token tree.
54 pub fn close_tt(&self) -> TokenTree {
55 TokenTree::Token(self.close_span, self.close_token())
56 }
57 }
58
59 /// A sequence of token trees
/// A Kleene-style repetition of token trees, as written in an MBE macro:
/// `$(...)*` or `$(...)+`, optionally with a separator token.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct SequenceRepetition {
    /// The sequence of token trees being repeated
    pub tts: Vec<TokenTree>,
    /// The optional separator token emitted between repetitions
    pub separator: Option<token::Token>,
    /// Whether the sequence can be repeated zero (*), or one or more times (+)
    pub op: KleeneOp,
    /// The number of `MatchNt`s that appear in the sequence (and subsequences)
    pub num_captures: usize,
}
71
72 /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
73 /// for token sequences.
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
    /// `*`: the sequence may appear zero or more times
    ZeroOrMore,
    /// `+`: the sequence must appear at least once
    OneOrMore,
}
79
80 /// When the main rust parser encounters a syntax-extension invocation, it
81 /// parses the arguments to the invocation as a token-tree. This is a very
82 /// loose structure, such that all sorts of different AST-fragments can
83 /// be passed to syntax extensions using a uniform type.
84 ///
85 /// If the syntax extension is an MBE macro, it will attempt to match its
86 /// LHS token tree against the provided token tree, and if it finds a
87 /// match, will transcribe the RHS token tree, splicing in any captured
88 /// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
89 ///
90 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
91 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TokenTree {
    /// A single token, with its span
    Token(Span, token::Token),
    /// A delimited sequence of token trees (see `Delimited`)
    Delimited(Span, Delimited),

    // This only makes sense in MBE macros.

    /// A kleene-style repetition sequence with a span (see `SequenceRepetition`)
    // FIXME(eddyb) #12938 Use DST.
    Sequence(Span, SequenceRepetition),
}
105
impl TokenTree {
    /// Returns the number of "virtual" sub-token-trees this tree exposes
    /// through `get_tt`. Some single tokens are expanded on the fly:
    /// doc comments behave like the attribute they desugar to,
    /// `SpecialVarNt` like `$ident`, and `MatchNt` like `$name:kind`.
    /// The counts here must stay in sync with the arms of `get_tt`.
    pub fn len(&self) -> usize {
        match *self {
            // `/// ...` desugars to `#[doc = "..."]`: `#` plus one bracketed
            // group = 2 trees; `//! ...` adds the `!` for a total of 3.
            TokenTree::Token(_, token::DocComment(name)) => {
                match doc_comment_style(&name.as_str()) {
                    AttrStyle::Outer => 2,
                    AttrStyle::Inner => 3
                }
            }
            // Expands to `$` followed by an identifier.
            TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
            // Expands to `$name`, `:`, and the fragment kind.
            TokenTree::Token(_, token::MatchNt(..)) => 3,
            // The two delimiter tokens count as the first and last trees.
            TokenTree::Delimited(_, ref delimed) => {
                delimed.tts.len() + 2
            }
            TokenTree::Sequence(_, ref seq) => {
                seq.tts.len()
            }
            // Any other single token has no sub-trees.
            TokenTree::Token(..) => 0
        }
    }

    /// Returns the `index`-th virtual sub-token-tree (see `len` for how many
    /// each variant exposes). Panics if `index` is out of range or the tree
    /// has no expansion ("Cannot expand a token tree").
    pub fn get_tt(&self, index: usize) -> TokenTree {
        match (self, index) {
            // Doc comment, tree 0: the leading `#` of the desugared attribute.
            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
                TokenTree::Token(sp, token::Pound)
            }
            // Inner doc comment (`//!`), tree 1: the `!` of `#![...]`.
            (&TokenTree::Token(sp, token::DocComment(name)), 1)
            if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
                TokenTree::Token(sp, token::Not)
            }
            // Doc comment, final tree: the `[doc = r"..."]` group itself.
            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
                let stripped = strip_doc_comment_decoration(&name.as_str());

                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
                // required to wrap the text in a raw string literal (`r##"..."##`).
                let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
                    // Each `"` starts a potential closing sequence (count 1);
                    // each following `#` extends it; anything else resets.
                    *cnt = if x == '"' {
                        1
                    } else if *cnt != 0 && x == '#' {
                        *cnt + 1
                    } else {
                        0
                    };
                    Some(*cnt)
                }).max().unwrap_or(0);

                TokenTree::Delimited(sp, Delimited {
                    delim: token::Bracket,
                    open_span: sp,
                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
                              TokenTree::Token(sp, token::Eq),
                              TokenTree::Token(sp, token::Literal(
                                  token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
                    close_span: sp,
                })
            }
            // Delimited: tree 0 is the opening delimiter, tree len+1 the
            // closing one, and the interior trees are shifted by one.
            (&TokenTree::Delimited(_, ref delimed), _) => {
                if index == 0 {
                    return delimed.open_tt();
                }
                if index == delimed.tts.len() + 1 {
                    return delimed.close_tt();
                }
                delimed.tts[index - 1].clone()
            }
            // `SpecialVarNt` expands to `$` + identifier.
            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
                let v = [TokenTree::Token(sp, token::Dollar),
                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
                v[index].clone()
            }
            // `MatchNt` expands to `$name` + `:` + fragment kind.
            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
                let v = [TokenTree::Token(sp, token::SubstNt(name)),
                         TokenTree::Token(sp, token::Colon),
                         TokenTree::Token(sp, token::Ident(kind))];
                v[index].clone()
            }
            // Sequences expose their inner trees directly, unshifted.
            (&TokenTree::Sequence(_, ref seq), _) => {
                seq.tts[index].clone()
            }
            _ => panic!("Cannot expand a token tree")
        }
    }

    /// Returns the `Span` corresponding to this token tree.
    pub fn get_span(&self) -> Span {
        match *self {
            TokenTree::Token(span, _) => span,
            TokenTree::Delimited(span, _) => span,
            TokenTree::Sequence(span, _) => span,
        }
    }

    /// Use this token tree as a matcher to parse given tts.
    /// `mtch` is the macro's LHS matcher; `tts` are the invocation's
    /// argument token trees, fed through a fresh token-tree reader.
    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
                 -> macro_parser::NamedParseResult {
        // `None` is because we're not interpolating
        let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
                                                         None,
                                                         None,
                                                         tts.iter().cloned().collect(),
                                                         true);
        macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
    }
}
210