// src/libsyntax/ext/tt/transcribe.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::LockstepIterSize::*;

use ast::Ident;
use syntax_pos::{Span, DUMMY_SP};
use errors::{Handler, DiagnosticBuilder};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{DocComment, MatchNt, SubstNt};
use parse::token::{Token, Interpolated, NtIdent, NtTT, SpecialMacroVar};
use parse::token;
use parse::lexer::TokenAndSpan;
use tokenstream::{self, TokenTree};

use std::rc::Rc;
use std::ops::Add;
use std::collections::HashMap;
/// An unzipping of `TokenTree`s.
#[derive(Clone)]
struct TtFrame {
    forest: TokenTree,
    idx: usize,
    dotdotdoted: bool,
    sep: Option<Token>,
}
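
// The reader walks the macro's right-hand side by keeping a stack of `TtFrame`s:
// each frame points at one `TokenTree` (`forest`), records the index of the next
// child to process (`idx`), whether the frame was pushed for a `$(...)`
// repetition (`dotdotdoted`), and that repetition's separator token, if any
// (`sep`).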

#[derive(Clone)]
pub struct TtReader<'a> {
    pub sp_diag: &'a Handler,
    /// the unzipped tree:
    stack: Vec<TtFrame>,
    /* for MBE-style macro transcription */
    interpolations: HashMap<Ident, Rc<NamedMatch>>,
    imported_from: Option<Ident>,

    // Some => return imported_from as the next token
    crate_name_next: Option<Span>,
    repeat_idx: Vec<usize>,
    repeat_len: Vec<usize>,
    /* cached: */
    pub cur_tok: Token,
    pub cur_span: Span,
    /// Transform doc comments. Only useful in macro invocations
    pub desugar_doc_comments: bool,
    pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
}
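
// `repeat_idx` and `repeat_len` run in lockstep with the `dotdotdoted` frames on
// `stack`: for each repetition frame currently being transcribed there is one
// entry giving the current iteration index and the total number of iterations at
// that depth.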

/// This can do Macro-By-Example transcription. If, however, `src` contains no
/// `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be `None`.
pub fn new_tt_reader(sp_diag: &Handler,
                     interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                     imported_from: Option<Ident>,
                     src: Vec<tokenstream::TokenTree>)
                     -> TtReader {
    new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
}
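
// Illustrative sketch only (the names `handler`, `bindings` and `rhs` are
// placeholders, not items defined in this module) of pulling tokens out of a
// reader until it is exhausted:
//
//     let mut rdr = new_tt_reader(&handler, Some(bindings), None, rhs);
//     loop {
//         let TokenAndSpan { tok, sp } = tt_next_token(&mut rdr);
//         if tok == token::Eof { break; }
//         // feed `tok`/`sp` to whatever consumes the transcribed tokens
//     }
//
// `new_tt_reader` already primes `cur_tok`/`cur_span`, so the first call to
// `tt_next_token` yields the first transcribed token.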

/// The extra `desugar_doc_comments` flag enables reading doc comments
/// like any other attribute, i.e. as a `meta` item surrounded by `#[ ]` tokens.
///
/// This can do Macro-By-Example transcription. If, however, `src` contains no
/// `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be `None`.
pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
                                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                                   imported_from: Option<Ident>,
                                   src: Vec<tokenstream::TokenTree>,
                                   desugar_doc_comments: bool)
                                   -> TtReader {
    let mut r = TtReader {
        sp_diag: sp_diag,
        stack: vec!(TtFrame {
            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                tts: src,
                // The separator/op/num_captures don't matter: this frame merely
                // holds the root unzipping.
                separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
            })),
            idx: 0,
            dotdotdoted: false,
            sep: None,
        }),
        interpolations: match interp { /* just a convenience */
            None => HashMap::new(),
            Some(x) => x,
        },
        imported_from: imported_from,
        crate_name_next: None,
        repeat_idx: Vec::new(),
        repeat_len: Vec::new(),
        desugar_doc_comments: desugar_doc_comments,
        /* dummy values, never read: */
        cur_tok: token::Eof,
        cur_span: DUMMY_SP,
        fatal_errs: Vec::new(),
    };
    tt_next_token(&mut r); /* get cur_tok and cur_span set up */
    r
}

fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
    r.repeat_idx.iter().fold(start, |ad, idx| {
        match *ad {
            MatchedNonterminal(_) => {
                // end of the line; duplicate henceforth
                ad.clone()
            }
            MatchedSeq(ref ads, _) => ads[*idx].clone()
        }
    })
}

fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
    let matched_opt = r.interpolations.get(&name).cloned();
    matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
}
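
// Example of the lookup: for a matcher like `$( $( $e:expr ),* );*`, the binding
// for `e` is a `MatchedSeq` of `MatchedSeq`s of `MatchedNonterminal`s. While
// transcribing with `repeat_idx == [1, 0]`, looking up `e` descends into the
// outer sequence at index 1 and the inner one at index 0, yielding the
// `MatchedNonterminal` for that particular repetition. A binding that bottoms
// out above the current depth (already a `MatchedNonterminal`) is simply
// duplicated for every iteration below it.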

#[derive(Clone)]
enum LockstepIterSize {
    LisUnconstrained,
    LisConstraint(usize, Ident),
    LisContradiction(String),
}

impl Add for LockstepIterSize {
    type Output = LockstepIterSize;

    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LisUnconstrained => other,
            LisContradiction(_) => self,
            LisConstraint(l_len, ref l_id) => match other {
                LisUnconstrained => self.clone(),
                LisContradiction(_) => other,
                LisConstraint(r_len, _) if l_len == r_len => self.clone(),
                LisConstraint(r_len, r_id) => {
                    LisContradiction(format!("inconsistent lockstep iteration: \
                                              '{}' has {} items, but '{}' has {}",
                                             l_id, l_len, r_id, r_len))
                }
            },
        }
    }
}
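
// The `Add` impl combines the sizes of neighbouring subtrees: `LisUnconstrained`
// is the identity, `LisContradiction` is absorbing, and two `LisConstraint`s
// agree only if their lengths match. So if `$a` matched three items and `$b`
// matched two, `LisConstraint(3, a) + LisConstraint(2, b)` collapses to a
// `LisContradiction` describing the mismatch.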

fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
    match *t {
        TokenTree::Delimited(_, ref delimed) => {
            delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
                size + lockstep_iter_size(tt, r)
            })
        },
        TokenTree::Sequence(_, ref seq) => {
            seq.tts.iter().fold(LisUnconstrained, |size, tt| {
                size + lockstep_iter_size(tt, r)
            })
        },
        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
            match lookup_cur_matched(r, name) {
                Some(matched) => match *matched {
                    MatchedNonterminal(_) => LisUnconstrained,
                    MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
                },
                _ => LisUnconstrained
            },
        TokenTree::Token(..) => LisUnconstrained,
    }
}
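
// Concretely: when the transcriber reaches a sequence such as `$( $a , $b )*`,
// it asks `lockstep_iter_size` how many times the body should repeat. Each
// metavariable that is still repeating at the current depth contributes a
// `LisConstraint` with its sequence length; plain tokens (and variables that
// have already bottomed out) contribute `LisUnconstrained`. If nothing in the
// body is still repeating, the result stays `LisUnconstrained`, which the
// `TokenTree::Sequence` arm of `tt_next_token` below reports as a fatal error.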

/// Return the next token from the TtReader.
/// EFFECT: advances the reader's token field
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
    // FIXME(pcwalton): Bad copy?
    let ret_val = TokenAndSpan {
        tok: r.cur_tok.clone(),
        sp: r.cur_span.clone(),
    };
    loop {
        match r.crate_name_next.take() {
            None => (),
            Some(sp) => {
                r.cur_span = sp;
                r.cur_tok = token::Ident(r.imported_from.unwrap());
                return ret_val;
            },
        }
        let should_pop = match r.stack.last() {
            None => {
                assert_eq!(ret_val.tok, token::Eof);
                return ret_val;
            }
            Some(frame) => {
                if frame.idx < frame.forest.len() {
                    break;
                }
                !frame.dotdotdoted ||
                    *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
            }
        };

        /* done with this set; pop or repeat? */
        if should_pop {
            let prev = r.stack.pop().unwrap();
            match r.stack.last_mut() {
                None => {
                    r.cur_tok = token::Eof;
                    return ret_val;
                }
                Some(frame) => {
                    frame.idx += 1;
                }
            }
            if prev.dotdotdoted {
                r.repeat_idx.pop();
                r.repeat_len.pop();
            }
        } else { /* repeat */
            *r.repeat_idx.last_mut().unwrap() += 1;
            r.stack.last_mut().unwrap().idx = 0;
            if let Some(tk) = r.stack.last().unwrap().sep.clone() {
                r.cur_tok = tk; // repeat same span, I guess
                return ret_val;
            }
        }
    }
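    // At this point the frame on top of the stack still has a child tree left to
    // emit (`idx < forest.len()`); the loop below unzips that tree, either
    // pushing a new frame or returning a token.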
    loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
              with a `TokenTree::Token`, even though it won't happen */
        let t = {
            let frame = r.stack.last().unwrap();
            // FIXME(pcwalton): Bad copy.
            frame.forest.get_tt(frame.idx)
        };
        match t {
            TokenTree::Sequence(sp, seq) => {
                // FIXME(pcwalton): Bad copy.
                match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), r) {
                    LisUnconstrained => {
                        panic!(r.sp_diag.span_fatal(
                            sp.clone(), /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth"));
                    }
                    LisContradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
                    }
                    LisConstraint(len, _) => {
                        if len == 0 {
                            if seq.op == tokenstream::KleeneOp::OneOrMore {
                                // FIXME #2887 blame invoker
                                panic!(r.sp_diag.span_fatal(sp.clone(),
                                                            "this must repeat at least once"));
                            }

                            r.stack.last_mut().unwrap().idx += 1;
                            return tt_next_token(r);
                        }
                        r.repeat_len.push(len);
                        r.repeat_idx.push(0);
                        r.stack.push(TtFrame {
                            idx: 0,
                            dotdotdoted: true,
                            sep: seq.separator.clone(),
                            forest: TokenTree::Sequence(sp, seq),
                        });
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            TokenTree::Token(sp, SubstNt(ident)) => {
                match lookup_cur_matched(r, ident) {
                    None => {
                        r.stack.last_mut().unwrap().idx += 1;
                        r.cur_span = sp;
                        r.cur_tok = SubstNt(ident);
                        return ret_val;
                        // this can't be 0 length, just like TokenTree::Delimited
                    }
                    Some(cur_matched) => {
                        match *cur_matched {
                            // sidestep the interpolation tricks for ident because
                            // (a) idents can be in lots of places, so it'd be a pain
                            // (b) we actually can, since it's a token.
                            MatchedNonterminal(NtIdent(ref sn)) => {
                                r.stack.last_mut().unwrap().idx += 1;
                                r.cur_span = sn.span;
                                r.cur_tok = token::Ident(sn.node);
                                return ret_val;
                            }
                            MatchedNonterminal(NtTT(ref tt)) => {
                                r.stack.push(TtFrame {
                                    forest: TokenTree::Token(sp, Interpolated(NtTT(tt.clone()))),
                                    idx: 0,
                                    dotdotdoted: false,
                                    sep: None,
                                });
                            }
                            MatchedNonterminal(ref other_whole_nt) => {
                                r.stack.last_mut().unwrap().idx += 1;
                                // FIXME(pcwalton): Bad copy.
                                r.cur_span = sp;
                                r.cur_tok = Interpolated((*other_whole_nt).clone());
                                return ret_val;
                            }
                            MatchedSeq(..) => {
                                panic!(r.sp_diag.span_fatal(
                                    sp, /* blame the macro writer */
                                    &format!("variable '{}' is still repeating at this depth",
                                             ident)));
                            }
                        }
                    }
                }
            }
            // TokenTree::Delimited or any token that can be unzipped
            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
                // do not advance the idx yet
                r.stack.push(TtFrame {
                    forest: seq,
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
                // if this could be 0-length, we'd need to potentially recur here
            }
            TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
                r.stack.push(TtFrame {
                    forest: TokenTree::Token(sp, DocComment(name)),
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
            }
            TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
                r.stack.last_mut().unwrap().idx += 1;

                if r.imported_from.is_some() {
                    r.cur_span = sp;
                    r.cur_tok = token::ModSep;
                    r.crate_name_next = Some(sp);
                    return ret_val;
                }

                // otherwise emit nothing and proceed to the next token
            }
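            // Note on the `$crate` arm above: when the macro was imported from
            // another crate (`imported_from` is `Some`), `$crate` expands to
            // `::$crate_name`. The `::` (`ModSep`) is emitted now, and
            // `crate_name_next` makes the very next call to `tt_next_token`
            // return the crate's name as an identifier. For a macro defined in
            // the local crate, `$crate` expands to nothing.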
            TokenTree::Token(sp, tok) => {
                r.cur_span = sp;
                r.cur_tok = tok;
                r.stack.last_mut().unwrap().idx += 1;
                return ret_val;
            }
        }
    }
}