// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
1a4d82fc | 10 | use self::LockstepIterSize::*; |
223e47cc | 11 | |
223e47cc | 12 | use ast; |
1a4d82fc JJ |
13 | use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident}; |
14 | use codemap::{Span, DUMMY_SP}; | |
15 | use diagnostic::SpanHandler; | |
16 | use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; | |
17 | use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt}; | |
18 | use parse::token::{Token, NtIdent, SpecialMacroVar}; | |
19 | use parse::token; | |
223e47cc LB |
20 | use parse::lexer::TokenAndSpan; |
21 | ||
1a4d82fc JJ |
22 | use std::rc::Rc; |
23 | use std::ops::Add; | |
24 | use std::collections::HashMap; | |
223e47cc | 25 | |
1a4d82fc JJ |
/// An unzipping of `TokenTree`s: one level of the depth-first walk over
/// the token trees being transcribed.
#[derive(Clone)]
struct TtFrame {
    /// The token tree whose children this frame is iterating over.
    forest: TokenTree,
    /// Index of the next child of `forest` to yield.
    idx: usize,
    /// True if `forest` is a `TtSequence` repetition (a `$(...)` group).
    dotdotdoted: bool,
    /// Separator token to emit between iterations of a repetition, if any.
    sep: Option<Token>,
}
34 | ||
1a4d82fc JJ |
/// Reader that transcribes a macro's token trees, walking them via a
/// stack of `TtFrame`s and substituting interpolated fragments as it goes.
#[derive(Clone)]
pub struct TtReader<'a> {
    pub sp_diag: &'a SpanHandler,
    /// the unzipped tree:
    stack: Vec<TtFrame>,
    /* for MBE-style macro transcription */
    /// Matched fragments, keyed by the `$name` they were bound to.
    interpolations: HashMap<Ident, Rc<NamedMatch>>,
    /// Crate this macro was imported from, if any (consumed when a
    /// crate-macro-var token is transcribed).
    imported_from: Option<Ident>,

    // Some => return imported_from as the next token
    crate_name_next: Option<Span>,
    /// Current index within each in-progress repetition (innermost last).
    repeat_idx: Vec<usize>,
    /// Total iteration count for each in-progress repetition (parallel to
    /// `repeat_idx`).
    repeat_len: Vec<usize>,
    /* cached: */
    pub cur_tok: Token,
    pub cur_span: Span,
    /// Transform doc comments. Only useful in macro invocations
    pub desugar_doc_comments: bool,
}
54 | ||
55 | /// This can do Macro-By-Example transcription. On the other hand, if | |
56 | /// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can | |
57 | /// (and should) be None. | |
58 | pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, | |
59 | interp: Option<HashMap<Ident, Rc<NamedMatch>>>, | |
60 | imported_from: Option<Ident>, | |
61 | src: Vec<ast::TokenTree>) | |
62 | -> TtReader<'a> { | |
63 | new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false) | |
223e47cc LB |
64 | } |
65 | ||
1a4d82fc JJ |
66 | /// The extra `desugar_doc_comments` flag enables reading doc comments |
67 | /// like any other attribute which consists of `meta` and surrounding #[ ] tokens. | |
68 | /// | |
69 | /// This can do Macro-By-Example transcription. On the other hand, if | |
70 | /// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can | |
71 | /// (and should) be None. | |
72 | pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, | |
73 | interp: Option<HashMap<Ident, Rc<NamedMatch>>>, | |
74 | imported_from: Option<Ident>, | |
75 | src: Vec<ast::TokenTree>, | |
76 | desugar_doc_comments: bool) | |
77 | -> TtReader<'a> { | |
78 | let mut r = TtReader { | |
223e47cc | 79 | sp_diag: sp_diag, |
1a4d82fc JJ |
80 | stack: vec!(TtFrame { |
81 | forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { | |
82 | tts: src, | |
83 | // doesn't matter. This merely holds the root unzipping. | |
84 | separator: None, op: ast::ZeroOrMore, num_captures: 0 | |
85 | })), | |
86 | idx: 0, | |
223e47cc LB |
87 | dotdotdoted: false, |
88 | sep: None, | |
1a4d82fc JJ |
89 | }), |
90 | interpolations: match interp { /* just a convenience */ | |
970d7e83 | 91 | None => HashMap::new(), |
1a4d82fc | 92 | Some(x) => x, |
223e47cc | 93 | }, |
1a4d82fc JJ |
94 | imported_from: imported_from, |
95 | crate_name_next: None, | |
96 | repeat_idx: Vec::new(), | |
97 | repeat_len: Vec::new(), | |
98 | desugar_doc_comments: desugar_doc_comments, | |
223e47cc | 99 | /* dummy values, never read: */ |
1a4d82fc JJ |
100 | cur_tok: token::Eof, |
101 | cur_span: DUMMY_SP, | |
223e47cc | 102 | }; |
1a4d82fc JJ |
103 | tt_next_token(&mut r); /* get cur_tok and cur_span set up */ |
104 | r | |
223e47cc LB |
105 | } |
106 | ||
1a4d82fc JJ |
107 | fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> { |
108 | r.repeat_idx.iter().fold(start, |ad, idx| { | |
109 | match *ad { | |
110 | MatchedNonterminal(_) => { | |
111 | // end of the line; duplicate henceforth | |
112 | ad.clone() | |
113 | } | |
114 | MatchedSeq(ref ads, _) => ads[*idx].clone() | |
223e47cc | 115 | } |
1a4d82fc | 116 | }) |
223e47cc LB |
117 | } |
118 | ||
1a4d82fc JJ |
119 | fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> { |
120 | let matched_opt = r.interpolations.get(&name).cloned(); | |
121 | matched_opt.map(|s| lookup_cur_matched_by_matched(r, s)) | |
223e47cc LB |
122 | } |
123 | ||
1a4d82fc JJ |
/// The size requirement that matched `$name` variables place on a
/// repetition: how many times the lockstep iteration must run.
#[derive(Clone)]
enum LockstepIterSize {
    /// No repeating variable encountered yet; any size is acceptable.
    LisUnconstrained,
    /// Must iterate exactly this many times, as demanded by the named variable.
    LisConstraint(usize, Ident),
    /// Two variables demanded different sizes; the message describes the clash.
    LisContradiction(String),
}
130 | ||
1a4d82fc JJ |
131 | impl Add for LockstepIterSize { |
132 | type Output = LockstepIterSize; | |
133 | ||
134 | fn add(self, other: LockstepIterSize) -> LockstepIterSize { | |
135 | match self { | |
136 | LisUnconstrained => other, | |
137 | LisContradiction(_) => self, | |
138 | LisConstraint(l_len, ref l_id) => match other { | |
139 | LisUnconstrained => self.clone(), | |
140 | LisContradiction(_) => other, | |
141 | LisConstraint(r_len, _) if l_len == r_len => self.clone(), | |
142 | LisConstraint(r_len, r_id) => { | |
143 | let l_n = token::get_ident(l_id.clone()); | |
144 | let r_n = token::get_ident(r_id); | |
145 | LisContradiction(format!("inconsistent lockstep iteration: \ | |
146 | '{:?}' has {} items, but '{:?}' has {}", | |
147 | l_n, l_len, r_n, r_len).to_string()) | |
148 | } | |
149 | }, | |
970d7e83 LB |
150 | } |
151 | } | |
223e47cc | 152 | } |
223e47cc | 153 | |
1a4d82fc | 154 | fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { |
970d7e83 | 155 | match *t { |
1a4d82fc JJ |
156 | TtDelimited(_, ref delimed) => { |
157 | delimed.tts.iter().fold(LisUnconstrained, |size, tt| { | |
158 | size + lockstep_iter_size(tt, r) | |
159 | }) | |
160 | }, | |
161 | TtSequence(_, ref seq) => { | |
162 | seq.tts.iter().fold(LisUnconstrained, |size, tt| { | |
163 | size + lockstep_iter_size(tt, r) | |
164 | }) | |
165 | }, | |
166 | TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) => | |
167 | match lookup_cur_matched(r, name) { | |
168 | Some(matched) => match *matched { | |
169 | MatchedNonterminal(_) => LisUnconstrained, | |
170 | MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name), | |
171 | }, | |
172 | _ => LisUnconstrained | |
173 | }, | |
174 | TtToken(..) => LisUnconstrained, | |
223e47cc LB |
175 | } |
176 | } | |
177 | ||
1a4d82fc JJ |
/// Return the next token from the TtReader.
/// EFFECT: advances the reader's token field
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
    // FIXME(pcwalton): Bad copy?
    // `ret_val` is the token computed by the *previous* call; this call's
    // job is to leave `cur_tok`/`cur_span` describing the one after it.
    let ret_val = TokenAndSpan {
        tok: r.cur_tok.clone(),
        sp: r.cur_span.clone(),
    };
    // Phase 1: unwind finished frames (or restart repetitions) until the
    // top-of-stack frame has a token tree left to yield.
    loop {
        // A pending crate-name emission (scheduled by the SpecialMacroVar
        // arm below): yield `imported_from` as the token after the `::`.
        match r.crate_name_next.take() {
            None => (),
            Some(sp) => {
                r.cur_span = sp;
                r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
                return ret_val;
            },
        }
        let should_pop = match r.stack.last() {
            None => {
                // Empty stack: Eof was already produced on an earlier call.
                assert_eq!(ret_val.tok, token::Eof);
                return ret_val;
            }
            Some(frame) => {
                if frame.idx < frame.forest.len() {
                    // This frame still has children to emit.
                    break;
                }
                // Frame exhausted: pop it, unless it is a repetition
                // (`dotdotdoted`) with iterations still to go.
                !frame.dotdotdoted ||
                    *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
            }
        };

        /* done with this set; pop or repeat? */
        if should_pop {
            let prev = r.stack.pop().unwrap();
            match r.stack.last_mut() {
                None => {
                    // Popped the root frame: transcription is complete.
                    r.cur_tok = token::Eof;
                    return ret_val;
                }
                Some(frame) => {
                    // Step the parent past the subtree we just finished.
                    frame.idx += 1;
                }
            }
            if prev.dotdotdoted {
                // Leaving a repetition: discard its lockstep counters.
                r.repeat_idx.pop();
                r.repeat_len.pop();
            }
        } else { /* repeat */
            // Start the next iteration of the current repetition ...
            *r.repeat_idx.last_mut().unwrap() += 1;
            r.stack.last_mut().unwrap().idx = 0;
            // ... emitting the separator (e.g. the `,` of `$($x),*`) first,
            // if the sequence has one.
            match r.stack.last().unwrap().sep.clone() {
                Some(tk) => {
                    r.cur_tok = tk; /* repeat same span, I guess */
                    return ret_val;
                }
                None => {}
            }
        }
    }
    // Phase 2: descend into the next token tree until a plain token is found.
    loop { /* because it's easiest, this handles `TtDelimited` not starting
              with a `TtToken`, even though it won't happen */
        let t = {
            let frame = r.stack.last().unwrap();
            // FIXME(pcwalton): Bad copy.
            frame.forest.get_tt(frame.idx)
        };
        match t {
            TtSequence(sp, seq) => {
                // FIXME(pcwalton): Bad copy.
                // Determine how many times this `$(...)` must repeat from
                // the matched variables it mentions.
                match lockstep_iter_size(&TtSequence(sp, seq.clone()),
                                         r) {
                    LisUnconstrained => {
                        panic!(r.sp_diag.span_fatal(
                            sp.clone(), /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth"));
                    }
                    LisContradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
                    }
                    LisConstraint(len, _) => {
                        if len == 0 {
                            if seq.op == ast::OneOrMore {
                                // FIXME #2887 blame invoker
                                panic!(r.sp_diag.span_fatal(sp.clone(),
                                                            "this must repeat at least once"));
                            }

                            // Zero iterations: skip the whole sequence and
                            // restart token selection.
                            r.stack.last_mut().unwrap().idx += 1;
                            return tt_next_token(r);
                        }
                        // Enter the repetition: set up lockstep counters
                        // and push a frame for its body.
                        r.repeat_len.push(len);
                        r.repeat_idx.push(0);
                        r.stack.push(TtFrame {
                            idx: 0,
                            dotdotdoted: true,
                            sep: seq.separator.clone(),
                            forest: TtSequence(sp, seq),
                        });
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            TtToken(sp, SubstNt(ident, namep)) => {
                r.stack.last_mut().unwrap().idx += 1;
                match lookup_cur_matched(r, ident) {
                    None => {
                        // No binding for `$ident`: emit the `$ident` token
                        // itself, unexpanded.
                        r.cur_span = sp;
                        r.cur_tok = SubstNt(ident, namep);
                        return ret_val;
                        // this can't be 0 length, just like TtDelimited
                    }
                    Some(cur_matched) => {
                        match *cur_matched {
                            // sidestep the interpolation tricks for ident because
                            // (a) idents can be in lots of places, so it'd be a pain
                            // (b) we actually can, since it's a token.
                            MatchedNonterminal(NtIdent(ref sn, b)) => {
                                r.cur_span = sp;
                                r.cur_tok = token::Ident(**sn, b);
                                return ret_val;
                            }
                            MatchedNonterminal(ref other_whole_nt) => {
                                // FIXME(pcwalton): Bad copy.
                                r.cur_span = sp;
                                r.cur_tok = token::Interpolated((*other_whole_nt).clone());
                                return ret_val;
                            }
                            MatchedSeq(..) => {
                                // `$ident` mentioned without enough enclosing
                                // `$(...)` levels to reach a leaf binding.
                                panic!(r.sp_diag.span_fatal(
                                    r.cur_span, /* blame the macro writer */
                                    &format!("variable '{:?}' is still repeating at this depth",
                                             token::get_ident(ident))));
                            }
                        }
                    }
                }
            }
            // TtDelimited or any token that can be unzipped
            seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
                // do not advance the idx yet
                r.stack.push(TtFrame {
                    forest: seq,
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
                // if this could be 0-length, we'd need to potentially recur here
            }
            TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
                // Push a frame holding the doc comment so it is re-read via
                // `get_tt`; presumably that expands it to attribute tokens —
                // NOTE(review): confirm against `TokenTree::get_tt`.
                r.stack.push(TtFrame {
                    forest: TtToken(sp, DocComment(name)),
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
            }
            TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
                r.stack.last_mut().unwrap().idx += 1;

                if r.imported_from.is_some() {
                    // Crate macro var in an imported macro: emit `::` now
                    // and schedule the crate name for the next call.
                    r.cur_span = sp;
                    r.cur_tok = token::ModSep;
                    r.crate_name_next = Some(sp);
                    return ret_val;
                }

                // otherwise emit nothing and proceed to the next token
            }
            TtToken(sp, tok) => {
                // Ordinary token: emit it and advance past it.
                r.cur_span = sp;
                r.cur_tok = tok;
                r.stack.last_mut().unwrap().idx += 1;
                return ret_val;
            }
        }
    }
}