// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken};
use codemap::{Span, DUMMY_SP};
use ext::base::{ExtCtxt, MacResult, SyntaxExtension};
use ext::base::{NormalTT, TTMacroExpander};
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::{parse, parse_or_else};
use parse::lexer::new_tt_reader;
use parse::parser::Parser;
use parse::token::{self, special_idents, gensym_ident, NtTT, Token};
use parse::token::Token::*;
use print;
use ptr::P;

use util::small_vector::SmallVector;

use std::cell::RefCell;
use std::rc::Rc;

struct ParserAnyMacro<'a> {
    parser: RefCell<Parser<'a>>,

    /// Span of the expansion site of the macro this parser is for
    site_span: Span,
    /// The ident of the macro we're parsing
    macro_ident: ast::Ident
}

impl<'a> ParserAnyMacro<'a> {
    /// Make sure we don't have any tokens left to parse, so we don't
    /// silently drop anything. `allow_semi` exists so that "optional"
    /// semicolons at the end of normal expressions aren't complained
    /// about; e.g. the semicolon in `macro_rules! kapow { () => {
    /// panic!(); } }` isn't picked up by `.parse_expr()`, but it is
    /// allowed to be there.
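    ///
    /// For example, if an arm expanded to `0; 0` and were used in expression
    /// position, `.parse_expr()` would consume the first `0`, the optional `;`
    /// would be bumped, and the trailing `0` would hit the "macro expansion
    /// ignores token" error below instead of being silently dropped.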
    fn ensure_complete_parse(&self, allow_semi: bool) {
        let mut parser = self.parser.borrow_mut();
        if allow_semi && parser.token == token::Semi {
            panictry!(parser.bump())
        }
        if parser.token != token::Eof {
            let token_str = parser.this_token_to_string();
            let msg = format!("macro expansion ignores token `{}` and any \
                               following",
                              token_str);
            let span = parser.span;
            parser.span_err(span, &msg[..]);

            let name = token::get_ident(self.macro_ident);
            let msg = format!("caused by the macro expansion here; the usage \
                               of `{}` is likely invalid in this context",
                              name);
            parser.span_note(self.site_span, &msg[..]);
        }
    }
}

impl<'a> MacResult for ParserAnyMacro<'a> {
    fn make_expr(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Expr>> {
        let ret = self.parser.borrow_mut().parse_expr();
        self.ensure_complete_parse(true);
        Some(ret)
    }
    fn make_pat(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Pat>> {
        let ret = self.parser.borrow_mut().parse_pat();
        self.ensure_complete_parse(false);
        Some(ret)
    }
    fn make_items(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Item>>> {
        let mut ret = SmallVector::zero();
        while let Some(item) = self.parser.borrow_mut().parse_item() {
            ret.push(item);
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }

    fn make_impl_items(self: Box<ParserAnyMacro<'a>>)
                       -> Option<SmallVector<P<ast::ImplItem>>> {
        let mut ret = SmallVector::zero();
        loop {
            let mut parser = self.parser.borrow_mut();
            match parser.token {
                token::Eof => break,
                _ => ret.push(panictry!(parser.parse_impl_item()))
            }
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }

    fn make_stmts(self: Box<ParserAnyMacro<'a>>)
                  -> Option<SmallVector<P<ast::Stmt>>> {
        let mut ret = SmallVector::zero();
        loop {
            let mut parser = self.parser.borrow_mut();
            match parser.token {
                token::Eof => break,
                _ => match parser.parse_stmt_nopanic() {
                    Ok(maybe_stmt) => match maybe_stmt {
                        Some(stmt) => ret.push(stmt),
                        None => (),
                    },
                    Err(_) => break,
                }
            }
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }
}

struct MacroRulesMacroExpander {
    name: ast::Ident,
    imported_from: Option<ast::Ident>,
    lhses: Vec<Rc<NamedMatch>>,
    rhses: Vec<Rc<NamedMatch>>,
}

impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(&self,
                   cx: &'cx mut ExtCtxt,
                   sp: Span,
                   arg: &[ast::TokenTree])
                   -> Box<MacResult+'cx> {
        generic_extension(cx,
                          sp,
                          self.name,
                          self.imported_from,
                          arg,
                          &self.lhses,
                          &self.rhses)
    }
}

/// Given `lhses` and `rhses`, this is the new macro we create
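///
/// Each arm's matcher in `lhses` is tried against `arg` in order; the first
/// arm that matches has its corresponding `rhses[i]` transcribed using the
/// captured `named_matches`. If no arm matches, we report the failure of the
/// arm that got furthest into the input. For example, given arms
/// `(0) => (0)` and `($e:expr) => ($e + 1)`, an invocation carrying the
/// tokens `1 + 1` fails the first arm at its first token and expands via the
/// second.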
fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                          sp: Span,
                          name: ast::Ident,
                          imported_from: Option<ast::Ident>,
                          arg: &[ast::TokenTree],
                          lhses: &[Rc<NamedMatch>],
                          rhses: &[Rc<NamedMatch>])
                          -> Box<MacResult+'cx> {
    if cx.trace_macros() {
        println!("{}! {{ {} }}",
                 token::get_ident(name),
                 print::pprust::tts_to_string(arg));
    }

    // Which arm's failure should we report? (the one furthest along)
    let mut best_fail_spot = DUMMY_SP;
    let mut best_fail_msg = "internal error: ran no matchers".to_string();

    for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
        match **lhs {
            MatchedNonterminal(NtTT(ref lhs_tt)) => {
                let lhs_tt = match **lhs_tt {
                    TtDelimited(_, ref delim) => &delim.tts[..],
                    _ => panic!(cx.span_fatal(sp, "malformed macro lhs"))
                };

                match TokenTree::parse(cx, lhs_tt, arg) {
                    Success(named_matches) => {
                        let rhs = match *rhses[i] {
                            // okay, what's your transcriber?
                            MatchedNonterminal(NtTT(ref tt)) => {
                                match **tt {
                                    // ignore delimiters
                                    TtDelimited(_, ref delimed) => delimed.tts.clone(),
                                    _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")),
                                }
                            },
                            _ => cx.span_bug(sp, "bad thing in rhs")
                        };
                        // rhs has holes (`$id` and `$(...)`) that need to be filled
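                        // e.g. for an arm `($e:expr) => ($e + 1)` invoked with the
                        // tokens `2`, the transcriber below fills the `$e` hole with
                        // the matched expression, so the parser sees `2 + 1`.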
                        let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                                   Some(named_matches),
                                                   imported_from,
                                                   rhs);
                        let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr));
                        panictry!(p.check_unknown_macro_variable());
                        // Let the context choose how to interpret the result.
                        // Weird, but useful for X-macros.
                        return Box::new(ParserAnyMacro {
                            parser: RefCell::new(p),

                            // Pass along the original expansion site and the name of the macro
                            // so we can print a useful error message if the parse of the expanded
                            // macro leaves unparsed tokens.
                            site_span: sp,
                            macro_ident: name
                        })
                    }
                    Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
                        best_fail_spot = sp;
                        best_fail_msg = (*msg).clone();
                    },
                    Error(sp, ref msg) => panic!(cx.span_fatal(sp, &msg[..]))
                }
            }
            _ => cx.bug("non-matcher found in parsed lhses")
        }
    }
    panic!(cx.span_fatal(best_fail_spot, &best_fail_msg[..]));
}

// Note that macro-by-example's input is also matched against a token tree:
//     $( $lhs:tt => $rhs:tt );+
//
// Holy self-referential!

/// Converts a `macro_rules!` invocation into a syntax extension.
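///
/// For example, a definition like
///
///     macro_rules! add_one { ($e:expr) => ($e + 1) }
///
/// has its body matched against the `$( $lhs:tt => $rhs:tt );+` grammar built
/// below, capturing one `lhs` (the token tree `($e:expr)`) and one `rhs`
/// (`($e + 1)`), which are then handed to `MacroRulesMacroExpander`.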
pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
                    def: &ast::MacroDef) -> SyntaxExtension {

    let lhs_nm = gensym_ident("lhs");
    let rhs_nm = gensym_ident("rhs");

    // The pattern that macro_rules matches.
    // The grammar for macro_rules! is:
    // $( $lhs:tt => $rhs:tt );+
    // ...quasiquoting this would be nice.
    // These spans won't matter, anyway
    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
    let argument_gram = vec!(
        TtSequence(DUMMY_SP,
                   Rc::new(ast::SequenceRepetition {
                       tts: vec![
                           TtToken(DUMMY_SP, match_lhs_tok),
                           TtToken(DUMMY_SP, token::FatArrow),
                           TtToken(DUMMY_SP, match_rhs_tok)],
                       separator: Some(token::Semi),
                       op: ast::OneOrMore,
                       num_captures: 2
                   })),
        // to phase into semicolon-termination instead of
        // semicolon-separation
        TtSequence(DUMMY_SP,
                   Rc::new(ast::SequenceRepetition {
                       tts: vec![TtToken(DUMMY_SP, token::Semi)],
                       separator: None,
                       op: ast::ZeroOrMore,
                       num_captures: 0
                   })));


    // Parse the macro_rules! invocation (`None` is for no interpolations):
    let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                   None,
                                   None,
                                   def.body.clone());
    let argument_map = parse_or_else(cx.parse_sess(),
                                     cx.cfg(),
                                     arg_reader,
                                     argument_gram);

    // Extract the arguments:
    let lhses = match **argument_map.get(&lhs_nm).unwrap() {
        MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
        _ => cx.span_bug(def.span, "wrong-structured lhs")
    };

    for lhs in &lhses {
        check_lhs_nt_follows(cx, &**lhs, def.span);
    }

    let rhses = match **argument_map.get(&rhs_nm).unwrap() {
        MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
        _ => cx.span_bug(def.span, "wrong-structured rhs")
    };

    let exp: Box<_> = Box::new(MacroRulesMacroExpander {
        name: def.ident,
        imported_from: def.imported_from,
        lhses: lhses,
        rhses: rhses,
    });

    NormalTT(exp, Some(def.span), def.allow_internal_unstable)
}

fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
    // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is
    // those tts. Or, it can be a "bare sequence", not wrapped in parens.
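    // e.g. the lhs of `macro_rules! m { ($x:ident) => ($x) }` arrives here as
    // MatchedNonterminal(NtTT(..)) wrapping the delimited token tree `($x:ident)`.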
    match lhs {
        &MatchedNonterminal(NtTT(ref inner)) => match &**inner {
            &TtDelimited(_, ref tts) => {
                check_matcher(cx, tts.tts.iter(), &Eof);
            },
            tt @ &TtSequence(..) => {
                check_matcher(cx, Some(tt).into_iter(), &Eof);
            },
            _ => cx.span_bug(sp, "wrong-structured lhs for follow check (didn't find \
                                  a TtDelimited or TtSequence)")
        },
        _ => cx.span_bug(sp, "wrong-structured lhs for follow check (didn't find a \
                              MatchedNonterminal)")
    };
    // we don't abort on rejection errors; the driver will do that for us after
    // parsing/expansion. This way we can report every error in every macro.
}

// Returns the last token that was checked (for TtSequence); this gets used later on.
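// For example, the matcher `$e:expr $f:expr` is rejected here, because the token
// following `$e:expr` is not in FOLLOW(expr) (see `is_in_follow` below), whereas
// `$e:expr, $f:expr` is accepted because `,` is.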
fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                        -> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> {
    use print::pprust::token_to_string;

    let mut last = None;

    // 2. For each token T in M:
    let mut tokens = matcher.peekable();
    while let Some(token) = tokens.next() {
        last = match *token {
            TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
                // ii. If T is a simple NT, look ahead to the next token T' in
                // M.
                let next_token = match tokens.peek() {
                    // If T' closes a complex NT, replace T' with F
                    Some(&&TtToken(_, CloseDelim(_))) => follow.clone(),
                    Some(&&TtToken(_, ref tok)) => tok.clone(),
                    Some(&&TtSequence(sp, _)) => {
                        cx.span_err(sp,
                                    &format!("`${0}:{1}` is followed by a \
                                              sequence repetition, which is not \
                                              allowed for `{1}` fragments",
                                             name.as_str(), frag_spec.as_str())
                        );
                        Eof
                    },
                    // die next iteration
                    Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
                    // else, we're at the end of the macro or sequence
                    None => follow.clone()
                };

                let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() };
                // If T' is in the set FOLLOW(NT), continue. Else, reject.
                match (&next_token, is_in_follow(cx, &next_token, frag_spec.as_str())) {
                    (_, Err(msg)) => {
                        cx.span_err(sp, &msg);
                        continue
                    }
                    (&Eof, _) => return Some((sp, tok.clone())),
                    (_, Ok(true)) => continue,
                    (next, Ok(false)) => {
                        cx.span_err(sp, &format!("`${0}:{1}` is followed by `{2}`, which \
                                                  is not allowed for `{1}` fragments",
                                                 name.as_str(), frag_spec.as_str(),
                                                 token_to_string(next)));
                        continue
                    },
                }
            },
            TtSequence(sp, ref seq) => {
                // iii. Else, T is a complex NT.
                match seq.separator {
                    // If T has the form $(...)U+ or $(...)U* for some token U,
                    // run the algorithm on the contents with F set to U. If it
                    // accepts, continue, else, reject.
                    Some(ref u) => {
                        let last = check_matcher(cx, seq.tts.iter(), u);
                        match last {
                            // Since the delimiter isn't required after the last
                            // repetition, make sure that the *next* token is
                            // sane. This doesn't actually compute the FIRST of
                            // the rest of the matcher yet, it only considers
                            // single tokens and simple NTs. This is imprecise,
                            // but conservatively correct.
                            Some((span, tok)) => {
                                let fol = match tokens.peek() {
                                    Some(&&TtToken(_, ref tok)) => tok.clone(),
                                    Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
                                    Some(_) => {
                                        cx.span_err(sp, "sequence repetition followed by \
                                                         another sequence repetition, which is not allowed");
                                        Eof
                                    },
                                    None => Eof
                                };
                                check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(),
                                              &fol)
                            },
                            None => last,
                        }
                    },
                    // If T has the form $(...)+ or $(...)*, run the algorithm
                    // on the contents with F set to the token following the
                    // sequence. If it accepts, continue, else, reject.
                    None => {
                        let fol = match tokens.peek() {
                            Some(&&TtToken(_, ref tok)) => tok.clone(),
                            Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
                            Some(_) => {
                                cx.span_err(sp, "sequence repetition followed by another \
                                                 sequence repetition, which is not allowed");
                                Eof
                            },
                            None => Eof
                        };
                        check_matcher(cx, seq.tts.iter(), &fol)
                    }
                }
            },
            TtToken(..) => {
                // i. If T is not an NT, continue.
                continue
            },
            TtDelimited(_, ref tts) => {
                // if we don't pass in that close delimiter, we'll incorrectly consider the matcher
                // `{ $foo:ty }` as having a follow that isn't `RBrace`
                check_matcher(cx, tts.tts.iter(), &tts.close_token())
            }
        }
    }
    last
}
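
// FOLLOW sets, as encoded below: any close delimiter is accepted after every
// fragment; `item`, `block`, `ident`, `meta` and `tt` accept anything; `stmt`
// and `expr` accept only `=>`, `,` or `;`; `pat` accepts `=>`, `,` or `=`;
// `path` and `ty` accept `,`, `=>`, `:`, `=`, `>` or the `as` keyword. So, for
// example, `$t:ty =` passes the check above while `$t:ty +` is rejected.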
fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
    if let &CloseDelim(_) = tok {
        Ok(true)
    } else {
        match frag {
            "item" => {
                // since items *must* be followed by either a `;` or a `}`, we can
                // accept anything after them
                Ok(true)
            },
            "block" => {
                // anything can follow a block; the braces provide an easy boundary to
                // maintain
                Ok(true)
            },
            "stmt" | "expr" => {
                match *tok {
                    FatArrow | Comma | Semi => Ok(true),
                    _ => Ok(false)
                }
            },
            "pat" => {
                match *tok {
                    FatArrow | Comma | Eq => Ok(true),
                    _ => Ok(false)
                }
            },
            "path" | "ty" => {
                match *tok {
                    Comma | FatArrow | Colon | Eq | Gt => Ok(true),
                    Ident(i, _) if i.as_str() == "as" => Ok(true),
                    _ => Ok(false)
                }
            },
            "ident" => {
                // being a single token, idents are harmless
                Ok(true)
            },
            "meta" | "tt" => {
                // being either a single token or a delimited sequence, `meta` and
                // `tt` are harmless
                Ok(true)
            },
            _ => Err(format!("invalid fragment specifier `{}`", frag))
        }
    }
}