// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty};
use syntax_pos::Span;
use ext::base::ExtCtxt;
use ext::base;
use ext::build::AstBuilder;
use parse::parser::{Parser, PathStyle};
use parse::token;
use ptr::P;
use tokenstream::{TokenStream, TokenTree};

/// Quasiquoting works via token trees.
///
/// This is registered as a set of expression syntax extensions (`quote_expr!`,
/// `quote_item!`, `quote_ty!`, and so on) that lift their argument token-tree
/// to an AST representing the construction of the same token tree, with
/// `$`-prefixed identifiers interpreted as antiquotes (splices).

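// Illustrative sketch (not part of the implementation): inside a syntax
// extension that links against libsyntax, a quote macro lifts its argument
// tokens into AST, and `$`-prefixed identifiers are spliced back in through
// the `ToTokens` trait below. The names `cx` and `sp` here are assumed to be
// an `ExtCtxt` and a `Span` supplied by the extension:
//
//     let three = cx.expr_usize(sp, 3);
//     let sum = quote_expr!(cx, $three + 4);   // roughly the AST for `3usize + 4`
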
pub mod rt {
    use ast;
    use codemap::Spanned;
    use ext::base::ExtCtxt;
    use parse::{self, token, classify};
    use ptr::P;
    use std::rc::Rc;
    use symbol::Symbol;

    use tokenstream::{self, TokenTree, TokenStream};

    pub use parse::new_parser_from_tts;
    pub use syntax_pos::{BytePos, Span, DUMMY_SP};
    pub use codemap::{dummy_spanned};

    pub trait ToTokens {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>;
    }
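
    // A rough sketch of what this trait buys us (an assumption for
    // illustration, not part of this module's API surface): any value that
    // implements `ToTokens` can stand behind a `$name` antiquote, because the
    // expansion simply calls `name.to_tokens(ext_cx)` and splices the
    // resulting token trees:
    //
    //     let ty: P<ast::Ty> = quote_ty!(cx, i32);
    //     let e = quote_expr!(cx, 0 as $ty);   // `ty` re-enters as an NtTy token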

    impl ToTokens for TokenTree {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            vec![self.clone()]
        }
    }

    impl<T: ToTokens> ToTokens for Vec<T> {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            self.iter().flat_map(|t| t.to_tokens(cx)).collect()
        }
    }

    impl<T: ToTokens> ToTokens for Spanned<T> {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            // FIXME: use the span?
            self.node.to_tokens(cx)
        }
    }

    impl<T: ToTokens> ToTokens for Option<T> {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            match *self {
                Some(ref t) => t.to_tokens(cx),
                None => Vec::new(),
            }
        }
    }

    impl ToTokens for ast::Ident {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
        }
    }

    impl ToTokens for ast::Path {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtPath(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Ty {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtTy(P(self.clone()));
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Block {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtBlock(P(self.clone()));
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Generics {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtGenerics(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::WhereClause {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtWhereClause(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for P<ast::Item> {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtItem(self.clone());
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::ImplItem {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtImplItem(self.clone());
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for P<ast::ImplItem> {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtImplItem((**self).clone());
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::TraitItem {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtTraitItem(self.clone());
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Stmt {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtStmt(self.clone());
            let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))];

            // Some statements require a trailing semicolon.
            if classify::stmt_ends_with_semi(&self.node) {
                tts.push(TokenTree::Token(self.span, token::Semi));
            }

            tts
        }
    }

    impl ToTokens for P<ast::Expr> {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtExpr(self.clone());
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for P<ast::Pat> {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtPat(self.clone());
            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Arm {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtArm(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Arg {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtArg(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for P<ast::Block> {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtBlock(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    macro_rules! impl_to_tokens_slice {
        ($t: ty, $sep: expr) => {
            impl ToTokens for [$t] {
                fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
                    let mut v = vec![];
                    for (i, x) in self.iter().enumerate() {
                        if i > 0 {
                            v.extend_from_slice(&$sep);
                        }
                        v.extend(x.to_tokens(cx));
                    }
                    v
                }
            }
        };
    }

    impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] }
    impl_to_tokens_slice! { P<ast::Item>, [] }
    impl_to_tokens_slice! { ast::Arg, [TokenTree::Token(DUMMY_SP, token::Comma)] }

    impl ToTokens for ast::MetaItem {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            let nt = token::NtMeta(self.clone());
            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
        }
    }

    impl ToTokens for ast::Attribute {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            let mut r = vec![];
            // FIXME: The spans could be better
            r.push(TokenTree::Token(self.span, token::Pound));
            if self.style == ast::AttrStyle::Inner {
                r.push(TokenTree::Token(self.span, token::Not));
            }
            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
                delim: token::Bracket,
                tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
            }));
            r
        }
    }

    impl ToTokens for str {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            let lit = ast::LitKind::Str(Symbol::intern(self), ast::StrStyle::Cooked);
            dummy_spanned(lit).to_tokens(cx)
        }
    }

    impl ToTokens for () {
        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
                delim: token::Paren,
                tts: TokenStream::empty().into(),
            })]
        }
    }

    impl ToTokens for ast::Lit {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            // FIXME: This is wrong
            P(ast::Expr {
                id: ast::DUMMY_NODE_ID,
                node: ast::ExprKind::Lit(P(self.clone())),
                span: DUMMY_SP,
                attrs: ast::ThinVec::new(),
            }).to_tokens(cx)
        }
    }

    impl ToTokens for bool {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            dummy_spanned(ast::LitKind::Bool(*self)).to_tokens(cx)
        }
    }

    impl ToTokens for char {
        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
            dummy_spanned(ast::LitKind::Char(*self)).to_tokens(cx)
        }
    }

    macro_rules! impl_to_tokens_int {
        (signed, $t:ty, $tag:expr) => (
            impl ToTokens for $t {
                fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
                    let val = if *self < 0 {
                        -self
                    } else {
                        *self
                    };
                    let lit = ast::LitKind::Int(val as u128, ast::LitIntType::Signed($tag));
                    let lit = P(ast::Expr {
                        id: ast::DUMMY_NODE_ID,
                        node: ast::ExprKind::Lit(P(dummy_spanned(lit))),
                        span: DUMMY_SP,
                        attrs: ast::ThinVec::new(),
                    });
                    if *self >= 0 {
                        return lit.to_tokens(cx);
                    }
                    P(ast::Expr {
                        id: ast::DUMMY_NODE_ID,
                        node: ast::ExprKind::Unary(ast::UnOp::Neg, lit),
                        span: DUMMY_SP,
                        attrs: ast::ThinVec::new(),
                    }).to_tokens(cx)
                }
            }
        );
        (unsigned, $t:ty, $tag:expr) => (
            impl ToTokens for $t {
                fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
                    let lit = ast::LitKind::Int(*self as u128, ast::LitIntType::Unsigned($tag));
                    dummy_spanned(lit).to_tokens(cx)
                }
            }
        );
    }

    impl_to_tokens_int! { signed, isize, ast::IntTy::Is }
    impl_to_tokens_int! { signed, i8, ast::IntTy::I8 }
    impl_to_tokens_int! { signed, i16, ast::IntTy::I16 }
    impl_to_tokens_int! { signed, i32, ast::IntTy::I32 }
    impl_to_tokens_int! { signed, i64, ast::IntTy::I64 }

    impl_to_tokens_int! { unsigned, usize, ast::UintTy::Us }
    impl_to_tokens_int! { unsigned, u8, ast::UintTy::U8 }
    impl_to_tokens_int! { unsigned, u16, ast::UintTy::U16 }
    impl_to_tokens_int! { unsigned, u32, ast::UintTy::U32 }
    impl_to_tokens_int! { unsigned, u64, ast::UintTy::U64 }

    pub trait ExtParseUtils {
        fn parse_item(&self, s: String) -> P<ast::Item>;
        fn parse_expr(&self, s: String) -> P<ast::Expr>;
        fn parse_stmt(&self, s: String) -> ast::Stmt;
        fn parse_tts(&self, s: String) -> Vec<TokenTree>;
    }
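
    // A minimal usage sketch (for illustration only): these helpers let an
    // extension parse source strings against the current parse session, e.g.
    // given an `ExtCtxt` named `cx`:
    //
    //     let item = cx.parse_item("fn answer() -> i32 { 42 }".to_string());
    //     let expr = cx.parse_expr("answer() + 1".to_string());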

    impl<'a> ExtParseUtils for ExtCtxt<'a> {
        fn parse_item(&self, s: String) -> P<ast::Item> {
            panictry!(parse::parse_item_from_source_str(
                "<quote expansion>".to_string(),
                s,
                self.parse_sess())).expect("parse error")
        }

        fn parse_stmt(&self, s: String) -> ast::Stmt {
            panictry!(parse::parse_stmt_from_source_str(
                "<quote expansion>".to_string(),
                s,
                self.parse_sess())).expect("parse error")
        }

        fn parse_expr(&self, s: String) -> P<ast::Expr> {
            panictry!(parse::parse_expr_from_source_str(
                "<quote expansion>".to_string(),
                s,
                self.parse_sess()))
        }

        fn parse_tts(&self, s: String) -> Vec<TokenTree> {
            let source_name = "<quote expansion>".to_owned();
            parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
                .into_trees().collect()
        }
    }
}

// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
    use tokenstream::Delimited;

    let mut results = Vec::new();
    let mut result = Vec::new();
    for tree in tts {
        match tree {
            TokenTree::Token(_, token::OpenDelim(..)) => {
                results.push(::std::mem::replace(&mut result, Vec::new()));
            }
            TokenTree::Token(span, token::CloseDelim(delim)) => {
                let tree = TokenTree::Delimited(span, Delimited {
                    delim: delim,
                    tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
                });
                result = results.pop().unwrap();
                result.push(tree);
            }
            tree @ _ => result.push(tree),
        }
    }
    result
}
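
// For example (illustrative, not tested here): the flat sequence
// `[Ident(f), OpenDelim(Paren), Ident(x), CloseDelim(Paren)]` becomes
// `[Ident(f), Delimited(Paren, [Ident(x)])]`, i.e. matching open/close tokens
// are folded back into a single `TokenTree::Delimited` node.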

// These panicking parsing functions are used by the quote_*!() syntax extensions,
// but shouldn't be used otherwise.
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
    panictry!(parser.parse_expr())
}

pub fn parse_item_panic(parser: &mut Parser) -> Option<P<Item>> {
    panictry!(parser.parse_item())
}

pub fn parse_pat_panic(parser: &mut Parser) -> P<Pat> {
    panictry!(parser.parse_pat())
}

pub fn parse_arm_panic(parser: &mut Parser) -> Arm {
    panictry!(parser.parse_arm())
}

pub fn parse_ty_panic(parser: &mut Parser) -> P<Ty> {
    panictry!(parser.parse_ty_no_plus())
}

pub fn parse_stmt_panic(parser: &mut Parser) -> Option<Stmt> {
    panictry!(parser.parse_stmt())
}

pub fn parse_attribute_panic(parser: &mut Parser, permit_inner: bool) -> ast::Attribute {
    panictry!(parser.parse_attribute(permit_inner))
}

pub fn parse_arg_panic(parser: &mut Parser) -> Arg {
    panictry!(parser.parse_arg())
}

pub fn parse_block_panic(parser: &mut Parser) -> P<Block> {
    panictry!(parser.parse_block())
}

pub fn parse_meta_item_panic(parser: &mut Parser) -> ast::MetaItem {
    panictry!(parser.parse_meta_item())
}

pub fn parse_path_panic(parser: &mut Parser, mode: PathStyle) -> ast::Path {
    panictry!(parser.parse_path(mode))
}

pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt,
                                sp: Span,
                                tts: &[TokenTree])
                                -> Box<base::MacResult+'cx> {
    let (cx_expr, expr) = expand_tts(cx, sp, tts);
    let expanded = expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt,
                              sp: Span,
                              tts: &[TokenTree])
                              -> Box<base::MacResult+'cx> {
    let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_item<'cx>(cx: &'cx mut ExtCtxt,
                              sp: Span,
                              tts: &[TokenTree])
                              -> Box<base::MacResult+'cx> {
    let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt,
                             sp: Span,
                             tts: &[TokenTree])
                             -> Box<base::MacResult+'cx> {
    let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_arm(cx: &mut ExtCtxt,
                        sp: Span,
                        tts: &[TokenTree])
                        -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_ty(cx: &mut ExtCtxt,
                       sp: Span,
                       tts: &[TokenTree])
                       -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_stmt(cx: &mut ExtCtxt,
                         sp: Span,
                         tts: &[TokenTree])
                         -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_attr(cx: &mut ExtCtxt,
                         sp: Span,
                         tts: &[TokenTree])
                         -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_attribute_panic",
                                     vec![cx.expr_bool(sp, true)], tts);

    base::MacEager::expr(expanded)
}

pub fn expand_quote_arg(cx: &mut ExtCtxt,
                        sp: Span,
                        tts: &[TokenTree])
                        -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_arg_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_block(cx: &mut ExtCtxt,
                          sp: Span,
                          tts: &[TokenTree])
                          -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_block_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_meta_item(cx: &mut ExtCtxt,
                              sp: Span,
                              tts: &[TokenTree])
                              -> Box<base::MacResult+'static> {
    let expanded = expand_parse_call(cx, sp, "parse_meta_item_panic", vec![], tts);
    base::MacEager::expr(expanded)
}

pub fn expand_quote_path(cx: &mut ExtCtxt,
                         sp: Span,
                         tts: &[TokenTree])
                         -> Box<base::MacResult+'static> {
    let mode = mk_parser_path(cx, sp, &["PathStyle", "Type"]);
    let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec![mode], tts);
    base::MacEager::expr(expanded)
}

fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
    strs.iter().map(|s| ast::Ident::from_str(s)).collect()
}

fn id_ext(s: &str) -> ast::Ident {
    ast::Ident::from_str(s)
}

// Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
    let e_str = cx.expr_str(sp, ident.name);
    cx.expr_method_call(sp,
                        cx.expr_ident(sp, id_ext("ext_cx")),
                        id_ext("ident_of"),
                        vec![e_str])
}
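
// Sketch of the generated code (illustrative): `mk_ident` above and `mk_name`
// below do not build the ident or name directly; they build an *expression*
// that re-creates it at the expansion site, roughly
//
//     ext_cx.ident_of("foo")    // from mk_ident
//     ext_cx.name_of("foo")     // from mk_name
//
// where `"foo"` stands for the quoted identifier's string.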

// Lift a name to the expr that evaluates to that name
fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
    let e_str = cx.expr_str(sp, ident.name);
    cx.expr_method_call(sp,
                        cx.expr_ident(sp, id_ext("ext_cx")),
                        id_ext("name_of"),
                        vec![e_str])
}

fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
    let idents = vec![id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name)];
    cx.expr_path(cx.path_global(sp, idents))
}

fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
    let idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name)];
    cx.expr_path(cx.path_global(sp, idents))
}

fn mk_parser_path(cx: &ExtCtxt, sp: Span, names: &[&str]) -> P<ast::Expr> {
    let mut idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("parser")];
    idents.extend(names.iter().cloned().map(id_ext));
    cx.expr_path(cx.path_global(sp, idents))
}

fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
    let name = match bop {
        token::Plus => "Plus",
        token::Minus => "Minus",
        token::Star => "Star",
        token::Slash => "Slash",
        token::Percent => "Percent",
        token::Caret => "Caret",
        token::And => "And",
        token::Or => "Or",
        token::Shl => "Shl",
        token::Shr => "Shr"
    };
    mk_token_path(cx, sp, name)
}

fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> {
    let name = match delim {
        token::Paren => "Paren",
        token::Bracket => "Bracket",
        token::Brace => "Brace",
        token::NoDelim => "NoDelim",
    };
    mk_token_path(cx, sp, name)
}

#[allow(non_upper_case_globals)]
fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
    macro_rules! mk_lit {
        ($name: expr, $suffix: expr, $($args: expr),*) => {{
            let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
            let suffix = match $suffix {
                Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
                None => cx.expr_none(sp)
            };
            cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
        }}
    }
    match *tok {
        token::BinOp(binop) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]);
        }
        token::BinOpEq(binop) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"),
                                vec![mk_binop(cx, sp, binop)]);
        }

        token::OpenDelim(delim) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"),
                                vec![mk_delim(cx, sp, delim)]);
        }
        token::CloseDelim(delim) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"),
                                vec![mk_delim(cx, sp, delim)]);
        }

        token::Literal(token::Byte(i), suf) => {
            let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
            return mk_lit!("Byte", suf, e_byte);
        }

        token::Literal(token::Char(i), suf) => {
            let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
            return mk_lit!("Char", suf, e_char);
        }

        token::Literal(token::Integer(i), suf) => {
            let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
            return mk_lit!("Integer", suf, e_int);
        }

        token::Literal(token::Float(fident), suf) => {
            let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
            return mk_lit!("Float", suf, e_fident);
        }

        token::Literal(token::Str_(ident), suf) => {
            return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
        }

        token::Literal(token::StrRaw(ident, n), suf) => {
            return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
                           cx.expr_usize(sp, n))
        }

        token::Ident(ident) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "Ident"),
                                vec![mk_ident(cx, sp, ident)]);
        }

        token::Lifetime(ident) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "Lifetime"),
                                vec![mk_ident(cx, sp, ident)]);
        }

        token::DocComment(ident) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "DocComment"),
                                vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]);
        }

        token::Interpolated(_) => panic!("quote! with interpolated token"),

        _ => ()
    }

    let name = match *tok {
        token::Eq => "Eq",
        token::Lt => "Lt",
        token::Le => "Le",
        token::EqEq => "EqEq",
        token::Ne => "Ne",
        token::Ge => "Ge",
        token::Gt => "Gt",
        token::AndAnd => "AndAnd",
        token::OrOr => "OrOr",
        token::Not => "Not",
        token::Tilde => "Tilde",
        token::At => "At",
        token::Dot => "Dot",
        token::DotDot => "DotDot",
        token::Comma => "Comma",
        token::Semi => "Semi",
        token::Colon => "Colon",
        token::ModSep => "ModSep",
        token::RArrow => "RArrow",
        token::LArrow => "LArrow",
        token::FatArrow => "FatArrow",
        token::Pound => "Pound",
        token::Dollar => "Dollar",
        token::Question => "Question",
        token::Underscore => "Underscore",
        token::Eof => "Eof",
        _ => panic!("unhandled token in quote!"),
    };
    mk_token_path(cx, sp, name)
}

fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
    match *tt {
        TokenTree::Token(sp, token::Ident(ident)) if quoted => {
            // tt.extend($ident.to_tokens(ext_cx))

            let e_to_toks =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, ident),
                                    id_ext("to_tokens"),
                                    vec![cx.expr_ident(sp, id_ext("ext_cx"))]);
            let e_to_toks =
                cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);

            let e_push =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, id_ext("tt")),
                                    id_ext("extend"),
                                    vec![e_to_toks]);

            vec![cx.stmt_expr(e_push)]
        }
        TokenTree::Token(sp, ref tok) => {
            let e_sp = cx.expr_ident(sp, id_ext("_sp"));
            let e_tok = cx.expr_call(sp,
                                     mk_tt_path(cx, sp, "Token"),
                                     vec![e_sp, expr_mk_token(cx, sp, tok)]);
            let e_push =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, id_ext("tt")),
                                    id_ext("push"),
                                    vec![e_tok]);
            vec![cx.stmt_expr(e_push)]
        },
        TokenTree::Delimited(span, ref delimed) => {
            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
            stmts.extend(statements_mk_tts(cx, delimed.stream()));
            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
            stmts
        }
    }
}
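
// Illustrative sketch of the output (an assumption about shape, not verbatim):
// for the quoted fragment `$x + 1`, the statements emitted here look roughly
// like
//
//     tt.extend(x.to_tokens(ext_cx).into_iter());
//     tt.push(::syntax::tokenstream::TokenTree::Token(_sp,
//             ::syntax::parse::token::BinOp(::syntax::parse::token::Plus)));
//     tt.push(::syntax::tokenstream::TokenTree::Token(_sp, /* the literal `1` */));
//
// i.e. antiquoted idents splice in via `ToTokens`, while ordinary tokens are
// rebuilt one `tt.push(..)` at a time.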

fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
                            -> (P<ast::Expr>, Vec<TokenTree>) {
    let mut p = cx.new_parser_from_tts(tts);

    let cx_expr = panictry!(p.parse_expr());
    if !p.eat(&token::Comma) {
        let _ = p.diagnostic().fatal("expected token `,`");
    }

    let tts = panictry!(p.parse_all_token_trees());
    p.abort_if_errors();

    (cx_expr, tts)
}

fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
    // We also bind a single value, sp, to ext_cx.call_site()
    //
    // This causes every span in a token-tree quote to be attributed to the
    // call site of the extension using the quote. We can't really do much
    // better since the source of the quote may well be in a library that
    // was not even parsed by this compilation run and that the user has no
    // source code for (e.g. libsyntax, which they're just _using_).
    //
    // The old quasiquoter had an elaborate mechanism for denoting input
    // file locations from which quotes originated; unfortunately this
    // relied on feeding the source string of the quote back into the
    // compiler (which we don't really want to do) and, in any case, only
    // pushed the problem a very small step further back: an error
    // resulting from a parse of the resulting quote is still attributed to
    // the site where the string literal occurred, which was in a source file
    // _other_ than the one the user has control over. For example, an
    // error in a quote from the protocol compiler, invoked in user code
    // using macro_rules!, will be attributed to the macro_rules.rs file in
    // libsyntax, which the user might not even have source to (unless they
    // happen to have a compiler on hand). Overall, the phase distinction
    // just makes quotes "hard to attribute". Possibly this could be fixed
    // by recreating some of the original qq machinery in the tt regime
    // (pushing fake FileMaps onto the parser to account for original sites
    // of quotes, for example) but at this point it seems not likely to be
    // worth the hassle.

    let e_sp = cx.expr_method_call(sp,
                                   cx.expr_ident(sp, id_ext("ext_cx")),
                                   id_ext("call_site"),
                                   Vec::new());

    let stmt_let_sp = cx.stmt_let(sp, false,
                                  id_ext("_sp"),
                                  e_sp);

    let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));

    vec![stmt_let_sp, stmt_let_tt]
}
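
// The two generated bindings look roughly like this at the expansion site
// (a sketch, under the assumption that `expr_vec_ng` builds a `Vec::new()` call):
//
//     let _sp = ext_cx.call_site();
//     let mut tt = ::std::vec::Vec::new();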

fn statements_mk_tts(cx: &ExtCtxt, tts: TokenStream) -> Vec<ast::Stmt> {
    let mut ss = Vec::new();
    let mut quoted = false;
    for tt in tts.into_trees() {
        quoted = match tt {
            TokenTree::Token(_, token::Dollar) if !quoted => true,
            _ => {
                ss.extend(statements_mk_tt(cx, &tt, quoted));
                false
            }
        }
    }
    ss
}

fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) {
    let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);

    let mut vector = mk_stmts_let(cx, sp);
    vector.extend(statements_mk_tts(cx, tts.iter().cloned().collect()));
    vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
    let block = cx.expr_block(cx.block(sp, vector));
    let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];

    (cx_expr, cx.expr_call_global(sp, unflatten, vec![block]))
}

fn expand_wrapper(cx: &ExtCtxt,
                  sp: Span,
                  cx_expr: P<ast::Expr>,
                  expr: P<ast::Expr>,
                  imports: &[&[&str]]) -> P<ast::Expr> {
    // Explicitly borrow to avoid moving from the invoker (#16992)
    let cx_expr_borrow = cx.expr_addr_of(sp, cx.expr_deref(sp, cx_expr));
    let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr_borrow);

    let mut stmts = imports.iter().map(|path| {
        // make item: `use ...;`
        let path = path.iter().map(|s| s.to_string()).collect();
        cx.stmt_item(sp, cx.item_use_glob(sp, ast::Visibility::Inherited, ids_ext(path)))
    }).chain(Some(stmt_let_ext_cx)).collect::<Vec<_>>();
    stmts.push(cx.stmt_expr(expr));

    cx.expr_block(cx.block(sp, stmts))
}
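
// Sketch of the wrapper block this produces (illustrative shape only):
//
//     {
//         use syntax::ext::quote::rt::*;
//         let ext_cx = &*<the user-supplied ExtCtxt expression>;
//         <expr>
//     }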

fn expand_parse_call(cx: &ExtCtxt,
                     sp: Span,
                     parse_method: &str,
                     arg_exprs: Vec<P<ast::Expr>>,
                     tts: &[TokenTree]) -> P<ast::Expr> {
    let (cx_expr, tts_expr) = expand_tts(cx, sp, tts);

    let parse_sess_call = || cx.expr_method_call(
        sp, cx.expr_ident(sp, id_ext("ext_cx")),
        id_ext("parse_sess"), Vec::new());

    let new_parser_call =
        cx.expr_call(sp,
                     cx.expr_ident(sp, id_ext("new_parser_from_tts")),
                     vec![parse_sess_call(), tts_expr]);

    let path = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext(parse_method)];
    let mut args = vec![cx.expr_mut_addr_of(sp, new_parser_call)];
    args.extend(arg_exprs);
    let expr = cx.expr_call_global(sp, path, args);

    if parse_method == "parse_attribute" {
        expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"],
                                                &["syntax", "parse", "attr"]])
    } else {
        expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]])
    }
}
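
// For, e.g., `quote_expr!(cx, $e + 1)`, the expression built here has roughly
// this shape (an illustrative sketch, with the token-building block elided):
//
//     {
//         use syntax::ext::quote::rt::*;
//         let ext_cx = &*cx;
//         ::syntax::ext::quote::parse_expr_panic(
//             &mut new_parser_from_tts(ext_cx.parse_sess(),
//                 ::syntax::ext::quote::unflatten({ /* tt-building statements */ })))
//     }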