]>
Commit | Line | Data |
---|---|---|
1 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT | |
2 | // file at the top-level directory of this distribution and at | |
3 | // http://rust-lang.org/COPYRIGHT. | |
4 | // | |
5 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | |
6 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | |
7 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | |
8 | // option. This file may not be copied, modified, or distributed | |
9 | // except according to those terms. | |
10 | ||
11 | use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, TokenTree, Ty}; | |
12 | use codemap::Span; | |
13 | use ext::base::ExtCtxt; | |
14 | use ext::base; | |
15 | use ext::build::AstBuilder; | |
16 | use parse::parser::{Parser, PathParsingMode}; | |
17 | use parse::token::*; | |
18 | use parse::token; | |
19 | use ptr::P; | |
20 | ||
21 | /// Quasiquoting works via token trees. | |
22 | /// | |
23 | /// This is registered as a set of expression syntax extension called quote! | |
24 | /// that lifts its argument token-tree to an AST representing the | |
25 | /// construction of the same token tree, with token::SubstNt interpreted | |
26 | /// as antiquotes (splices). | |
27 | ||
28 | pub mod rt { | |
29 | use ast; | |
30 | use codemap::Spanned; | |
31 | use ext::base::ExtCtxt; | |
32 | use parse::{self, token, classify}; | |
33 | use ptr::P; | |
34 | use std::rc::Rc; | |
35 | ||
36 | use ast::TokenTree; | |
37 | ||
38 | pub use parse::new_parser_from_tts; | |
39 | pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP}; | |
40 | ||
41 | pub trait ToTokens { | |
42 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>; | |
43 | } | |
44 | ||
45 | impl ToTokens for TokenTree { | |
46 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
47 | vec!(self.clone()) | |
48 | } | |
49 | } | |
50 | ||
51 | impl<T: ToTokens> ToTokens for Vec<T> { | |
52 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
53 | self.iter().flat_map(|t| t.to_tokens(cx)).collect() | |
54 | } | |
55 | } | |
56 | ||
57 | impl<T: ToTokens> ToTokens for Spanned<T> { | |
58 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
59 | // FIXME: use the span? | |
60 | self.node.to_tokens(cx) | |
61 | } | |
62 | } | |
63 | ||
64 | impl<T: ToTokens> ToTokens for Option<T> { | |
65 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
66 | match *self { | |
67 | Some(ref t) => t.to_tokens(cx), | |
68 | None => Vec::new(), | |
69 | } | |
70 | } | |
71 | } | |
72 | ||
73 | impl ToTokens for ast::Ident { | |
74 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
75 | vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))] | |
76 | } | |
77 | } | |
78 | ||
79 | impl ToTokens for ast::Path { | |
80 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
81 | vec![TokenTree::Token(DUMMY_SP, | |
82 | token::Interpolated(token::NtPath(Box::new(self.clone()))))] | |
83 | } | |
84 | } | |
85 | ||
86 | impl ToTokens for ast::Ty { | |
87 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
88 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] | |
89 | } | |
90 | } | |
91 | ||
92 | impl ToTokens for ast::Block { | |
93 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
94 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] | |
95 | } | |
96 | } | |
97 | ||
98 | impl ToTokens for ast::Generics { | |
99 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
100 | vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))] | |
101 | } | |
102 | } | |
103 | ||
104 | impl ToTokens for ast::WhereClause { | |
105 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
106 | vec![TokenTree::Token(DUMMY_SP, | |
107 | token::Interpolated(token::NtWhereClause(self.clone())))] | |
108 | } | |
109 | } | |
110 | ||
111 | impl ToTokens for P<ast::Item> { | |
112 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
113 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))] | |
114 | } | |
115 | } | |
116 | ||
117 | impl ToTokens for P<ast::ImplItem> { | |
118 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
119 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))] | |
120 | } | |
121 | } | |
122 | ||
123 | impl ToTokens for P<ast::TraitItem> { | |
124 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
125 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] | |
126 | } | |
127 | } | |
128 | ||
129 | impl ToTokens for P<ast::Stmt> { | |
130 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
131 | let mut tts = vec![ | |
132 | TokenTree::Token(self.span, token::Interpolated(token::NtStmt(self.clone()))) | |
133 | ]; | |
134 | ||
135 | // Some statements require a trailing semicolon. | |
136 | if classify::stmt_ends_with_semi(&self.node) { | |
137 | tts.push(TokenTree::Token(self.span, token::Semi)); | |
138 | } | |
139 | ||
140 | tts | |
141 | } | |
142 | } | |
143 | ||
144 | impl ToTokens for P<ast::Expr> { | |
145 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
146 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))] | |
147 | } | |
148 | } | |
149 | ||
150 | impl ToTokens for P<ast::Pat> { | |
151 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
152 | vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))] | |
153 | } | |
154 | } | |
155 | ||
156 | impl ToTokens for ast::Arm { | |
157 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
158 | vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] | |
159 | } | |
160 | } | |
161 | ||
162 | impl ToTokens for ast::Arg { | |
163 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
164 | vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArg(self.clone())))] | |
165 | } | |
166 | } | |
167 | ||
168 | impl ToTokens for P<ast::Block> { | |
169 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
170 | vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtBlock(self.clone())))] | |
171 | } | |
172 | } | |
173 | ||
174 | macro_rules! impl_to_tokens_slice { | |
175 | ($t: ty, $sep: expr) => { | |
176 | impl ToTokens for [$t] { | |
177 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
178 | let mut v = vec![]; | |
179 | for (i, x) in self.iter().enumerate() { | |
180 | if i > 0 { | |
181 | v.extend_from_slice(&$sep); | |
182 | } | |
183 | v.extend(x.to_tokens(cx)); | |
184 | } | |
185 | v | |
186 | } | |
187 | } | |
188 | }; | |
189 | } | |
190 | ||
191 | impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] } | |
192 | impl_to_tokens_slice! { P<ast::Item>, [] } | |
193 | impl_to_tokens_slice! { ast::Arg, [TokenTree::Token(DUMMY_SP, token::Comma)] } | |
194 | ||
195 | impl ToTokens for P<ast::MetaItem> { | |
196 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
197 | vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] | |
198 | } | |
199 | } | |
200 | ||
201 | impl ToTokens for ast::Attribute { | |
202 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
203 | let mut r = vec![]; | |
204 | // FIXME: The spans could be better | |
205 | r.push(TokenTree::Token(self.span, token::Pound)); | |
206 | if self.node.style == ast::AttrStyle::Inner { | |
207 | r.push(TokenTree::Token(self.span, token::Not)); | |
208 | } | |
209 | r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited { | |
210 | delim: token::Bracket, | |
211 | open_span: self.span, | |
212 | tts: self.node.value.to_tokens(cx), | |
213 | close_span: self.span, | |
214 | }))); | |
215 | r | |
216 | } | |
217 | } | |
218 | ||
219 | impl ToTokens for str { | |
220 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
221 | let lit = ast::LitStr( | |
222 | token::intern_and_get_ident(self), ast::CookedStr); | |
223 | dummy_spanned(lit).to_tokens(cx) | |
224 | } | |
225 | } | |
226 | ||
227 | impl ToTokens for () { | |
228 | fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { | |
229 | vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited { | |
230 | delim: token::Paren, | |
231 | open_span: DUMMY_SP, | |
232 | tts: vec![], | |
233 | close_span: DUMMY_SP, | |
234 | }))] | |
235 | } | |
236 | } | |
237 | ||
238 | impl ToTokens for ast::Lit { | |
239 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
240 | // FIXME: This is wrong | |
241 | P(ast::Expr { | |
242 | id: ast::DUMMY_NODE_ID, | |
243 | node: ast::ExprLit(P(self.clone())), | |
244 | span: DUMMY_SP, | |
245 | attrs: None, | |
246 | }).to_tokens(cx) | |
247 | } | |
248 | } | |
249 | ||
250 | impl ToTokens for bool { | |
251 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
252 | dummy_spanned(ast::LitBool(*self)).to_tokens(cx) | |
253 | } | |
254 | } | |
255 | ||
256 | impl ToTokens for char { | |
257 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
258 | dummy_spanned(ast::LitChar(*self)).to_tokens(cx) | |
259 | } | |
260 | } | |
261 | ||
262 | macro_rules! impl_to_tokens_int { | |
263 | (signed, $t:ty, $tag:expr) => ( | |
264 | impl ToTokens for $t { | |
265 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
266 | let lit = ast::LitInt(*self as u64, ast::SignedIntLit($tag, | |
267 | ast::Sign::new(*self))); | |
268 | dummy_spanned(lit).to_tokens(cx) | |
269 | } | |
270 | } | |
271 | ); | |
272 | (unsigned, $t:ty, $tag:expr) => ( | |
273 | impl ToTokens for $t { | |
274 | fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { | |
275 | let lit = ast::LitInt(*self as u64, ast::UnsignedIntLit($tag)); | |
276 | dummy_spanned(lit).to_tokens(cx) | |
277 | } | |
278 | } | |
279 | ); | |
280 | } | |
281 | ||
282 | impl_to_tokens_int! { signed, isize, ast::TyIs } | |
283 | impl_to_tokens_int! { signed, i8, ast::TyI8 } | |
284 | impl_to_tokens_int! { signed, i16, ast::TyI16 } | |
285 | impl_to_tokens_int! { signed, i32, ast::TyI32 } | |
286 | impl_to_tokens_int! { signed, i64, ast::TyI64 } | |
287 | ||
288 | impl_to_tokens_int! { unsigned, usize, ast::TyUs } | |
289 | impl_to_tokens_int! { unsigned, u8, ast::TyU8 } | |
290 | impl_to_tokens_int! { unsigned, u16, ast::TyU16 } | |
291 | impl_to_tokens_int! { unsigned, u32, ast::TyU32 } | |
292 | impl_to_tokens_int! { unsigned, u64, ast::TyU64 } | |
293 | ||
294 | pub trait ExtParseUtils { | |
295 | fn parse_item(&self, s: String) -> P<ast::Item>; | |
296 | fn parse_expr(&self, s: String) -> P<ast::Expr>; | |
297 | fn parse_stmt(&self, s: String) -> P<ast::Stmt>; | |
298 | fn parse_tts(&self, s: String) -> Vec<TokenTree>; | |
299 | } | |
300 | ||
301 | impl<'a> ExtParseUtils for ExtCtxt<'a> { | |
302 | ||
303 | fn parse_item(&self, s: String) -> P<ast::Item> { | |
304 | parse::parse_item_from_source_str( | |
305 | "<quote expansion>".to_string(), | |
306 | s, | |
307 | self.cfg(), | |
308 | self.parse_sess()).expect("parse error") | |
309 | } | |
310 | ||
311 | fn parse_stmt(&self, s: String) -> P<ast::Stmt> { | |
312 | parse::parse_stmt_from_source_str("<quote expansion>".to_string(), | |
313 | s, | |
314 | self.cfg(), | |
315 | self.parse_sess()).expect("parse error") | |
316 | } | |
317 | ||
318 | fn parse_expr(&self, s: String) -> P<ast::Expr> { | |
319 | parse::parse_expr_from_source_str("<quote expansion>".to_string(), | |
320 | s, | |
321 | self.cfg(), | |
322 | self.parse_sess()) | |
323 | } | |
324 | ||
325 | fn parse_tts(&self, s: String) -> Vec<TokenTree> { | |
326 | parse::parse_tts_from_source_str("<quote expansion>".to_string(), | |
327 | s, | |
328 | self.cfg(), | |
329 | self.parse_sess()) | |
330 | } | |
331 | } | |
332 | } | |
333 | ||
// These panicking parsing functions are used by the quote_*!() syntax extensions,
// but shouldn't be used otherwise.

/// Parses an expression from `parser`, panicking via `panictry!` on error.
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
    panictry!(parser.parse_expr())
}
339 | ||
/// Parses an item from `parser` (None if no item), panicking via `panictry!` on error.
pub fn parse_item_panic(parser: &mut Parser) -> Option<P<Item>> {
    panictry!(parser.parse_item())
}
343 | ||
/// Parses a pattern from `parser`, panicking via `panictry!` on error.
pub fn parse_pat_panic(parser: &mut Parser) -> P<Pat> {
    panictry!(parser.parse_pat())
}
347 | ||
/// Parses a match arm from `parser`, panicking via `panictry!` on error.
pub fn parse_arm_panic(parser: &mut Parser) -> Arm {
    panictry!(parser.parse_arm())
}
351 | ||
/// Parses a type from `parser`, panicking via `panictry!` on error.
pub fn parse_ty_panic(parser: &mut Parser) -> P<Ty> {
    panictry!(parser.parse_ty())
}
355 | ||
/// Parses a statement from `parser` (None if no statement), panicking via `panictry!` on error.
pub fn parse_stmt_panic(parser: &mut Parser) -> Option<P<Stmt>> {
    panictry!(parser.parse_stmt())
}
359 | ||
/// Parses an attribute from `parser` (`permit_inner` allows `#![...]`),
/// panicking via `panictry!` on error.
pub fn parse_attribute_panic(parser: &mut Parser, permit_inner: bool) -> ast::Attribute {
    panictry!(parser.parse_attribute(permit_inner))
}
363 | ||
/// Parses a function argument from `parser`, panicking via `panictry!` on error.
pub fn parse_arg_panic(parser: &mut Parser) -> Arg {
    panictry!(parser.parse_arg())
}
367 | ||
/// Parses a block from `parser`, panicking via `panictry!` on error.
pub fn parse_block_panic(parser: &mut Parser) -> P<Block> {
    panictry!(parser.parse_block())
}
371 | ||
/// Parses a meta item from `parser`, panicking via `panictry!` on error.
pub fn parse_meta_item_panic(parser: &mut Parser) -> P<ast::MetaItem> {
    panictry!(parser.parse_meta_item())
}
375 | ||
/// Parses a path from `parser` in the given `mode`, panicking via `panictry!` on error.
pub fn parse_path_panic(parser: &mut Parser, mode: PathParsingMode) -> ast::Path {
    panictry!(parser.parse_path(mode))
}
379 | ||
380 | pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, | |
381 | sp: Span, | |
382 | tts: &[TokenTree]) | |
383 | -> Box<base::MacResult+'cx> { | |
384 | let (cx_expr, expr) = expand_tts(cx, sp, tts); | |
385 | let expanded = expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]); | |
386 | base::MacEager::expr(expanded) | |
387 | } | |
388 | ||
389 | pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt, | |
390 | sp: Span, | |
391 | tts: &[TokenTree]) | |
392 | -> Box<base::MacResult+'cx> { | |
393 | let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec!(), tts); | |
394 | base::MacEager::expr(expanded) | |
395 | } | |
396 | ||
397 | pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt, | |
398 | sp: Span, | |
399 | tts: &[TokenTree]) | |
400 | -> Box<base::MacResult+'cx> { | |
401 | let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec!(), tts); | |
402 | base::MacEager::expr(expanded) | |
403 | } | |
404 | ||
405 | pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt, | |
406 | sp: Span, | |
407 | tts: &[TokenTree]) | |
408 | -> Box<base::MacResult+'cx> { | |
409 | let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec!(), tts); | |
410 | base::MacEager::expr(expanded) | |
411 | } | |
412 | ||
413 | pub fn expand_quote_arm(cx: &mut ExtCtxt, | |
414 | sp: Span, | |
415 | tts: &[TokenTree]) | |
416 | -> Box<base::MacResult+'static> { | |
417 | let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec!(), tts); | |
418 | base::MacEager::expr(expanded) | |
419 | } | |
420 | ||
421 | pub fn expand_quote_ty(cx: &mut ExtCtxt, | |
422 | sp: Span, | |
423 | tts: &[TokenTree]) | |
424 | -> Box<base::MacResult+'static> { | |
425 | let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec!(), tts); | |
426 | base::MacEager::expr(expanded) | |
427 | } | |
428 | ||
429 | pub fn expand_quote_stmt(cx: &mut ExtCtxt, | |
430 | sp: Span, | |
431 | tts: &[TokenTree]) | |
432 | -> Box<base::MacResult+'static> { | |
433 | let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec!(), tts); | |
434 | base::MacEager::expr(expanded) | |
435 | } | |
436 | ||
437 | pub fn expand_quote_attr(cx: &mut ExtCtxt, | |
438 | sp: Span, | |
439 | tts: &[TokenTree]) | |
440 | -> Box<base::MacResult+'static> { | |
441 | let expanded = expand_parse_call(cx, sp, "parse_attribute_panic", | |
442 | vec!(cx.expr_bool(sp, true)), tts); | |
443 | ||
444 | base::MacEager::expr(expanded) | |
445 | } | |
446 | ||
447 | pub fn expand_quote_arg(cx: &mut ExtCtxt, | |
448 | sp: Span, | |
449 | tts: &[TokenTree]) | |
450 | -> Box<base::MacResult+'static> { | |
451 | let expanded = expand_parse_call(cx, sp, "parse_arg_panic", vec!(), tts); | |
452 | base::MacEager::expr(expanded) | |
453 | } | |
454 | ||
455 | pub fn expand_quote_block(cx: &mut ExtCtxt, | |
456 | sp: Span, | |
457 | tts: &[TokenTree]) | |
458 | -> Box<base::MacResult+'static> { | |
459 | let expanded = expand_parse_call(cx, sp, "parse_block_panic", vec!(), tts); | |
460 | base::MacEager::expr(expanded) | |
461 | } | |
462 | ||
463 | pub fn expand_quote_meta_item(cx: &mut ExtCtxt, | |
464 | sp: Span, | |
465 | tts: &[TokenTree]) | |
466 | -> Box<base::MacResult+'static> { | |
467 | let expanded = expand_parse_call(cx, sp, "parse_meta_item_panic", vec!(), tts); | |
468 | base::MacEager::expr(expanded) | |
469 | } | |
470 | ||
471 | pub fn expand_quote_path(cx: &mut ExtCtxt, | |
472 | sp: Span, | |
473 | tts: &[TokenTree]) | |
474 | -> Box<base::MacResult+'static> { | |
475 | let mode = mk_parser_path(cx, sp, "LifetimeAndTypesWithoutColons"); | |
476 | let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec!(mode), tts); | |
477 | base::MacEager::expr(expanded) | |
478 | } | |
479 | ||
480 | pub fn expand_quote_matcher(cx: &mut ExtCtxt, | |
481 | sp: Span, | |
482 | tts: &[TokenTree]) | |
483 | -> Box<base::MacResult+'static> { | |
484 | let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); | |
485 | let mut vector = mk_stmts_let(cx, sp); | |
486 | vector.extend(statements_mk_tts(cx, &tts[..], true)); | |
487 | let block = cx.expr_block( | |
488 | cx.block_all(sp, | |
489 | vector, | |
490 | Some(cx.expr_ident(sp, id_ext("tt"))))); | |
491 | ||
492 | let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]); | |
493 | base::MacEager::expr(expanded) | |
494 | } | |
495 | ||
496 | fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> { | |
497 | strs.iter().map(|str| str_to_ident(&(*str))).collect() | |
498 | } | |
499 | ||
500 | fn id_ext(str: &str) -> ast::Ident { | |
501 | str_to_ident(str) | |
502 | } | |
503 | ||
504 | // Lift an ident to the expr that evaluates to that ident. | |
505 | fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> { | |
506 | let e_str = cx.expr_str(sp, ident.name.as_str()); | |
507 | cx.expr_method_call(sp, | |
508 | cx.expr_ident(sp, id_ext("ext_cx")), | |
509 | id_ext("ident_of"), | |
510 | vec!(e_str)) | |
511 | } | |
512 | ||
513 | // Lift a name to the expr that evaluates to that name | |
514 | fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> { | |
515 | let e_str = cx.expr_str(sp, ident.name.as_str()); | |
516 | cx.expr_method_call(sp, | |
517 | cx.expr_ident(sp, id_ext("ext_cx")), | |
518 | id_ext("name_of"), | |
519 | vec!(e_str)) | |
520 | } | |
521 | ||
522 | fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { | |
523 | let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name)); | |
524 | cx.expr_path(cx.path_global(sp, idents)) | |
525 | } | |
526 | ||
527 | fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { | |
528 | let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name)); | |
529 | cx.expr_path(cx.path_global(sp, idents)) | |
530 | } | |
531 | ||
532 | fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { | |
533 | let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name)); | |
534 | cx.expr_path(cx.path_global(sp, idents)) | |
535 | } | |
536 | ||
537 | fn mk_parser_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { | |
538 | let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("parser"), id_ext(name)); | |
539 | cx.expr_path(cx.path_global(sp, idents)) | |
540 | } | |
541 | ||
542 | fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> { | |
543 | let name = match bop { | |
544 | token::Plus => "Plus", | |
545 | token::Minus => "Minus", | |
546 | token::Star => "Star", | |
547 | token::Slash => "Slash", | |
548 | token::Percent => "Percent", | |
549 | token::Caret => "Caret", | |
550 | token::And => "And", | |
551 | token::Or => "Or", | |
552 | token::Shl => "Shl", | |
553 | token::Shr => "Shr" | |
554 | }; | |
555 | mk_token_path(cx, sp, name) | |
556 | } | |
557 | ||
558 | fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> { | |
559 | let name = match delim { | |
560 | token::Paren => "Paren", | |
561 | token::Bracket => "Bracket", | |
562 | token::Brace => "Brace", | |
563 | }; | |
564 | mk_token_path(cx, sp, name) | |
565 | } | |
566 | ||
/// Lifts a single `token::Token` to an expression that reconstructs that same
/// token at the expansion site. Payload-carrying tokens (operators,
/// delimiters, literals, idents, lifetimes, doc comments, matchers) are
/// handled by the `match` arms and return early; payload-free tokens fall
/// through to the variant-name table at the bottom.
#[allow(non_upper_case_globals)]
fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
    // Builds `token::Literal($name(<args>), <optional suffix name>)`.
    macro_rules! mk_lit {
        ($name: expr, $suffix: expr, $($args: expr),*) => {{
            let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
            let suffix = match $suffix {
                Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
                None => cx.expr_none(sp)
            };
            cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
        }}
    }
    match *tok {
        token::BinOp(binop) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec!(mk_binop(cx, sp, binop)));
        }
        token::BinOpEq(binop) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"),
                                vec!(mk_binop(cx, sp, binop)));
        }

        token::OpenDelim(delim) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"),
                                vec![mk_delim(cx, sp, delim)]);
        }
        token::CloseDelim(delim) => {
            return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"),
                                vec![mk_delim(cx, sp, delim)]);
        }

        // Literal tokens: re-intern the textual payload via `mk_name` and
        // rebuild with `mk_lit!`, carrying any suffix (`u8`, `f64`, ...).
        token::Literal(token::Byte(i), suf) => {
            let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
            return mk_lit!("Byte", suf, e_byte);
        }

        token::Literal(token::Char(i), suf) => {
            let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
            return mk_lit!("Char", suf, e_char);
        }

        token::Literal(token::Integer(i), suf) => {
            let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
            return mk_lit!("Integer", suf, e_int);
        }

        token::Literal(token::Float(fident), suf) => {
            let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
            return mk_lit!("Float", suf, e_fident);
        }

        token::Literal(token::Str_(ident), suf) => {
            return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
        }

        // Raw strings also record `n`, the number of `#` marks.
        token::Literal(token::StrRaw(ident, n), suf) => {
            return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
                           cx.expr_usize(sp, n))
        }

        token::Ident(ident, style) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "Ident"),
                                vec![mk_ident(cx, sp, ident),
                                     match style {
                                         ModName => mk_token_path(cx, sp, "ModName"),
                                         Plain => mk_token_path(cx, sp, "Plain"),
                                     }]);
        }

        token::Lifetime(ident) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "Lifetime"),
                                vec!(mk_ident(cx, sp, ident)));
        }

        token::DocComment(ident) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "DocComment"),
                                vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
        }

        // `$name:kind` matcher token: rebuild both idents and both styles.
        token::MatchNt(name, kind, namep, kindp) => {
            return cx.expr_call(sp,
                                mk_token_path(cx, sp, "MatchNt"),
                                vec!(mk_ident(cx, sp, name),
                                     mk_ident(cx, sp, kind),
                                     match namep {
                                         ModName => mk_token_path(cx, sp, "ModName"),
                                         Plain => mk_token_path(cx, sp, "Plain"),
                                     },
                                     match kindp {
                                         ModName => mk_token_path(cx, sp, "ModName"),
                                         Plain => mk_token_path(cx, sp, "Plain"),
                                     }));
        }

        // An interpolated AST fragment cannot be lifted back to source tokens.
        token::Interpolated(_) => panic!("quote! with interpolated token"),

        _ => ()
    }

    // Payload-free tokens: just name the variant.
    let name = match *tok {
        token::Eq => "Eq",
        token::Lt => "Lt",
        token::Le => "Le",
        token::EqEq => "EqEq",
        token::Ne => "Ne",
        token::Ge => "Ge",
        token::Gt => "Gt",
        token::AndAnd => "AndAnd",
        token::OrOr => "OrOr",
        token::Not => "Not",
        token::Tilde => "Tilde",
        token::At => "At",
        token::Dot => "Dot",
        token::DotDot => "DotDot",
        token::Comma => "Comma",
        token::Semi => "Semi",
        token::Colon => "Colon",
        token::ModSep => "ModSep",
        token::RArrow => "RArrow",
        token::LArrow => "LArrow",
        token::FatArrow => "FatArrow",
        token::Pound => "Pound",
        token::Dollar => "Dollar",
        token::Question => "Question",
        token::Underscore => "Underscore",
        token::Eof => "Eof",
        _ => panic!("unhandled token in quote!"),
    };
    mk_token_path(cx, sp, name)
}
699 | ||
/// Lifts one `TokenTree` to the statements that, at the expansion site, push
/// the equivalent token trees onto a local `Vec` named `tt` (bound earlier by
/// `mk_stmts_let`). `matcher` selects matcher mode: when true, `MatchNt` and
/// `Sequence` trees are lifted literally instead of being expanded/rejected.
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> {
    match *tt {
        // An antiquote `$ident`: splice in the tokens it evaluates to.
        TokenTree::Token(sp, SubstNt(ident, _)) => {
            // tt.extend($ident.to_tokens(ext_cx))

            let e_to_toks =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, ident),
                                    id_ext("to_tokens"),
                                    vec!(cx.expr_ident(sp, id_ext("ext_cx"))));
            let e_to_toks =
                cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);

            let e_push =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, id_ext("tt")),
                                    id_ext("extend"),
                                    vec!(e_to_toks));

            vec!(cx.stmt_expr(e_push))
        }
        // Outside matcher mode a `$name:kind` token is decomposed into its
        // sub-token-trees and each is lifted individually.
        ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
            let mut seq = vec![];
            for i in 0..tt.len() {
                seq.push(tt.get_tt(i));
            }
            statements_mk_tts(cx, &seq[..], matcher)
        }
        // Any other single token: `tt.push(TokenTree::Token(_sp, <tok>))`.
        TokenTree::Token(sp, ref tok) => {
            let e_sp = cx.expr_ident(sp, id_ext("_sp"));
            let e_tok = cx.expr_call(sp,
                                     mk_tt_path(cx, sp, "Token"),
                                     vec!(e_sp, expr_mk_token(cx, sp, tok)));
            let e_push =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, id_ext("tt")),
                                    id_ext("push"),
                                    vec!(e_tok));
            vec!(cx.stmt_expr(e_push))
        },
        // Delimited group: lift open delimiter, contents, close delimiter.
        TokenTree::Delimited(_, ref delimed) => {
            statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
                .chain(delimed.tts.iter()
                                  .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
                .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
                .collect()
        },
        // `$(...)sep op` repetition: only legal in matcher mode; rebuild a
        // `SequenceRepetition` wrapped in `Rc` and push it as one tree.
        TokenTree::Sequence(sp, ref seq) => {
            if !matcher {
                panic!("TokenTree::Sequence in quote!");
            }

            let e_sp = cx.expr_ident(sp, id_ext("_sp"));

            // Inner block builds the sequence's own `tt` vector.
            let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
            let mut tts_stmts = vec![stmt_let_tt];
            tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
            let e_tts = cx.expr_block(cx.block(sp, tts_stmts,
                                                   Some(cx.expr_ident(sp, id_ext("tt")))));
            let e_separator = match seq.separator {
                Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
                None => cx.expr_none(sp),
            };
            let e_op = match seq.op {
                ast::ZeroOrMore => mk_ast_path(cx, sp, "ZeroOrMore"),
                ast::OneOrMore => mk_ast_path(cx, sp, "OneOrMore"),
            };
            let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
                              cx.field_imm(sp, id_ext("separator"), e_separator),
                              cx.field_imm(sp, id_ext("op"), e_op),
                              cx.field_imm(sp, id_ext("num_captures"),
                                               cx.expr_usize(sp, seq.num_captures))];
            let seq_path = vec![id_ext("syntax"), id_ext("ast"), id_ext("SequenceRepetition")];
            let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
            let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
                                                        id_ext("rc"),
                                                        id_ext("Rc"),
                                                        id_ext("new")],
                                                   vec![e_seq_struct]);
            let e_tok = cx.expr_call(sp,
                                     mk_tt_path(cx, sp, "Sequence"),
                                     vec!(e_sp, e_rc_new));
            let e_push =
                cx.expr_method_call(sp,
                                    cx.expr_ident(sp, id_ext("tt")),
                                    id_ext("push"),
                                    vec!(e_tok));
            vec!(cx.stmt_expr(e_push))
        }
    }
}
791 | ||
/// Splits a `quote_*!(cx_expr, tokens...)` invocation into the context
/// expression and the remaining token trees, re-parsing with `quote_depth`
/// bumped so `$foo` is treated as an antiquote.
fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
                            -> (P<ast::Expr>, Vec<TokenTree>) {
    // NB: It appears that the main parser loses its mind if we consider
    // $foo as a SubstNt during the main parse, so we have to re-parse
    // under quote_depth > 0. This is silly and should go away; the _guess_ is
    // it has to do with transition away from supporting old-style macros, so
    // try removing it when enough of them are gone.

    let mut p = cx.new_parser_from_tts(tts);
    p.quote_depth += 1;

    // First argument is the `ExtCtxt` expression, then a comma, then the
    // quoted tokens themselves.
    let cx_expr = panictry!(p.parse_expr());
    if !p.eat(&token::Comma) {
        let _ = p.diagnostic().fatal("expected token `,`");
    }

    let tts = panictry!(p.parse_all_token_trees());
    p.abort_if_errors();

    (cx_expr, tts)
}
813 | ||
/// Builds the two leading `let` statements every quote expansion starts with:
/// `let _sp = ext_cx.call_site();` and `let mut tt = Vec::new();`.
fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<P<ast::Stmt>> {
    // We also bind a single value, sp, to ext_cx.call_site()
    //
    // This causes every span in a token-tree quote to be attributed to the
    // call site of the extension using the quote. We can't really do much
    // better since the source of the quote may well be in a library that
    // was not even parsed by this compilation run, that the user has no
    // source code for (eg. in libsyntax, which they're just _using_).
    //
    // The old quasiquoter had an elaborate mechanism for denoting input
    // file locations from which quotes originated; unfortunately this
    // relied on feeding the source string of the quote back into the
    // compiler (which we don't really want to do) and, in any case, only
    // pushed the problem a very small step further back: an error
    // resulting from a parse of the resulting quote is still attributed to
    // the site the string literal occurred, which was in a source file
    // _other_ than the one the user has control over. For example, an
    // error in a quote from the protocol compiler, invoked in user code
    // using macro_rules! for example, will be attributed to the macro_rules.rs
    // file in libsyntax, which the user might not even have source to (unless
    // they happen to have a compiler on hand). Over all, the phase distinction
    // just makes quotes "hard to attribute". Possibly this could be fixed
    // by recreating some of the original qq machinery in the tt regime
    // (pushing fake FileMaps onto the parser to account for original sites
    // of quotes, for example) but at this point it seems not likely to be
    // worth the hassle.

    let e_sp = cx.expr_method_call(sp,
                                   cx.expr_ident(sp, id_ext("ext_cx")),
                                   id_ext("call_site"),
                                   Vec::new());

    let stmt_let_sp = cx.stmt_let(sp, false,
                                  id_ext("_sp"),
                                  e_sp);

    // `tt` is mutable: the per-token statements push into it.
    let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));

    vec!(stmt_let_sp, stmt_let_tt)
}
854 | ||
855 | fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> { | |
856 | let mut ss = Vec::new(); | |
857 | for tt in tts { | |
858 | ss.extend(statements_mk_tt(cx, tt, matcher)); | |
859 | } | |
860 | ss | |
861 | } | |
862 | ||
863 | fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) | |
864 | -> (P<ast::Expr>, P<ast::Expr>) { | |
865 | let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); | |
866 | ||
867 | let mut vector = mk_stmts_let(cx, sp); | |
868 | vector.extend(statements_mk_tts(cx, &tts[..], false)); | |
869 | let block = cx.expr_block( | |
870 | cx.block_all(sp, | |
871 | vector, | |
872 | Some(cx.expr_ident(sp, id_ext("tt"))))); | |
873 | ||
874 | (cx_expr, block) | |
875 | } | |
876 | ||
/// Wraps `expr` in a block that first glob-imports each path in `imports` and
/// binds `ext_cx` to a re-borrow of `cx_expr`, so names like
/// `syntax::ext::quote::rt::*` are in scope when `expr` is evaluated.
fn expand_wrapper(cx: &ExtCtxt,
                  sp: Span,
                  cx_expr: P<ast::Expr>,
                  expr: P<ast::Expr>,
                  imports: &[&[&str]]) -> P<ast::Expr> {
    // Explicitly borrow to avoid moving from the invoker (#16992)
    let cx_expr_borrow = cx.expr_addr_of(sp, cx.expr_deref(sp, cx_expr));
    let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr_borrow);

    let stmts = imports.iter().map(|path| {
        // make item: `use ...;`
        let path = path.iter().map(|s| s.to_string()).collect();
        cx.stmt_item(sp, cx.item_use_glob(sp, ast::Inherited, ids_ext(path)))
    }).chain(Some(stmt_let_ext_cx)).collect();

    cx.expr_block(cx.block_all(sp, stmts, Some(expr)))
}
894 | ||
895 | fn expand_parse_call(cx: &ExtCtxt, | |
896 | sp: Span, | |
897 | parse_method: &str, | |
898 | arg_exprs: Vec<P<ast::Expr>> , | |
899 | tts: &[TokenTree]) -> P<ast::Expr> { | |
900 | let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); | |
901 | ||
902 | let cfg_call = || cx.expr_method_call( | |
903 | sp, cx.expr_ident(sp, id_ext("ext_cx")), | |
904 | id_ext("cfg"), Vec::new()); | |
905 | ||
906 | let parse_sess_call = || cx.expr_method_call( | |
907 | sp, cx.expr_ident(sp, id_ext("ext_cx")), | |
908 | id_ext("parse_sess"), Vec::new()); | |
909 | ||
910 | let new_parser_call = | |
911 | cx.expr_call(sp, | |
912 | cx.expr_ident(sp, id_ext("new_parser_from_tts")), | |
913 | vec!(parse_sess_call(), cfg_call(), tts_expr)); | |
914 | ||
915 | let path = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext(parse_method)]; | |
916 | let mut args = vec![cx.expr_mut_addr_of(sp, new_parser_call)]; | |
917 | args.extend(arg_exprs); | |
918 | let expr = cx.expr_call_global(sp, path, args); | |
919 | ||
920 | if parse_method == "parse_attribute" { | |
921 | expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"], | |
922 | &["syntax", "parse", "attr"]]) | |
923 | } else { | |
924 | expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]) | |
925 | } | |
926 | } |