// compiler/rustc_expand/src/mbe/transcribe.rs
use crate::base::ExtCtxt;
use crate::mbe;
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, NtTT, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::MacCall;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{pluralize, PResult};
use rustc_span::hygiene::{ExpnId, Transparency};
use rustc_span::symbol::MacroRulesNormalizedIdent;
use rustc_span::Span;

use smallvec::{smallvec, SmallVec};
use std::mem;

// A Marker adds the given mark to the syntax context.
struct Marker(ExpnId, Transparency);

impl MutVisitor for Marker {
    fn visit_span(&mut self, span: &mut Span) {
        *span = span.apply_mark(self.0, self.1)
    }

    fn visit_mac(&mut self, mac: &mut MacCall) {
        mut_visit::noop_visit_mac(mac, self)
    }
}

/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
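///
/// For example (illustrative), when transcribing the RHS `a $(b $x)* c`, the whole RHS is walked
/// as a `Delimited` frame (with `NoDelim`), and a `Sequence` frame is pushed on top of it while
/// the `$(b $x)*` repetition is being expanded.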
enum Frame {
    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
}

impl Frame {
    /// Construct a new frame around the delimited set of tokens.
    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
        Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
    }
}

impl Iterator for Frame {
    type Item = mbe::TokenTree;

    fn next(&mut self) -> Option<mbe::TokenTree> {
        match *self {
            Frame::Delimited { ref forest, ref mut idx, .. } => {
                *idx += 1;
                forest.tts.get(*idx - 1).cloned()
            }
            Frame::Sequence { ref forest, ref mut idx, .. } => {
                *idx += 1;
                forest.tts.get(*idx - 1).cloned()
            }
        }
    }
}

/// This can do Macro-By-Example transcription.
/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
///   invocation. We are assuming we already know there is a match.
/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
///
/// For example,
///
/// ```rust
/// macro_rules! foo {
///     ($id:ident) => { println!("{}", stringify!($id)); }
/// }
///
/// foo!(bar);
/// ```
///
/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
///
/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
///
/// Along the way, we do some additional error checking.
pub(super) fn transcribe<'a>(
    cx: &ExtCtxt<'a>,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: Vec<mbe::TokenTree>,
    transparency: Transparency,
) -> PResult<'a, TokenStream> {
    // Nothing for us to transcribe...
    if src.is_empty() {
        return Ok(TokenStream::default());
    }

    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
    let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];

    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
    // `repeats` keeps track of where we are in matching at each level, with the last element being
    // the most deeply nested sequence. This is used as a stack.
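    // For example (illustrative), while transcribing the innermost body of a nested repetition
    // `$( $( ... )* )*`, `repeats` might be `[(0, 2), (1, 3)]`: the first of two outer repetitions
    // and the second of three inner ones.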
    let mut repeats = Vec::new();

    // `result` contains the resulting token stream from the TokenTree we just finished processing.
    // At the end, this will contain the full result of transcription, but at arbitrary points
    // during `transcribe`, `result` will contain subsets of the final result.
    //
    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
    // `result_stack` and clear `result`. We will then produce the results of transcribing the
    // TokenTree into `result`. Then, as we unwind back out of the `TokenTree`, we will pop the
    // `result_stack` and append `result` to it to produce the new `result` up to that point.
    //
    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
    // again, and we are done transcribing.
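    // For example (illustrative), while transcribing an RHS like `a (b c) d`, `result` holds `a`
    // when we reach the inner delimiter; it is pushed onto `result_stack`, the inner `b c` is
    // built up in a fresh `result`, and on unwinding the finished delimited tree is appended to
    // the popped `a`.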
    let mut result: Vec<TreeAndSpacing> = Vec::new();
    let mut result_stack = Vec::new();
    let mut marker = Marker(cx.current_expansion.id, transparency);

    loop {
        // Look at the last frame on the stack.
        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
            // If it still has a TokenTree we have not looked at yet, use that tree.
            tree
        } else {
            // This else-case never produces a value for `tree` (it `continue`s or `return`s).

            // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
            // go back to the beginning of the sequence.
            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if repeat_idx < repeat_len {
                    *idx = 0;
                    if let Some(sep) = sep {
                        result.push(TokenTree::Token(sep.clone()).into());
                    }
                    continue;
                }
            }

            // We are done with the top of the stack. Pop it. Depending on what it was, we do
            // different things. Note that the outermost item must be the delimited, wrapped RHS
            // that was passed in originally to `transcribe`.
            match stack.pop().unwrap() {
                // Done with a sequence. Pop from repeats.
                Frame::Sequence { .. } => {
                    repeats.pop();
                }

                // We are done processing a Delimited. If this is the top-level delimited, we are
                // done. Otherwise, we unwind the result_stack to append what we have produced to
                // any previous results.
                Frame::Delimited { forest, span, .. } => {
                    if result_stack.is_empty() {
                        // No results left to compute! We are back at the top-level.
                        return Ok(TokenStream::new(result));
                    }

                    // Step back into the parent Delimited.
                    let tree = TokenTree::Delimited(span, forest.delim, TokenStream::new(result));
                    result = result_stack.pop().unwrap();
                    result.push(tree.into());
                }
            }
            continue;
        };

        // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
        // `tree` contains the next `TokenTree` to be processed.
        match tree {
            // We are descending into a sequence. We first make sure that the matchers in the RHS
            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
            // macro writer has made a mistake.
            seq @ mbe::TokenTree::Sequence(..) => {
                match lockstep_iter_size(&seq, interp, &repeats) {
                    LockstepIterSize::Unconstrained => {
                        return Err(cx.struct_span_err(
                            seq.span(), /* blame macro writer */
                            "attempted to repeat an expression containing no syntax variables \
                             matched as repeating at this depth",
                        ));
                    }

                    LockstepIterSize::Contradiction(ref msg) => {
                        // FIXME: this really ought to be caught at macro definition time... It
                        // happens when two meta-variables are used in the same repetition in a
                        // sequence, but they come from different sequence matchers and repeat
                        // different amounts.
                        return Err(cx.struct_span_err(seq.span(), &msg[..]));
                    }

                    LockstepIterSize::Constraint(len, _) => {
                        // We do this to avoid an extra clone above. We know that this is a
                        // sequence already.
                        let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
                            (sp, seq)
                        } else {
                            unreachable!()
                        };

                        // Is the repetition empty?
                        if len == 0 {
                            if seq.kleene.op == mbe::KleeneOp::OneOrMore {
                                // FIXME: this really ought to be caught at macro definition
                                // time... It happens when the Kleene operator in the matcher and
                                // the body for the same meta-variable do not match.
                                return Err(cx.struct_span_err(
                                    sp.entire(),
                                    "this must repeat at least once",
                                ));
                            }
                        } else {
                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
                            // is the total number of repetitions we should generate.
                            repeats.push((0, len));

                            // The first time we encounter the sequence we push it to the stack. It
                            // then gets reused (see the beginning of the loop) until we are done
                            // repeating.
                            stack.push(Frame::Sequence {
                                idx: 0,
                                sep: seq.separator.clone(),
                                forest: seq,
                            });
                        }
                    }
                }
            }

            // Replace the meta-var with the matched token tree from the invocation.
            mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
                // Find the matched nonterminal from the macro invocation, and use it to replace
                // the meta-var.
                let ident = MacroRulesNormalizedIdent::new(original_ident);
                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
                    if let MatchedNonterminal(ref nt) = cur_matched {
                        // FIXME #2887: why do we apply a mark when matching a token tree meta-var
                        // (e.g. `$x:tt`), but not when we are matching any other type of token
                        // tree?
                        if let NtTT(ref tt) = **nt {
                            result.push(tt.clone().into());
                        } else {
                            marker.visit_span(&mut sp);
                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                            result.push(token.into());
                        }
                    } else {
                        // We were unable to descend far enough. This is an error.
                        return Err(cx.struct_span_err(
                            sp, /* blame the macro writer */
                            &format!("variable '{}' is still repeating at this depth", ident),
                        ));
                    }
                } else {
                    // If we aren't able to match the meta-var, we push it back into the result but
                    // with modified syntax context. (I believe this supports nested macros).
                    marker.visit_span(&mut sp);
                    marker.visit_ident(&mut original_ident);
                    result.push(TokenTree::token(token::Dollar, sp).into());
                    result.push(TokenTree::Token(Token::from_ast_ident(original_ident)).into());
                }
            }

            // If we are entering a new delimiter, we push its contents to the `stack` to be
            // processed, and we push all of the currently produced results to the `result_stack`.
            // We will produce all of the results of the inside of the `Delimited` and then we will
            // jump back out of the Delimited, pop the result_stack and add the new results back to
            // the previous results (from outside the Delimited).
            mbe::TokenTree::Delimited(mut span, delimited) => {
                mut_visit::visit_delim_span(&mut span, &mut marker);
                stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
                result_stack.push(mem::take(&mut result));
            }

            // Nothing much to do here. Just push the token to the result, being careful to
            // preserve syntax context.
            mbe::TokenTree::Token(token) => {
                let mut tt = TokenTree::Token(token);
                marker.visit_tt(&mut tt);
                result.push(tt.into());
            }

            // There should be no meta-var declarations in the invocation of a macro.
            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    }
}

/// Look up the meta-var named `ident` and return the matched token tree from the invocation using
/// the set of matches `interpolations`.
///
/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
/// made a mistake, and we return `None`.
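///
/// For example (illustrative), with a matcher `$( $( $x:ident )* );*` invoked as `a b; c d e`,
/// looking up `x` with `repeats == [(1, 2), (2, 3)]` descends into the second outer repetition
/// and then takes its third element, returning the match for `e`.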
fn lookup_cur_matched<'a>(
    ident: MacroRulesNormalizedIdent,
    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
    interpolations.get(&ident).map(|matched| {
        let mut matched = matched;
        for &(idx, _) in repeats {
            match matched {
                MatchedNonterminal(_) => break,
                MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(),
            }
        }

        matched
    })
}

/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
/// has made a mistake (either the macro writer or caller).
#[derive(Clone)]
enum LockstepIterSize {
    /// No constraints on length of matcher. This is true for any TokenTree variants except a
    /// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
    Unconstrained,

    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
    /// meta-var are returned.
    Constraint(usize, MacroRulesNormalizedIdent),

    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
    Contradiction(String),
}

impl LockstepIterSize {
    /// Find incompatibilities in matcher/invocation sizes.
    /// - `Unconstrained` is compatible with everything.
    /// - `Contradiction` is incompatible with everything.
    /// - `Constraint(len)` is only compatible with other constraints of the same length.
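    ///
    /// For example, `Constraint(2, x).with(Constraint(2, y))` keeps the left-hand constraint,
    /// while `Constraint(2, x).with(Constraint(3, y))` produces a `Contradiction` whose message
    /// names both meta-variables (here `x` and `y` stand in for meta-variable idents).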
    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
                LockstepIterSize::Unconstrained => self,
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!(
                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
                        l_id,
                        l_len,
                        pluralize!(l_len),
                        r_id,
                        r_len,
                        pluralize!(r_len),
                    );
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}

/// Given a `tree`, make sure that all sequences have the same length as the matches for the
/// appropriate meta-vars in `interpolations`.
///
/// Note that if `repeats` does not match the exact correct depth of a meta-var,
/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
/// multiple nested matcher sequences.
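///
/// For example (illustrative), for the sequence `$( $x $y )*` where `x` matched 3 items and `y`
/// matched 3 items, the result is `Constraint(3, x)`; if `y` had matched 4 items instead, the
/// result would be a `Contradiction`.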
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
    match *tree {
        TokenTree::Delimited(_, ref delimed) => {
            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::Sequence(_, ref seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
            let name = MacroRulesNormalizedIdent::new(name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
            }
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}