compiler/rustc_expand/src/mbe/quoted.rs (rustc 1.64.0)
use crate::mbe::macro_parser::count_metavar_decls;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};

use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::{tokenstream, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
use rustc_session::parse::{feature_err, ParseSess};
use rustc_span::symbol::{kw, sym, Ident};

use rustc_span::edition::Edition;
use rustc_span::{Span, SyntaxContext};

const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
    `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
    `literal`, `path`, `meta`, `tt`, `item` and `vis`";

/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
///
/// # Parameters
///
/// - `input`: a token stream to read from, the contents of which we are parsing.
/// - `parsing_patterns`: `parse` can be used to parse either the "patterns" or the "body" of a
///   macro. Both take roughly the same form _except_ that:
///   - In a pattern, metavars are declared with their "matcher" type. For example `$var:expr` or
///     `$id:ident`. In this example, `expr` and `ident` are "matchers". They are not present in the
///     body of a macro rule -- just in the pattern.
///   - Metavariable expressions are only valid in the "body", not the "pattern".
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `node_id`: the NodeId of the macro we are parsing.
/// - `features`: language features so we can do feature gating.
///
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
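///
/// # Example
///
/// An illustrative rule (not from this crate) showing the two modes:
///
/// ```rust
/// macro_rules! add_one {
///     // The pattern: with `parsing_patterns == true`, `$e:expr` is read as a
///     // metavariable declaration whose matcher is the `expr` fragment.
///     ($e:expr) => {
///         // The body: with `parsing_patterns == false`, `$e` carries no
///         // matcher and is read as a plain metavariable use.
///         $e + 1
///     };
/// }
///
/// assert_eq!(add_one!(2), 3);
/// ```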
pub(super) fn parse(
    input: tokenstream::TokenStream,
    parsing_patterns: bool,
    sess: &ParseSess,
    node_id: NodeId,
    features: &Features,
    edition: Edition,
) -> Vec<TokenTree> {
    // Will contain the final collection of `self::TokenTree`
    let mut result = Vec::new();

    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
    // additional trees if need be.
    let mut trees = input.into_trees();
    while let Some(tree) = trees.next() {
        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
        // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
        let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition);
        match tree {
            TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                let span = match trees.next() {
                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
                        match trees.next() {
                            Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                Some((frag, _)) => {
                                    let span = token.span.with_lo(start_sp.lo());

                                    let kind =
                                        token::NonterminalKind::from_symbol(frag.name, || {
                                            // FIXME(#85708) - once we properly decode a foreign
                                            // crate's `SyntaxContext::root`, then we can replace
                                            // this with just `span.edition()`. A
                                            // `SyntaxContext::root()` from the current crate will
                                            // have the edition of the current crate, and a
                                            // `SyntaxContext::root()` from a foreign crate will
                                            // have the edition of that crate (which we manually
                                            // retrieve via the `edition` parameter).
                                            if span.ctxt() == SyntaxContext::root() {
                                                edition
                                            } else {
                                                span.edition()
                                            }
                                        })
                                        .unwrap_or_else(
                                            || {
                                                let msg = format!(
                                                    "invalid fragment specifier `{}`",
                                                    frag.name
                                                );
                                                sess.span_diagnostic
                                                    .struct_span_err(span, &msg)
                                                    .help(VALID_FRAGMENT_NAMES_MSG)
                                                    .emit();
                                                token::NonterminalKind::Ident
                                            },
                                        );
                                    result.push(TokenTree::MetaVarDecl(span, ident, Some(kind)));
                                    continue;
                                }
                                _ => token.span,
                            },
                            tree => tree.as_ref().map_or(span, tokenstream::TokenTree::span),
                        }
                    }
                    tree => tree.as_ref().map_or(start_sp, tokenstream::TokenTree::span),
                };

                result.push(TokenTree::MetaVarDecl(span, ident, None));
            }

            // Not a metavar or no matchers allowed, so just return the tree
            _ => result.push(tree),
        }
    }
    result
}

/// Asks for the `macro_metavar_expr` feature if it is not already declared.
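///
/// # Example
///
/// Illustrative only (the macro below is not part of this file): a macro body
/// that uses a meta-variable expression such as `${count(x)}` triggers this
/// error unless the crate enables the gate on a nightly compiler:
///
/// ```text
/// #![feature(macro_metavar_expr)]
///
/// macro_rules! how_many {
///     ( $( $x:ident ),* ) => { ${count(x)} };
/// }
/// ```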
fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess, span: Span) {
    if !features.macro_metavar_expr {
        let msg = "meta-variable expressions are unstable";
        feature_err(&sess, sym::macro_metavar_expr, span, msg).emit();
    }
}

/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
/// for use in parsing a macro.
///
/// Converting the given tree may involve reading more tokens.
///
/// # Parameters
///
/// - `tree`: the tree we wish to convert.
/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to
///   finish converting `tree`.
/// - `parsing_patterns`: same as [parse].
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `features`: language features so we can do feature gating.
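///
/// # Example
///
/// Illustrative inputs (not from the original source) and the kind of
/// `TokenTree` each `$`-prefixed form is turned into:
///
/// ```text
/// $( $i:ident ),*    // `$` + parenthesized group: a `Sequence` repetition
/// ${count(i)}        // `$` + braced group (body only): a `MetaVarExpr`
/// $my_var            // `$` + identifier: a `MetaVar`
/// $crate             // `$` + `crate`: kept as the special `$crate` token
/// $$                 // `$` + `$` (body only): an escaped dollar sign
/// ```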
fn parse_tree(
    tree: tokenstream::TokenTree,
    outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
    parsing_patterns: bool,
    sess: &ParseSess,
    node_id: NodeId,
    features: &Features,
    edition: Edition,
) -> TokenTree {
    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `trees`
        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
            // FIXME: Handle `Invisible`-delimited groups in a more systematic way
            // during parsing.
            let mut next = outer_trees.next();
            let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
            if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
                trees = Box::new(tts.into_trees());
                next = trees.next();
            } else {
                trees = Box::new(outer_trees);
            }

            match next {
                // `tree` is followed by a delimited set of token trees.
                Some(tokenstream::TokenTree::Delimited(delim_span, delim, tts)) => {
                    if parsing_patterns {
                        if delim != Delimiter::Parenthesis {
                            span_dollar_dollar_or_metavar_in_the_lhs_err(
                                sess,
                                &Token { kind: token::OpenDelim(delim), span: delim_span.entire() },
                            );
                        }
                    } else {
                        match delim {
                            Delimiter::Brace => {
                                // The delimiter is `{`. This indicates the beginning
                                // of a meta-variable expression (e.g. `${count(ident)}`).
                                // Try to parse the meta-variable expression.
                                match MetaVarExpr::parse(&tts, delim_span.entire(), sess) {
                                    Err(mut err) => {
                                        err.emit();
                                        // Return early with the `$` that was already read, so
                                        // that no unrelated diagnostics are emitted afterwards.
                                        return TokenTree::token(token::Dollar, span);
                                    }
                                    Ok(elem) => {
                                        maybe_emit_macro_metavar_expr_feature(
                                            features,
                                            sess,
                                            delim_span.entire(),
                                        );
                                        return TokenTree::MetaVarExpr(delim_span, elem);
                                    }
                                }
                            }
                            Delimiter::Parenthesis => {}
                            _ => {
                                let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                                let msg = format!("expected `(` or `{{`, found `{}`", tok);
                                sess.span_diagnostic.span_err(delim_span.entire(), &msg);
                            }
                        }
                    }
                    // If we didn't find a metavar expression above, then we must have a
                    // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
                    // contents of the sequence itself
                    let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
                    // Get the Kleene operator and optional separator
                    let (separator, kleene) =
                        parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
                    // Count the number of captured "names" (i.e., named metavars)
                    let num_captures =
                        if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
                    TokenTree::Sequence(
                        delim_span,
                        SequenceRepetition { tts: sequence, separator, kleene, num_captures },
                    )
                }

                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
                // special metavariable that names the crate of the invocation.
                Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
                    let (ident, is_raw) = token.ident().unwrap();
                    let span = ident.span.with_lo(span.lo());
                    if ident.name == kw::Crate && !is_raw {
                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                    } else {
                        TokenTree::MetaVar(span, ident)
                    }
                }

                // `tree` is followed by another `$`. This is an escaped `$`.
                Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
                    if parsing_patterns {
                        span_dollar_dollar_or_metavar_in_the_lhs_err(
                            sess,
                            &Token { kind: token::Dollar, span },
                        );
                    } else {
                        maybe_emit_macro_metavar_expr_feature(features, sess, span);
                    }
                    TokenTree::token(token::Dollar, span)
                }

                // `tree` is followed by some other token. This is an error.
                Some(tokenstream::TokenTree::Token(token, _)) => {
                    let msg = format!(
                        "expected identifier, found `{}`",
                        pprust::token_to_string(&token),
                    );
                    sess.span_diagnostic.span_err(token.span, &msg);
                    TokenTree::MetaVar(token.span, Ident::empty())
                }

                // There are no more tokens. Just return the `$` we already have.
                None => TokenTree::token(token::Dollar, span),
            }
        }

        // `tree` is an arbitrary token. Keep it.
        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token),

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
            span,
            Delimited {
                delim,
                tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
            },
        ),
    }
}

/// Takes a token and returns `Some(KleeneOp)` if the token is `+`, `*`, or `?`. Otherwise, returns
/// `None`.
fn kleene_op(token: &Token) -> Option<KleeneOp> {
    match token.kind {
        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
        token::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

/// Parse the next token tree of the input looking for a KleeneOp. Returns
///
/// - `Ok(Ok((op, span)))` if the next token tree is a KleeneOp
/// - `Ok(Err(token))` if the next token tree is a token but not a KleeneOp
/// - `Err(span)` if the next token tree is not a token
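///
/// # Example
///
/// Illustrative next-token outcomes (not from the original source):
///
/// ```text
/// *       -> Ok(Ok((KleeneOp::ZeroOrMore, span)))
/// ,       -> Ok(Err(token))   // a token, but not a Kleene operator
/// ( ... ) -> Err(span)        // a delimited group, not a single token
/// ```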
fn parse_kleene_op(
    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
    span: Span,
) -> Result<Result<(KleeneOp, Span), Token>, Span> {
    match input.next() {
        Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) {
            Some(op) => Ok(Ok((op, token.span))),
            None => Ok(Err(token)),
        },
        tree => Err(tree.as_ref().map_or(span, tokenstream::TokenTree::span)),
    }
}

/// Attempt to parse a single Kleene operator, possibly with a separator.
///
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invocation of a macro.
///
/// This function will take some input iterator `input` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
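///
/// # Example
///
/// Illustrative repetition endings (not from the original source) and their results:
///
/// ```text
/// $(a),*    // separator `,`, Kleene operator `*`  -> (Some(`,` token), `*`)
/// $(a)+     // no separator, Kleene operator `+`   -> (None, `+`)
/// $(a),?    // error: `?` does not take a separator; a dummy value is returned
/// ```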
fn parse_sep_and_kleene_op(
    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
    span: Span,
    sess: &ParseSess,
) -> (Option<Token>, KleeneToken) {
    // We basically look at two token trees here, denoted as #1 and #2 below
    let span = match parse_kleene_op(input, span) {
        // #1 is a `?`, `+`, or `*` KleeneOp
        Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),

        // #1 is a separator followed by #2, a KleeneOp
        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
            // #2 is the `?` Kleene op, which does not take a separator (error)
            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                // Error!
                sess.span_diagnostic.span_err(
                    token.span,
                    "the `?` macro repetition operator does not take a separator",
                );

                // Return a dummy
                return (None, KleeneToken::new(KleeneOp::ZeroOrMore, span));
            }

            // #2 is a KleeneOp :D
            Ok(Ok((op, span))) => return (Some(token), KleeneToken::new(op, span)),

            // #2 is a random token or not a token at all :(
            Ok(Err(Token { span, .. })) | Err(span) => span,
        },

        // #1 is not a token
        Err(span) => span,
    };

    // If we ever get to this point, we have experienced an "unexpected token" error
    sess.span_diagnostic.span_err(span, "expected one of: `*`, `+`, or `?`");

    // Return a dummy
    (None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
}

// `$$` or a meta-variable expression appeared in the lhs of a macro, but it shouldn't.
//
// For example, `macro_rules! foo { ( ${length()} ) => {} }`
fn span_dollar_dollar_or_metavar_in_the_lhs_err<'sess>(sess: &'sess ParseSess, token: &Token) {
    sess.span_diagnostic
        .span_err(token.span, &format!("unexpected token: {}", pprust::token_to_string(token)));
    sess.span_diagnostic.span_note_without_error(
        token.span,
        "`$$` and meta-variable expressions are not allowed inside macro parameter definitions",
    );
}