// vendor/rustc-ap-rustc_expand/src/mbe/quoted.rs
use crate::mbe::macro_parser;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree};

use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream;
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
use rustc_session::parse::{feature_err, ParseSess};
use rustc_span::symbol::{kw, sym, Ident};

use rustc_span::Span;

use rustc_data_structures::sync::Lrc;

const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
                                        `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
                                        `literal`, `path`, `meta`, `tt`, `item` and `vis`";

/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
///
/// # Parameters
///
/// - `input`: a token stream to read from, the contents of which we are parsing.
/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
///   macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
///   their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and
///   `ident` are "matchers". They are not present in the body of a macro rule -- just in the
///   pattern, so we pass a parameter to indicate whether to expect them or not.
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `node_id`: the NodeId of the macro we are parsing.
/// - `features`: language features so we can do feature gating.
///
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
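///
/// # Example
///
/// As a rough sketch (spans omitted, structure abbreviated), parsing the matcher
/// `$( $i:ident ),+` with `expect_matchers = true` yields a single sequence tree:
///
/// ```text
/// Sequence(SequenceRepetition {
///     tts: [MetaVarDecl($i, ident)],
///     separator: Some(`,`),
///     kleene: `+` (OneOrMore),
///     num_captures: 1,
/// })
/// ```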
pub(super) fn parse(
    input: tokenstream::TokenStream,
    expect_matchers: bool,
    sess: &ParseSess,
    node_id: NodeId,
    features: &Features,
) -> Vec<TokenTree> {
    // Will contain the final collection of `self::TokenTree`
    let mut result = Vec::new();

    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
    // additional trees if need be.
    let mut trees = input.trees();
    while let Some(tree) = trees.next() {
        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
        // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id, features);
        match tree {
            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                let span = match trees.next() {
                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
                        match trees.next() {
                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                                Some((frag, _)) => {
                                    let span = token.span.with_lo(start_sp.lo());

                                    match frag.name {
                                        sym::pat2018 | sym::pat2021 => {
                                            if !features.edition_macro_pats {
                                                feature_err(
                                                    sess,
                                                    sym::edition_macro_pats,
                                                    frag.span,
                                                    "`pat2018` and `pat2021` are unstable.",
                                                )
                                                .emit();
                                            }
                                        }
                                        _ => {}
                                    }

                                    let kind =
                                        token::NonterminalKind::from_symbol(frag.name, || {
                                            span.edition()
                                        })
                                        .unwrap_or_else(
                                            || {
                                                let msg = format!(
                                                    "invalid fragment specifier `{}`",
                                                    frag.name
                                                );
                                                sess.span_diagnostic
                                                    .struct_span_err(span, &msg)
                                                    .help(VALID_FRAGMENT_NAMES_MSG)
                                                    .emit();
                                                token::NonterminalKind::Ident
                                            },
                                        );
                                    result.push(TokenTree::MetaVarDecl(span, ident, Some(kind)));
                                    continue;
                                }
                                _ => token.span,
                            },
                            tree => tree.as_ref().map_or(span, tokenstream::TokenTree::span),
                        }
                    }
                    tree => tree.as_ref().map_or(start_sp, tokenstream::TokenTree::span),
                };
                if node_id != DUMMY_NODE_ID {
                    // Macros loaded from other crates have dummy node ids.
                    sess.missing_fragment_specifiers.borrow_mut().insert(span, node_id);
                }
                result.push(TokenTree::MetaVarDecl(span, ident, None));
            }

            // Not a metavar or no matchers allowed, so just return the tree
            _ => result.push(tree),
        }
    }
    result
}

/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
/// for use in parsing a macro.
///
/// Converting the given tree may involve reading more tokens.
///
/// # Parameters
///
/// - `tree`: the tree we wish to convert.
/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to
///   finish converting `tree`.
/// - `expect_matchers`: same as for `parse` (see above).
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `node_id`: the NodeId of the macro we are parsing.
/// - `features`: language features so we can do feature gating.
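///
/// # Example
///
/// A rough sketch of the interesting cases (spans omitted): a `$` followed by the identifier `x`
/// becomes a `TokenTree::MetaVar` for `$x`; a `$` followed by a parenthesized group has the
/// group's contents parsed recursively with `parse` and wrapped, together with the separator and
/// Kleene operator that follow, into a `TokenTree::Sequence`; any other token is kept as a plain
/// `TokenTree::Token`.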
fn parse_tree(
    tree: tokenstream::TokenTree,
    outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
    expect_matchers: bool,
    sess: &ParseSess,
    node_id: NodeId,
    features: &Features,
) -> TokenTree {
    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `trees`
        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
            // FIXME: Handle `None`-delimited groups in a more systematic way
            // during parsing.
            let mut next = outer_trees.next();
            let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
            if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
                trees = Box::new(tts.into_trees());
                next = trees.next();
            } else {
                trees = Box::new(outer_trees);
            }

            match next {
                // `tree` is followed by a delimited set of token trees. This indicates the
                // beginning of a repetition sequence in the macro (e.g. `$(pat)*`).
                Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
                    // Must have `(` not `{` or `[`
                    if delim != token::Paren {
                        let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                        let msg = format!("expected `(`, found `{}`", tok);
                        sess.span_diagnostic.span_err(span.entire(), &msg);
                    }
                    // Parse the contents of the sequence itself
                    let sequence = parse(tts, expect_matchers, sess, node_id, features);
                    // Get the Kleene operator and optional separator
                    let (separator, kleene) =
                        parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
                    // Count the number of captured "names" (i.e., named metavars)
                    let name_captures = macro_parser::count_names(&sequence);
                    TokenTree::Sequence(
                        span,
                        Lrc::new(SequenceRepetition {
                            tts: sequence,
                            separator,
                            kleene,
                            num_captures: name_captures,
                        }),
                    )
                }

                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
                // special metavariable that names the crate of the invocation.
                Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                    let (ident, is_raw) = token.ident().unwrap();
                    let span = ident.span.with_lo(span.lo());
                    if ident.name == kw::Crate && !is_raw {
                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                    } else {
                        TokenTree::MetaVar(span, ident)
                    }
                }

                // `tree` is followed by a random token. This is an error.
                Some(tokenstream::TokenTree::Token(token)) => {
                    let msg = format!(
                        "expected identifier, found `{}`",
                        pprust::token_to_string(&token),
                    );
                    sess.span_diagnostic.span_err(token.span, &msg);
                    TokenTree::MetaVar(token.span, Ident::invalid())
                }

                // There are no more tokens. Just return the `$` we already have.
                None => TokenTree::token(token::Dollar, span),
            }
        }

        // `tree` is an arbitrary token. Keep it.
        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
            span,
            Lrc::new(Delimited {
                delim,
                tts: parse(tts, expect_matchers, sess, node_id, features),
            }),
        ),
    }
}

/// Takes a token and returns `Some(KleeneOp)` if the token is `+`, `*`, or `?`. Otherwise, returns
/// `None`.
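///
/// For example (illustrative): `*` maps to `KleeneOp::ZeroOrMore`, `+` to `KleeneOp::OneOrMore`,
/// and `?` to `KleeneOp::ZeroOrOne`; any other token yields `None`.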
fn kleene_op(token: &Token) -> Option<KleeneOp> {
    match token.kind {
        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
        token::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

/// Parse the next token tree of the input looking for a KleeneOp. Returns
///
/// - `Ok(Ok((op, span)))` if the next token tree is a KleeneOp
/// - `Ok(Err(token))` if the next token tree is a token but not a KleeneOp
/// - `Err(span)` if the next token tree is not a token
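///
/// For example (illustrative): the token `*` yields `Ok(Ok((KleeneOp::ZeroOrMore, span)))`, the
/// token `,` yields `Ok(Err(token))` so the caller can treat it as a potential separator, and a
/// delimited group (or exhausted input) yields `Err(span)`.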
fn parse_kleene_op(
    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
    span: Span,
) -> Result<Result<(KleeneOp, Span), Token>, Span> {
    match input.next() {
        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
            Some(op) => Ok(Ok((op, token.span))),
            None => Ok(Err(token)),
        },
        tree => Err(tree.as_ref().map_or(span, tokenstream::TokenTree::span)),
    }
}

/// Attempt to parse a single Kleene operator, possibly with a separator.
///
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invocation of a macro.
///
/// This function will take some input iterator `input` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
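///
/// # Example
///
/// A rough sketch of the possible outcomes (spans omitted):
///
/// ```text
/// $(a)*    => (None, *)               // no separator
/// $(a),+   => (Some(`,`), +)          // `,` separator followed by the Kleene op
/// $(a),?   => error, dummy returned   // `?` does not take a separator
/// ```
///
/// If no Kleene operator is found in these two positions, the "expected one of: `*`, `+`, or `?`"
/// error is emitted and the dummy `(None, *)` is returned.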
fn parse_sep_and_kleene_op(
    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
    span: Span,
    sess: &ParseSess,
) -> (Option<Token>, KleeneToken) {
    // We basically look at two token trees here, denoted as #1 and #2 below
    let span = match parse_kleene_op(input, span) {
        // #1 is a `?`, `+`, or `*` KleeneOp
        Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),

        // #1 is a separator followed by #2, a KleeneOp
        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
            // #2 is the `?` Kleene op, which does not take a separator (error)
            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                // Error!
                sess.span_diagnostic.span_err(
                    token.span,
                    "the `?` macro repetition operator does not take a separator",
                );

                // Return a dummy
                return (None, KleeneToken::new(KleeneOp::ZeroOrMore, span));
            }

            // #2 is a KleeneOp :D
            Ok(Ok((op, span))) => return (Some(token), KleeneToken::new(op, span)),

            // #2 is a random token or not a token at all :(
            Ok(Err(Token { span, .. })) | Err(span) => span,
        },

        // #1 is not a token
        Err(span) => span,
    };

    // If we ever get to this point, we have experienced an "unexpected token" error
    sess.span_diagnostic.span_err(span, "expected one of: `*`, `+`, or `?`");

    // Return a dummy
    (None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
}