]> git.proxmox.com Git - rustc.git/blob - src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
New upstream version 1.70.0+dfsg1
[rustc.git] / src / tools / rust-analyzer / crates / hir-def / src / macro_expansion_tests.rs
//! This module contains tests for macro expansion. Effectively, it covers the
//! `tt`, `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like
//! the wrong architecture at first glance, but it is intentional.
//!
//! Physically, the macro expansion process is intertwined with name resolution.
//! You cannot expand *just* the syntax. So, to be able to write integration
//! tests of the "expand this code please" form, we have to do it after name
//! resolution — that is, in this crate. We *could* fake some dependencies and
//! write unit-tests (in fact, we used to do that), but that makes tests brittle
//! and harder to understand.
11
12 mod mbe;
13 mod builtin_fn_macro;
14 mod builtin_derive_macro;
15 mod proc_macros;
16
17 use std::{iter, ops::Range, sync::Arc};
18
19 use ::mbe::TokenMap;
20 use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
21 use expect_test::Expect;
22 use hir_expand::{
23 db::{ExpandDatabase, TokenExpander},
24 AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
25 };
26 use stdx::format_to;
27 use syntax::{
28 ast::{self, edit::IndentLevel},
29 AstNode, SyntaxElement,
30 SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
31 SyntaxNode, TextRange, T,
32 };
33 use tt::token_id::{Subtree, TokenId};
34
35 use crate::{
36 db::DefDatabase, macro_id_to_def_id, nameres::ModuleSource, resolver::HasResolver,
37 src::HasSource, test_db::TestDB, AdtId, AsMacroCall, Lookup, ModuleDefId,
38 };
39
/// Expands the macros in `ra_fixture` and compares the rewritten source text
/// against `expect`.
///
/// Magic comments attached to a macro definition or call site control the
/// rendering:
/// * `// +tree`     — also dump the expansion's syntax tree (commented out),
/// * `// +errors`   — require parse errors in the expansion and render them,
/// * `// +tokenids` — keep the (commented-out) call, annotating tokens with
///   their `TokenId`s, and append the expansion after it.
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
    // A proc-macro attribute made available to every fixture: it expands to
    // its input unchanged, but panics when the input does not parse (see
    // `IdentityWhenValidProcMacroExpander` below).
    let extra_proc_macros = vec![(
        r#"
#[proc_macro_attribute]
pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream {
    item
}
"#
        .into(),
        ProcMacro {
            name: "identity_when_valid".into(),
            kind: base_db::ProcMacroKind::Attr,
            expander: Arc::new(IdentityWhenValidProcMacroExpander),
        },
    )];
    let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
    let krate = db.crate_graph().iter().next().unwrap();
    let def_map = db.crate_def_map(krate);
    let local_id = def_map.root();
    let module = def_map.module_id(local_id);
    let resolver = module.resolver(&db);
    let source = def_map[local_id].definition_source(&db);
    // Fixtures must be plain source files — inline modules and block
    // expressions are not supported here.
    let source_file = match source.value {
        ModuleSource::SourceFile(it) => it,
        ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(),
    };

    // What we want to do is to replace all macros (fn-like, derive, attr) with
    // their expansions. Turns out, we don't actually store enough information
    // to do this precisely though! Specifically, if a macro expands to nothing,
    // it leaves zero traces in def-map, so we can't get its expansion after the
    // fact.
    //
    // This is the usual
    // <https://github.com/rust-lang/rust-analyzer/issues/3407>
    // resolve/record tension!
    //
    // So here we try to do a resolve, which is necessary a heuristic. For macro
    // calls, we use `as_call_id_with_errors`. For derives, we look at the impls
    // in the module and assume that, if impls's source is a different
    // `HirFileId`, than it came from macro expansion.

    // Edits are accumulated as (byte-range, replacement-text) pairs and applied
    // to the fixture text at the very end.
    let mut text_edits = Vec::new();
    let mut expansions = Vec::new();

    // Pass 1: for `macro_rules!` definitions marked `+tokenids`, annotate the
    // definition's tokens with ids from its def-site token map.
    for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
        let mut show_token_ids = false;
        for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
            show_token_ids |= comment.to_string().contains("+tokenids");
        }
        if !show_token_ids {
            continue;
        }

        let call_offset = macro_.syntax().text_range().start().into();
        let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
        let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
        let kind = MacroDefKind::Declarative(ast_id);

        let macro_def = db
            .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false })
            .unwrap();
        if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
            let tt = match &macro_ {
                ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
                ast::Macro::MacroDef(_) => unimplemented!(""),
            };

            let tt_start = tt.syntax().text_range().start();
            // Insert `#<id>` right after every token that has an entry in the
            // def-site token map. The map is keyed by ranges relative to the
            // token tree, hence the `checked_sub`/re-add dance.
            tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
                |token| {
                    let range = token.text_range().checked_sub(tt_start).unwrap();
                    if let Some(id) = def_site_token_map.token_by_range(range) {
                        let offset = (range.end() + tt_start).into();
                        text_edits.push((offset..offset, format!("#{}", id.0)));
                    }
                },
            );
            text_edits.push((
                call_offset..call_offset,
                format!("// call ids will be shifted by {:?}\n", mac.shift()),
            ));
        }
    }

    // Pass 2: expand every macro call, remembering the expansion result and
    // the raw macro argument (the latter is needed for `+tokenids` rendering).
    for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
        let macro_call = InFile::new(source.file_id, &macro_call);
        let mut error = None;
        let macro_call_id = macro_call
            .as_call_id_with_errors(
                &db,
                krate,
                |path| {
                    resolver.resolve_path_as_macro(&db, &path).map(|it| macro_id_to_def_id(&db, it))
                },
                &mut |err| error = Some(err),
            )
            .unwrap()
            .unwrap();
        let macro_file = MacroFile { macro_call_id };
        let mut expansion_result = db.parse_macro_expansion(macro_file);
        // An expansion error, if present, wins over a resolution error.
        expansion_result.err = expansion_result.err.or(error);
        expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
    }

    // Pass 3: render each expansion, in reverse source order so that edits for
    // later calls don't shift the ranges recorded for earlier ones.
    for (call, exp, arg) in expansions.into_iter().rev() {
        let mut tree = false;
        let mut expect_errors = false;
        let mut show_token_ids = false;
        for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
            tree |= comment.to_string().contains("+tree");
            expect_errors |= comment.to_string().contains("+errors");
            show_token_ids |= comment.to_string().contains("+tokenids");
        }

        let mut expn_text = String::new();
        if let Some(err) = exp.err {
            format_to!(expn_text, "/* error: {} */", err);
        }
        if let Some((parse, token_map)) = exp.value {
            if expect_errors {
                assert!(!parse.errors().is_empty(), "no parse errors in expansion");
                for e in parse.errors() {
                    format_to!(expn_text, "/* parse error: {} */\n", e);
                }
            } else {
                // Unexpected parse errors in an expansion fail the test loudly.
                assert!(
                    parse.errors().is_empty(),
                    "parse errors in expansion: \n{:#?}",
                    parse.errors()
                );
            }
            let pp = pretty_print_macro_expansion(
                parse.syntax_node(),
                show_token_ids.then_some(&*token_map),
            );
            // Re-indent the pretty-printed expansion to match the call site.
            let indent = IndentLevel::from_node(call.syntax());
            let pp = reindent(indent, pp);
            format_to!(expn_text, "{}", pp);

            if tree {
                // Dump the expansion's syntax tree, commented out line by line.
                let tree = format!("{:#?}", parse.syntax_node())
                    .split_inclusive('\n')
                    .map(|line| format!("// {line}"))
                    .collect::<String>();
                format_to!(expn_text, "\n{}", tree)
            }
        }
        let range = call.syntax().text_range();
        let range: Range<usize> = range.into();

        if show_token_ids {
            // Keep the call itself: annotate its argument tokens with ids,
            // comment out every line of the call, then append the expansion.
            if let Some((tree, map, _)) = arg.as_deref() {
                let tt_range = call.token_tree().unwrap().syntax().text_range();
                let mut ranges = Vec::new();
                extract_id_ranges(&mut ranges, map, tree);
                for (range, id) in ranges {
                    let idx = (tt_range.start() + range.end()).into();
                    text_edits.push((idx..idx, format!("#{}", id.0)));
                }
            }
            text_edits.push((range.start..range.start, "// ".into()));
            call.to_string().match_indices('\n').for_each(|(offset, _)| {
                let offset = offset + 1 + range.start;
                text_edits.push((offset..offset, "// ".into()));
            });
            text_edits.push((range.end..range.end, "\n".into()));
            text_edits.push((range.end..range.end, expn_text));
        } else {
            // Default mode: replace the call with its expansion.
            text_edits.push((range, expn_text));
        }
    }

    // Apply the edits back-to-front so that earlier byte ranges stay valid.
    text_edits.sort_by_key(|(range, _)| range.start);
    text_edits.reverse();
    let mut expanded_text = source_file.to_string();
    for (range, text) in text_edits {
        expanded_text.replace_range(range, &text);
    }

    // Items produced by attribute macros / custom derives exist only in the
    // def-map, not in the edited source text — append them at the end.
    for decl_id in def_map[local_id].scope.declarations() {
        // FIXME: I'm sure there's already better way to do this
        let src = match decl_id {
            ModuleDefId::AdtId(AdtId::StructId(struct_id)) => {
                Some(struct_id.lookup(&db).source(&db).syntax().cloned())
            }
            ModuleDefId::FunctionId(function_id) => {
                Some(function_id.lookup(&db).source(&db).syntax().cloned())
            }
            _ => None,
        };
        if let Some(src) = src {
            if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
                let pp = pretty_print_macro_expansion(src.value, None);
                format_to!(expanded_text, "\n{}", pp)
            }
        }
    }

    // Likewise for impls generated by builtin derives.
    for impl_id in def_map[local_id].scope.impls() {
        let src = impl_id.lookup(&db).source(&db);
        if src.file_id.is_builtin_derive(&db).is_some() {
            let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
            format_to!(expanded_text, "\n{}", pp)
        }
    }

    expect.indent(false);
    expect.assert_eq(&expanded_text);
}
251
252 fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
253 tree.token_trees.iter().for_each(|tree| match tree {
254 tt::TokenTree::Leaf(leaf) => {
255 let id = match leaf {
256 tt::Leaf::Literal(it) => it.span,
257 tt::Leaf::Punct(it) => it.span,
258 tt::Leaf::Ident(it) => it.span,
259 };
260 ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
261 }
262 tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
263 });
264 }
265
266 fn reindent(indent: IndentLevel, pp: String) -> String {
267 if !pp.contains('\n') {
268 return pp;
269 }
270 let mut lines = pp.split_inclusive('\n');
271 let mut res = lines.next().unwrap().to_string();
272 for line in lines {
273 if line.trim().is_empty() {
274 res.push_str(line)
275 } else {
276 format_to!(res, "{}{}", indent, line)
277 }
278 }
279 res
280 }
281
282 fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
283 let mut res = String::new();
284 let mut prev_kind = EOF;
285 let mut indent_level = 0;
286 for token in iter::successors(expn.first_token(), |t| t.next_token()) {
287 let curr_kind = token.kind();
288 let space = match (prev_kind, curr_kind) {
289 _ if prev_kind.is_trivia() || curr_kind.is_trivia() => "",
290 (T!['{'], T!['}']) => "",
291 (T![=], _) | (_, T![=]) => " ",
292 (_, T!['{']) => " ",
293 (T![;] | T!['{'] | T!['}'], _) => "\n",
294 (_, T!['}']) => "\n",
295 (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
296 _ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ",
297 (IDENT, _) if curr_kind.is_keyword() => " ",
298 (_, IDENT) if prev_kind.is_keyword() => " ",
299 (T![>], IDENT) => " ",
300 (T![>], _) if curr_kind.is_keyword() => " ",
301 (T![->], _) | (_, T![->]) => " ",
302 (T![&&], _) | (_, T![&&]) => " ",
303 (T![,], _) => " ",
304 (T![:], IDENT | T!['(']) => " ",
305 (T![:], _) if curr_kind.is_keyword() => " ",
306 (T![fn], T!['(']) => "",
307 (T![']'], _) if curr_kind.is_keyword() => " ",
308 (T![']'], T![#]) => "\n",
309 (T![Self], T![::]) => "",
310 _ if prev_kind.is_keyword() => " ",
311 _ => "",
312 };
313
314 match prev_kind {
315 T!['{'] => indent_level += 1,
316 T!['}'] => indent_level -= 1,
317 _ => (),
318 }
319
320 res.push_str(space);
321 if space == "\n" {
322 let level = if curr_kind == T!['}'] { indent_level - 1 } else { indent_level };
323 res.push_str(&" ".repeat(level));
324 }
325 prev_kind = curr_kind;
326 format_to!(res, "{}", token);
327 if let Some(map) = map {
328 if let Some(id) = map.token_by_range(token.text_range()) {
329 format_to!(res, "#{}", id.0);
330 }
331 }
332 }
333 res
334 }
335
336 // Identity mapping, but only works when the input is syntactically valid. This
337 // simulates common proc macros that unnecessarily parse their input and return
338 // compile errors.
339 #[derive(Debug)]
340 struct IdentityWhenValidProcMacroExpander;
341 impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
342 fn expand(
343 &self,
344 subtree: &Subtree,
345 _: Option<&Subtree>,
346 _: &base_db::Env,
347 ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
348 let (parse, _) =
349 ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
350 if parse.errors().is_empty() {
351 Ok(subtree.clone())
352 } else {
353 panic!("got invalid macro input: {:?}", parse.errors());
354 }
355 }
356 }