1 //! This module contains tests for macro expansion. Effectively, it covers `tt`,
2 //! `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like a
3 //! wrong architecture at the first glance, but is intentional.
5 //! Physically, macro expansion process is intertwined with name resolution. You
6 //! can not expand *just* the syntax. So, to be able to write integration tests
7 //! of the "expand this code please" form, we have to do it after name
8 //! resolution. That is, in this crate. We *could* fake some dependencies and
9 //! write unit-tests (in fact, we used to do that), but that makes tests brittle
10 //! and harder to understand.
14 mod builtin_derive_macro
;
use std::{iter, ops::Range, sync::Arc};

use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
    db::{ExpandDatabase, TokenExpander},
    AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
};
use stdx::format_to;
use syntax::{
    ast::{self, edit::IndentLevel},
    AstNode, SyntaxElement,
    SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
    SyntaxNode, TextRange, T,
};
use tt::token_id::{Subtree, TokenId};

use crate::{
    db::DefDatabase, macro_id_to_def_id, nameres::ModuleSource, resolver::HasResolver,
    src::HasSource, test_db::TestDB, AdtId, AsMacroCall, Lookup, ModuleDefId,
};
41 fn check(ra_fixture
: &str, mut expect
: Expect
) {
42 let extra_proc_macros
= vec
![(
44 #[proc_macro_attribute]
45 pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream {
51 name
: "identity_when_valid".into(),
52 kind
: base_db
::ProcMacroKind
::Attr
,
53 expander
: Arc
::new(IdentityWhenValidProcMacroExpander
),
56 let db
= TestDB
::with_files_extra_proc_macros(ra_fixture
, extra_proc_macros
);
57 let krate
= db
.crate_graph().iter().next().unwrap();
58 let def_map
= db
.crate_def_map(krate
);
59 let local_id
= def_map
.root();
60 let module
= def_map
.module_id(local_id
);
61 let resolver
= module
.resolver(&db
);
62 let source
= def_map
[local_id
].definition_source(&db
);
63 let source_file
= match source
.value
{
64 ModuleSource
::SourceFile(it
) => it
,
65 ModuleSource
::Module(_
) | ModuleSource
::BlockExpr(_
) => panic
!(),
68 // What we want to do is to replace all macros (fn-like, derive, attr) with
69 // their expansions. Turns out, we don't actually store enough information
70 // to do this precisely though! Specifically, if a macro expands to nothing,
71 // it leaves zero traces in def-map, so we can't get its expansion after the
75 // <https://github.com/rust-lang/rust-analyzer/issues/3407>
76 // resolve/record tension!
78 // So here we try to do a resolve, which is necessary a heuristic. For macro
79 // calls, we use `as_call_id_with_errors`. For derives, we look at the impls
80 // in the module and assume that, if impls's source is a different
81 // `HirFileId`, than it came from macro expansion.
83 let mut text_edits
= Vec
::new();
84 let mut expansions
= Vec
::new();
86 for macro_
in source_file
.syntax().descendants().filter_map(ast
::Macro
::cast
) {
87 let mut show_token_ids
= false;
88 for comment
in macro_
.syntax().children_with_tokens().filter(|it
| it
.kind() == COMMENT
) {
89 show_token_ids
|= comment
.to_string().contains("+tokenids");
95 let call_offset
= macro_
.syntax().text_range().start().into();
96 let file_ast_id
= db
.ast_id_map(source
.file_id
).ast_id(¯o_
);
97 let ast_id
= AstId
::new(source
.file_id
, file_ast_id
.upcast());
98 let kind
= MacroDefKind
::Declarative(ast_id
);
101 .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false }
)
103 if let TokenExpander
::DeclarativeMacro { mac, def_site_token_map }
= &*macro_def
{
104 let tt
= match ¯o_
{
105 ast
::Macro
::MacroRules(mac
) => mac
.token_tree().unwrap(),
106 ast
::Macro
::MacroDef(_
) => unimplemented
!(""),
109 let tt_start
= tt
.syntax().text_range().start();
110 tt
.syntax().descendants_with_tokens().filter_map(SyntaxElement
::into_token
).for_each(
112 let range
= token
.text_range().checked_sub(tt_start
).unwrap();
113 if let Some(id
) = def_site_token_map
.token_by_range(range
) {
114 let offset
= (range
.end() + tt_start
).into();
115 text_edits
.push((offset
..offset
, format
!("#{}", id
.0)));
120 call_offset
..call_offset
,
121 format
!("// call ids will be shifted by {:?}\n", mac
.shift()),
126 for macro_call
in source_file
.syntax().descendants().filter_map(ast
::MacroCall
::cast
) {
127 let macro_call
= InFile
::new(source
.file_id
, ¯o_call
);
128 let mut error
= None
;
129 let macro_call_id
= macro_call
130 .as_call_id_with_errors(
134 resolver
.resolve_path_as_macro(&db
, &path
).map(|it
| macro_id_to_def_id(&db
, it
))
136 &mut |err
| error
= Some(err
),
140 let macro_file
= MacroFile { macro_call_id }
;
141 let mut expansion_result
= db
.parse_macro_expansion(macro_file
);
142 expansion_result
.err
= expansion_result
.err
.or(error
);
143 expansions
.push((macro_call
.value
.clone(), expansion_result
, db
.macro_arg(macro_call_id
)));
146 for (call
, exp
, arg
) in expansions
.into_iter().rev() {
147 let mut tree
= false;
148 let mut expect_errors
= false;
149 let mut show_token_ids
= false;
150 for comment
in call
.syntax().children_with_tokens().filter(|it
| it
.kind() == COMMENT
) {
151 tree
|= comment
.to_string().contains("+tree");
152 expect_errors
|= comment
.to_string().contains("+errors");
153 show_token_ids
|= comment
.to_string().contains("+tokenids");
156 let mut expn_text
= String
::new();
157 if let Some(err
) = exp
.err
{
158 format_to
!(expn_text
, "/* error: {} */", err
);
160 if let Some((parse
, token_map
)) = exp
.value
{
162 assert
!(!parse
.errors().is_empty(), "no parse errors in expansion");
163 for e
in parse
.errors() {
164 format_to
!(expn_text
, "/* parse error: {} */\n", e
);
168 parse
.errors().is_empty(),
169 "parse errors in expansion: \n{:#?}",
173 let pp
= pretty_print_macro_expansion(
175 show_token_ids
.then_some(&*token_map
),
177 let indent
= IndentLevel
::from_node(call
.syntax());
178 let pp
= reindent(indent
, pp
);
179 format_to
!(expn_text
, "{}", pp
);
182 let tree
= format
!("{:#?}", parse
.syntax_node())
183 .split_inclusive('
\n'
)
184 .map(|line
| format
!("// {line}"))
185 .collect
::<String
>();
186 format_to
!(expn_text
, "\n{}", tree
)
189 let range
= call
.syntax().text_range();
190 let range
: Range
<usize> = range
.into();
193 if let Some((tree
, map
, _
)) = arg
.as_deref() {
194 let tt_range
= call
.token_tree().unwrap().syntax().text_range();
195 let mut ranges
= Vec
::new();
196 extract_id_ranges(&mut ranges
, map
, tree
);
197 for (range
, id
) in ranges
{
198 let idx
= (tt_range
.start() + range
.end()).into();
199 text_edits
.push((idx
..idx
, format
!("#{}", id
.0)));
202 text_edits
.push((range
.start
..range
.start
, "// ".into()));
203 call
.to_string().match_indices('
\n'
).for_each(|(offset
, _
)| {
204 let offset
= offset
+ 1 + range
.start
;
205 text_edits
.push((offset
..offset
, "// ".into()));
207 text_edits
.push((range
.end
..range
.end
, "\n".into()));
208 text_edits
.push((range
.end
..range
.end
, expn_text
));
210 text_edits
.push((range
, expn_text
));
214 text_edits
.sort_by_key(|(range
, _
)| range
.start
);
215 text_edits
.reverse();
216 let mut expanded_text
= source_file
.to_string();
217 for (range
, text
) in text_edits
{
218 expanded_text
.replace_range(range
, &text
);
221 for decl_id
in def_map
[local_id
].scope
.declarations() {
222 // FIXME: I'm sure there's already better way to do this
223 let src
= match decl_id
{
224 ModuleDefId
::AdtId(AdtId
::StructId(struct_id
)) => {
225 Some(struct_id
.lookup(&db
).source(&db
).syntax().cloned())
227 ModuleDefId
::FunctionId(function_id
) => {
228 Some(function_id
.lookup(&db
).source(&db
).syntax().cloned())
232 if let Some(src
) = src
{
233 if src
.file_id
.is_attr_macro(&db
) || src
.file_id
.is_custom_derive(&db
) {
234 let pp
= pretty_print_macro_expansion(src
.value
, None
);
235 format_to
!(expanded_text
, "\n{}", pp
)
240 for impl_id
in def_map
[local_id
].scope
.impls() {
241 let src
= impl_id
.lookup(&db
).source(&db
);
242 if src
.file_id
.is_builtin_derive(&db
).is_some() {
243 let pp
= pretty_print_macro_expansion(src
.value
.syntax().clone(), None
);
244 format_to
!(expanded_text
, "\n{}", pp
)
248 expect
.indent(false);
249 expect
.assert_eq(&expanded_text
);
252 fn extract_id_ranges(ranges
: &mut Vec
<(TextRange
, TokenId
)>, map
: &TokenMap
, tree
: &Subtree
) {
253 tree
.token_trees
.iter().for_each(|tree
| match tree
{
254 tt
::TokenTree
::Leaf(leaf
) => {
255 let id
= match leaf
{
256 tt
::Leaf
::Literal(it
) => it
.span
,
257 tt
::Leaf
::Punct(it
) => it
.span
,
258 tt
::Leaf
::Ident(it
) => it
.span
,
260 ranges
.extend(map
.ranges_by_token(id
, SyntaxKind
::ERROR
).map(|range
| (range
, id
)));
262 tt
::TokenTree
::Subtree(tree
) => extract_id_ranges(ranges
, map
, tree
),
266 fn reindent(indent
: IndentLevel
, pp
: String
) -> String
{
267 if !pp
.contains('
\n'
) {
270 let mut lines
= pp
.split_inclusive('
\n'
);
271 let mut res
= lines
.next().unwrap().to_string();
273 if line
.trim().is_empty() {
276 format_to
!(res
, "{}{}", indent
, line
)
282 fn pretty_print_macro_expansion(expn
: SyntaxNode
, map
: Option
<&TokenMap
>) -> String
{
283 let mut res
= String
::new();
284 let mut prev_kind
= EOF
;
285 let mut indent_level
= 0;
286 for token
in iter
::successors(expn
.first_token(), |t
| t
.next_token()) {
287 let curr_kind
= token
.kind();
288 let space
= match (prev_kind
, curr_kind
) {
289 _
if prev_kind
.is_trivia() || curr_kind
.is_trivia() => "",
290 (T
!['{'], T!['}'
]) => "",
291 (T
![=], _
) | (_
, T
![=]) => " ",
293 (T
![;] | T
!['{'] | T!['}'
], _
) => "\n",
294 (_
, T
!['
}'
]) => "\n",
295 (IDENT
| LIFETIME_IDENT
, IDENT
| LIFETIME_IDENT
) => " ",
296 _
if prev_kind
.is_keyword() && curr_kind
.is_keyword() => " ",
297 (IDENT
, _
) if curr_kind
.is_keyword() => " ",
298 (_
, IDENT
) if prev_kind
.is_keyword() => " ",
299 (T
![>], IDENT
) => " ",
300 (T
![>], _
) if curr_kind
.is_keyword() => " ",
301 (T
![->], _
) | (_
, T
![->]) => " ",
302 (T
![&&], _
) | (_
, T
![&&]) => " ",
304 (T
![:], IDENT
| T
!['
('
]) => " ",
305 (T
![:], _
) if curr_kind
.is_keyword() => " ",
306 (T
![fn], T
!['
('
]) => "",
307 (T
!['
]'
], _
) if curr_kind
.is_keyword() => " ",
308 (T
!['
]'
], T
![#]) => "\n",
309 (T
![Self], T
![::]) => "",
310 _
if prev_kind
.is_keyword() => " ",
315 T
!['
{'
] => indent_level
+= 1,
316 T
!['
}'
] => indent_level
-= 1,
322 let level
= if curr_kind
== T
!['
}'
] { indent_level - 1 }
else { indent_level }
;
323 res
.push_str(&" ".repeat(level
));
325 prev_kind
= curr_kind
;
326 format_to
!(res
, "{}", token
);
327 if let Some(map
) = map
{
328 if let Some(id
) = map
.token_by_range(token
.text_range()) {
329 format_to
!(res
, "#{}", id
.0);
// Identity mapping, but only works when the input is syntactically valid. This
// simulates common proc macros that unnecessarily parse their input and return
// compile errors if the input is invalid.
#[derive(Debug)]
struct IdentityWhenValidProcMacroExpander;
341 impl base_db
::ProcMacroExpander
for IdentityWhenValidProcMacroExpander
{
347 ) -> Result
<Subtree
, base_db
::ProcMacroExpansionError
> {
349 ::mbe
::token_tree_to_syntax_node(subtree
, ::mbe
::TopEntryPoint
::MacroItems
);
350 if parse
.errors().is_empty() {
353 panic
!("got invalid macro input: {:?}", parse
.errors());