1 //! To make attribute macros work reliably when typing, we need to take care to
2 //! fix up syntax errors in the code we're passing to them.
5 span
::{ErasedFileAstId, SpanAnchor, SpanData}
,
9 use rustc_hash
::{FxHashMap, FxHashSet}
;
10 use smallvec
::SmallVec
;
13 ast
::{self, AstNode, HasLoopBody}
,
14 match_ast
, SyntaxElement
, SyntaxKind
, SyntaxNode
, TextRange
, TextSize
,
17 use tt
::{Spacing, Span}
;
21 tt
::{Ident, Leaf, Punct, Subtree}
,
24 /// The result of calculating fixes for a syntax node -- a bunch of changes
25 /// (appending to and replacing nodes), the information that is needed to
26 /// reverse those changes afterwards, and a token map.
27 #[derive(Debug, Default)]
28 pub(crate) struct SyntaxFixups
{
29 pub(crate) append
: FxHashMap
<SyntaxElement
, Vec
<Leaf
>>,
30 pub(crate) remove
: FxHashSet
<SyntaxNode
>,
31 pub(crate) undo_info
: SyntaxFixupUndoInfo
,
34 /// This is the information needed to reverse the fixups.
35 #[derive(Clone, Debug, Default, PartialEq, Eq)]
36 pub struct SyntaxFixupUndoInfo
{
37 // FIXME: ThinArc<[Subtree]>
38 original
: Option
<Arc
<Box
<[Subtree
]>>>,
41 impl SyntaxFixupUndoInfo
{
42 pub(crate) const NONE
: Self = SyntaxFixupUndoInfo { original: None }
;
45 // censoring -> just don't convert the node
46 // replacement -> censor + append
47 // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
49 const FIXUP_DUMMY_FILE
: FileId
= FileId
::from_raw(FileId
::MAX_FILE_ID
);
50 const FIXUP_DUMMY_AST_ID
: ErasedFileAstId
= ErasedFileAstId
::from_raw(RawIdx
::from_u32(!0));
51 const FIXUP_DUMMY_RANGE
: TextRange
= TextRange
::empty(TextSize
::new(0));
52 const FIXUP_DUMMY_RANGE_END
: TextSize
= TextSize
::new(!0);
54 pub(crate) fn fixup_syntax(span_map
: SpanMapRef
<'_
>, node
: &SyntaxNode
) -> SyntaxFixups
{
55 let mut append
= FxHashMap
::<SyntaxElement
, _
>::default();
56 let mut remove
= FxHashSet
::<SyntaxNode
>::default();
57 let mut preorder
= node
.preorder();
58 let mut original
= Vec
::new();
59 let dummy_range
= FIXUP_DUMMY_RANGE
;
60 // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
61 // the index into the replacement vec but only if the end points to !0
62 let dummy_anchor
= SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID }
;
63 let fake_span
= |range
| SpanData
{
66 ctx
: span_map
.span_for_range(range
).ctx
,
68 while let Some(event
) = preorder
.next() {
69 let syntax
::WalkEvent
::Enter(node
) = event
else { continue }
;
71 let node_range
= node
.text_range();
72 if can_handle_error(&node
) && has_error_to_handle(&node
) {
73 remove
.insert(node
.clone().into());
74 // the node contains an error node, we have to completely replace it by something valid
75 let original_tree
= mbe
::syntax_node_to_token_tree(&node
, span_map
);
76 let idx
= original
.len() as u32;
77 original
.push(original_tree
);
78 let replacement
= Leaf
::Ident(Ident
{
79 text
: "__ra_fixup".into(),
81 range
: TextRange
::new(TextSize
::new(idx
), FIXUP_DUMMY_RANGE_END
),
83 ctx
: span_map
.span_for_range(node_range
).ctx
,
86 append
.insert(node
.clone().into(), vec
![replacement
]);
87 preorder
.skip_subtree();
91 // In some other situations, we can fix things by just appending some tokens.
94 ast
::FieldExpr(it
) => {
95 if it
.name_ref().is_none() {
96 // incomplete field access: some_expr.|
97 append
.insert(node
.clone().into(), vec
![
99 text
: "__ra_fixup".into(),
100 span
: fake_span(node_range
),
105 ast
::ExprStmt(it
) => {
106 if it
.semicolon_token().is_none() {
107 append
.insert(node
.clone().into(), vec
![
110 spacing
: Spacing
::Alone
,
111 span
: fake_span(node_range
),
116 ast
::LetStmt(it
) => {
117 if it
.semicolon_token().is_none() {
118 append
.insert(node
.clone().into(), vec
![
121 spacing
: Spacing
::Alone
,
122 span
: fake_span(node_range
)
128 if it
.condition().is_none() {
129 // insert placeholder token after the if token
130 let if_token
= match it
.if_token() {
134 append
.insert(if_token
.into(), vec
![
136 text
: "__ra_fixup".into(),
137 span
: fake_span(node_range
)
141 if it
.then_branch().is_none() {
142 append
.insert(node
.clone().into(), vec
![
143 // FIXME: THis should be a subtree no?
146 spacing
: Spacing
::Alone
,
147 span
: fake_span(node_range
)
151 spacing
: Spacing
::Alone
,
152 span
: fake_span(node_range
)
157 ast
::WhileExpr(it
) => {
158 if it
.condition().is_none() {
159 // insert placeholder token after the while token
160 let while_token
= match it
.while_token() {
164 append
.insert(while_token
.into(), vec
![
166 text
: "__ra_fixup".into(),
167 span
: fake_span(node_range
)
171 if it
.loop_body().is_none() {
172 append
.insert(node
.clone().into(), vec
![
173 // FIXME: THis should be a subtree no?
176 spacing
: Spacing
::Alone
,
177 span
: fake_span(node_range
)
181 spacing
: Spacing
::Alone
,
182 span
: fake_span(node_range
)
187 ast
::LoopExpr(it
) => {
188 if it
.loop_body().is_none() {
189 append
.insert(node
.clone().into(), vec
![
190 // FIXME: THis should be a subtree no?
193 spacing
: Spacing
::Alone
,
194 span
: fake_span(node_range
)
198 spacing
: Spacing
::Alone
,
199 span
: fake_span(node_range
)
205 ast
::MatchExpr(it
) => {
206 if it
.expr().is_none() {
207 let match_token
= match it
.match_token() {
211 append
.insert(match_token
.into(), vec
![
213 text
: "__ra_fixup".into(),
214 span
: fake_span(node_range
)
218 if it
.match_arm_list().is_none() {
220 append
.insert(node
.clone().into(), vec
![
221 // FIXME: THis should be a subtree no?
224 spacing
: Spacing
::Alone
,
225 span
: fake_span(node_range
)
229 spacing
: Spacing
::Alone
,
230 span
: fake_span(node_range
)
235 ast
::ForExpr(it
) => {
236 let for_token
= match it
.for_token() {
237 Some(token
) => token
,
241 let [pat
, in_token
, iter
] = [
248 span
: fake_span(node_range
)
252 if it
.pat().is_none() && it
.in_token().is_none() && it
.iterable().is_none() {
253 append
.insert(for_token
.into(), vec
![pat
, in_token
, iter
]);
254 // does something funky -- see test case for_no_pat
255 } else if it
.pat().is_none() {
256 append
.insert(for_token
.into(), vec
![pat
]);
259 if it
.loop_body().is_none() {
260 append
.insert(node
.clone().into(), vec
![
261 // FIXME: THis should be a subtree no?
264 spacing
: Spacing
::Alone
,
265 span
: fake_span(node_range
)
269 spacing
: Spacing
::Alone
,
270 span
: fake_span(node_range
)
279 let needs_fixups
= !append
.is_empty() || !original
.is_empty();
283 undo_info
: SyntaxFixupUndoInfo
{
284 original
: needs_fixups
.then(|| Arc
::new(original
.into_boxed_slice())),
289 fn has_error(node
: &SyntaxNode
) -> bool
{
290 node
.children().any(|c
| c
.kind() == SyntaxKind
::ERROR
)
293 fn can_handle_error(node
: &SyntaxNode
) -> bool
{
294 ast
::Expr
::can_cast(node
.kind())
297 fn has_error_to_handle(node
: &SyntaxNode
) -> bool
{
298 has_error(node
) || node
.children().any(|c
| !can_handle_error(&c
) && has_error_to_handle(&c
))
301 pub(crate) fn reverse_fixups(tt
: &mut Subtree
, undo_info
: &SyntaxFixupUndoInfo
) {
302 let Some(undo_info
) = undo_info
.original
.as_deref() else { return }
;
303 let undo_info
= &**undo_info
;
305 tt
.delimiter
.close
.anchor
.file_id
== FIXUP_DUMMY_FILE
306 || tt
.delimiter
.open
.anchor
.file_id
== FIXUP_DUMMY_FILE
308 tt
.delimiter
.close
= SpanData
::DUMMY
;
309 tt
.delimiter
.open
= SpanData
::DUMMY
;
311 reverse_fixups_(tt
, undo_info
);
314 fn reverse_fixups_(tt
: &mut Subtree
, undo_info
: &[Subtree
]) {
315 let tts
= std
::mem
::take(&mut tt
.token_trees
);
318 // delete all fake nodes
319 .filter(|tt
| match tt
{
320 tt
::TokenTree
::Leaf(leaf
) => {
321 let span
= leaf
.span();
322 let is_real_leaf
= span
.anchor
.file_id
!= FIXUP_DUMMY_FILE
;
323 let is_replaced_node
= span
.range
.end() == FIXUP_DUMMY_RANGE_END
;
324 is_real_leaf
|| is_replaced_node
326 tt
::TokenTree
::Subtree(_
) => true,
328 .flat_map(|tt
| match tt
{
329 tt
::TokenTree
::Subtree(mut tt
) => {
330 if tt
.delimiter
.close
.anchor
.file_id
== FIXUP_DUMMY_FILE
331 || tt
.delimiter
.open
.anchor
.file_id
== FIXUP_DUMMY_FILE
333 // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
334 // might copy them if the proc-macro asks for it, so we need to filter those out
336 return SmallVec
::new_const();
338 reverse_fixups_(&mut tt
, undo_info
);
339 SmallVec
::from_const([tt
.into()])
341 tt
::TokenTree
::Leaf(leaf
) => {
342 if leaf
.span().anchor
.file_id
== FIXUP_DUMMY_FILE
{
343 // we have a fake node here, we need to replace it again with the original
344 let original
= undo_info
[u32::from(leaf
.span().range
.start()) as usize].clone();
345 if original
.delimiter
.kind
== tt
::DelimiterKind
::Invisible
{
346 original
.token_trees
.into()
348 SmallVec
::from_const([original
.into()])
351 // just a normal leaf
352 SmallVec
::from_const([leaf
.into()])
362 use expect_test
::{expect, Expect}
;
366 fixup
::reverse_fixups
,
367 span
::{RealSpanMap, SpanMap}
,
371 // The following three functions are only meant to check partial structural equivalence of
372 // `TokenTree`s, see the last assertion in `check()`.
373 fn check_leaf_eq(a
: &tt
::Leaf
, b
: &tt
::Leaf
) -> bool
{
375 (tt
::Leaf
::Literal(a
), tt
::Leaf
::Literal(b
)) => a
.text
== b
.text
,
376 (tt
::Leaf
::Punct(a
), tt
::Leaf
::Punct(b
)) => a
.char == b
.char,
377 (tt
::Leaf
::Ident(a
), tt
::Leaf
::Ident(b
)) => a
.text
== b
.text
,
382 fn check_subtree_eq(a
: &tt
::Subtree
, b
: &tt
::Subtree
) -> bool
{
383 a
.delimiter
.kind
== b
.delimiter
.kind
384 && a
.token_trees
.len() == b
.token_trees
.len()
385 && a
.token_trees
.iter().zip(&b
.token_trees
).all(|(a
, b
)| check_tt_eq(a
, b
))
388 fn check_tt_eq(a
: &tt
::TokenTree
, b
: &tt
::TokenTree
) -> bool
{
390 (tt
::TokenTree
::Leaf(a
), tt
::TokenTree
::Leaf(b
)) => check_leaf_eq(a
, b
),
391 (tt
::TokenTree
::Subtree(a
), tt
::TokenTree
::Subtree(b
)) => check_subtree_eq(a
, b
),
397 fn check(ra_fixture
: &str, mut expect
: Expect
) {
398 let parsed
= syntax
::SourceFile
::parse(ra_fixture
);
399 let span_map
= SpanMap
::RealSpanMap(Arc
::new(RealSpanMap
::absolute(FileId
::from_raw(0))));
400 let fixups
= super::fixup_syntax(span_map
.as_ref(), &parsed
.syntax_node());
401 let mut tt
= mbe
::syntax_node_to_token_tree_modified(
402 &parsed
.syntax_node(),
408 let actual
= format
!("{tt}\n");
410 expect
.indent(false);
411 expect
.assert_eq(&actual
);
413 // the fixed-up tree should be syntactically valid
414 let (parse
, _
) = mbe
::token_tree_to_syntax_node(&tt
, ::mbe
::TopEntryPoint
::MacroItems
);
416 parse
.errors().is_empty(),
417 "parse has syntax errors. parse tree:\n{:#?}",
421 reverse_fixups(&mut tt
, &fixups
.undo_info
);
423 // the fixed-up + reversed version should be equivalent to the original input
424 // modulo token IDs and `Punct`s' spacing.
426 mbe
::syntax_node_to_token_tree(&parsed
.syntax_node(), span_map
.as_ref());
428 check_subtree_eq(&tt
, &original_as_tt
),
429 "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
// NOTE(review): the test functions below are truncated in this extraction --
// only the test names, a few comments, and fragments of their `expect![[...]]`
// outputs remain; every `check(r#"..."#, expect![[...]])` invocation and input
// fixture is missing, as are the `#[test]` attributes and closing braces.
// These cannot be safely reconstructed from what is visible; restore them from
// the upstream file before attempting to compile.
434 fn just_for_token() {
442 fn foo () {for _ in __ra_fixup { }}
448 fn for_no_iter_pattern() {
456 fn foo () {for _ in __ra_fixup {}}
470 fn foo () {for bar in qux { }}
475 // FIXME: https://github.com/rust-lang/rust-analyzer/pull/12937#discussion_r937633695
487 fn foo () {__ra_fixup}
493 fn match_no_expr_no_arms() {
501 fn foo () {match __ra_fixup { }}
507 fn match_expr_no_arms() {
517 fn foo () {match it {}}
533 fn foo () {match __ra_fixup { }}
539 fn incomplete_field_expr_1() {
547 fn foo () {a . __ra_fixup}
553 fn incomplete_field_expr_2() {
561 fn foo () {a . __ra_fixup ;}
567 fn incomplete_field_expr_3() {
576 fn foo () {a . __ra_fixup ; bar () ;}
582 fn incomplete_let() {
590 fn foo () {let it = a ;}
596 fn incomplete_field_expr_in_let() {
604 fn foo () {let it = a . __ra_fixup ;}
610 fn field_expr_before_call() {
611 // another case that easily happens while typing
620 fn foo () {a . b ; bar () ;}
626 fn extraneous_comma() {
634 fn foo () {__ra_fixup ;}
662 fn foo () {if __ra_fixup { }}
676 fn foo () {if __ra_fixup {} { }}
690 fn foo () {while __ra_fixup { }}
704 fn foo () {while foo { }}
717 fn foo () {while __ra_fixup {}}