1 //! Module responsible for analyzing the code surrounding the cursor for completion.
4 use hir
::{Semantics, Type, TypeInfo}
;
5 use ide_db
::{active_parameter::ActiveParameter, RootDatabase}
;
7 algo
::{find_node_at_offset, non_trivia_sibling}
,
8 ast
::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef}
,
9 match_ast
, AstNode
, AstToken
, Direction
, NodeOrToken
, SyntaxElement
, SyntaxKind
, SyntaxNode
,
10 SyntaxToken
, TextRange
, TextSize
, T
,
14 AttrCtx
, CompletionAnalysis
, DotAccess
, DotAccessKind
, ExprCtx
, ItemListKind
, LifetimeContext
,
15 LifetimeKind
, NameContext
, NameKind
, NameRefContext
, NameRefKind
, ParamContext
, ParamKind
,
16 PathCompletionCtx
, PathKind
, PatternContext
, PatternRefutability
, Qualified
, QualifierCtx
,
17 TypeAscriptionTarget
, TypeLocation
, COMPLETION_MARKER
,
20 struct ExpansionResult
{
21 original_file
: SyntaxNode
,
22 speculative_file
: SyntaxNode
,
24 fake_ident_token
: SyntaxToken
,
25 derive_ctx
: Option
<(SyntaxNode
, SyntaxNode
, TextSize
, ast
::Attr
)>,
28 pub(super) struct AnalysisResult
{
29 pub(super) analysis
: CompletionAnalysis
,
30 pub(super) expected
: (Option
<Type
>, Option
<ast
::NameOrNameRef
>),
31 pub(super) qualifier_ctx
: QualifierCtx
,
32 pub(super) token
: SyntaxToken
,
33 pub(super) offset
: TextSize
,
36 pub(super) fn expand_and_analyze(
37 sema
: &Semantics
<'_
, RootDatabase
>,
38 original_file
: SyntaxNode
,
39 speculative_file
: SyntaxNode
,
41 original_token
: &SyntaxToken
,
42 ) -> Option
<AnalysisResult
> {
43 // as we insert after the offset, right biased will *always* pick the identifier no matter
44 // if there is an ident already typed or not
45 let fake_ident_token
= speculative_file
.token_at_offset(offset
).right_biased()?
;
46 // the relative offset between the cursor and the *identifier* token we are completing on
47 let relative_offset
= offset
- fake_ident_token
.text_range().start();
48 // make the offset point to the start of the original token, as that is what the
49 // intermediate offsets calculated in expansion always points to
50 let offset
= offset
- relative_offset
;
51 let expansion
= expand(sema
, original_file
, speculative_file
, offset
, fake_ident_token
);
52 // add the relative offset back, so that left_biased finds the proper token
53 let offset
= expansion
.offset
+ relative_offset
;
54 let token
= expansion
.original_file
.token_at_offset(offset
).left_biased()?
;
56 analyze(sema
, expansion
, original_token
, &token
).map(|(analysis
, expected
, qualifier_ctx
)| {
57 AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
61 /// Expand attributes and macro calls at the current cursor position for both the original file
62 /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
63 /// and speculative states stay in sync.
65 sema
: &Semantics
<'_
, RootDatabase
>,
66 mut original_file
: SyntaxNode
,
67 mut speculative_file
: SyntaxNode
,
69 mut fake_ident_token
: SyntaxToken
,
70 ) -> ExpansionResult
{
71 let _p
= profile
::span("CompletionContext::expand");
72 let mut derive_ctx
= None
;
76 |item
: &ast
::Item
| item
.syntax().ancestors().skip(1).find_map(ast
::Item
::cast
);
77 let ancestor_items
= iter
::successors(
79 find_node_at_offset
::<ast
::Item
>(&original_file
, offset
),
80 find_node_at_offset
::<ast
::Item
>(&speculative_file
, offset
),
82 |(a
, b
)| parent_item(a
).zip(parent_item(b
)),
85 // first try to expand attributes as these are always the outermost macro calls
86 'ancestors
: for (actual_item
, item_with_fake_ident
) in ancestor_items
{
88 sema
.expand_attr_macro(&actual_item
),
89 sema
.speculative_expand_attr_macro(
91 &item_with_fake_ident
,
92 fake_ident_token
.clone(),
95 // maybe parent items have attributes, so continue walking the ancestors
96 (None
, None
) => continue 'ancestors
,
97 // successful expansions
98 (Some(actual_expansion
), Some((fake_expansion
, fake_mapped_token
))) => {
99 let new_offset
= fake_mapped_token
.text_range().start();
100 if new_offset
> actual_expansion
.text_range().end() {
101 // offset outside of bounds from the original expansion,
102 // stop here to prevent problems from happening
105 original_file
= actual_expansion
;
106 speculative_file
= fake_expansion
;
107 fake_ident_token
= fake_mapped_token
;
111 // exactly one expansion failed, inconsistent state so stop expanding completely
112 _
=> break 'expansion
,
116 // No attributes have been expanded, so look for macro_call! token trees or derive token trees
117 let orig_tt
= match find_node_at_offset
::<ast
::TokenTree
>(&original_file
, offset
) {
119 None
=> break 'expansion
,
121 let spec_tt
= match find_node_at_offset
::<ast
::TokenTree
>(&speculative_file
, offset
) {
123 None
=> break 'expansion
,
126 // Expand pseudo-derive expansion
127 if let (Some(orig_attr
), Some(spec_attr
)) = (
128 orig_tt
.syntax().parent().and_then(ast
::Meta
::cast
).and_then(|it
| it
.parent_attr()),
129 spec_tt
.syntax().parent().and_then(ast
::Meta
::cast
).and_then(|it
| it
.parent_attr()),
131 if let (Some(actual_expansion
), Some((fake_expansion
, fake_mapped_token
))) = (
132 sema
.expand_derive_as_pseudo_attr_macro(&orig_attr
),
133 sema
.speculative_expand_derive_as_pseudo_attr_macro(
136 fake_ident_token
.clone(),
142 fake_mapped_token
.text_range().start(),
146 // at this point we won't have any more successful expansions, so stop
150 // Expand fn-like macro calls
151 if let (Some(actual_macro_call
), Some(macro_call_with_fake_ident
)) = (
152 orig_tt
.syntax().ancestors().find_map(ast
::MacroCall
::cast
),
153 spec_tt
.syntax().ancestors().find_map(ast
::MacroCall
::cast
),
155 let mac_call_path0
= actual_macro_call
.path().as_ref().map(|s
| s
.syntax().text());
157 macro_call_with_fake_ident
.path().as_ref().map(|s
| s
.syntax().text());
159 // inconsistent state, stop expanding
160 if mac_call_path0
!= mac_call_path1
{
163 let speculative_args
= match macro_call_with_fake_ident
.token_tree() {
165 None
=> break 'expansion
,
169 sema
.expand(&actual_macro_call
),
170 sema
.speculative_expand(
173 fake_ident_token
.clone(),
176 // successful expansions
177 (Some(actual_expansion
), Some((fake_expansion
, fake_mapped_token
))) => {
178 let new_offset
= fake_mapped_token
.text_range().start();
179 if new_offset
> actual_expansion
.text_range().end() {
180 // offset outside of bounds from the original expansion,
181 // stop here to prevent problems from happening
184 original_file
= actual_expansion
;
185 speculative_file
= fake_expansion
;
186 fake_ident_token
= fake_mapped_token
;
190 // at least on expansion failed, we won't have anything to expand from this point
191 // onwards so break out
192 _
=> break 'expansion
,
196 // none of our states have changed so stop the loop
199 ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
202 /// Fill the completion context, this is what does semantic reasoning about the surrounding context
203 /// of the completion location.
205 sema
: &Semantics
<'_
, RootDatabase
>,
206 expansion_result
: ExpansionResult
,
207 original_token
: &SyntaxToken
,
208 self_token
: &SyntaxToken
,
209 ) -> Option
<(CompletionAnalysis
, (Option
<Type
>, Option
<ast
::NameOrNameRef
>), QualifierCtx
)> {
210 let _p
= profile
::span("CompletionContext::analyze");
211 let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
=
213 let syntax_element
= NodeOrToken
::Token(fake_ident_token
);
214 if is_in_token_of_for_loop(syntax_element
.clone()) {
216 // there is nothing to complete here except `in` keyword
217 // don't bother populating the context
218 // FIXME: the completion calculations should end up good enough
219 // such that this special case becomes unnecessary
223 // Overwrite the path kind for derives
224 if let Some((original_file
, file_with_fake_ident
, offset
, origin_attr
)) = derive_ctx
{
225 if let Some(ast
::NameLike
::NameRef(name_ref
)) =
226 find_node_at_offset(&file_with_fake_ident
, offset
)
228 let parent
= name_ref
.syntax().parent()?
;
229 let (mut nameref_ctx
, _
) = classify_name_ref(&sema
, &original_file
, name_ref
, parent
)?
;
230 if let NameRefKind
::Path(path_ctx
) = &mut nameref_ctx
.kind
{
231 path_ctx
.kind
= PathKind
::Derive
{
232 existing_derives
: sema
233 .resolve_derive_macro(&origin_attr
)
241 CompletionAnalysis
::NameRef(nameref_ctx
),
243 QualifierCtx
::default(),
249 let name_like
= match find_node_at_offset(&speculative_file
, offset
) {
252 let analysis
= if let Some(original
) = ast
::String
::cast(original_token
.clone()) {
253 CompletionAnalysis
::String
{
255 expanded
: ast
::String
::cast(self_token
.clone()),
258 // Fix up trailing whitespace problem
260 let token
= syntax
::algo
::skip_trivia_token(self_token
.clone(), Direction
::Prev
)?
;
261 let p
= token
.parent()?
;
262 if p
.kind() == SyntaxKind
::TOKEN_TREE
263 && p
.ancestors().any(|it
| it
.kind() == SyntaxKind
::META
)
265 let colon_prefix
= previous_non_trivia_token(self_token
.clone())
266 .map_or(false, |it
| T
![:] == it
.kind());
267 CompletionAnalysis
::UnexpandedAttrTT
{
268 fake_attribute_under_caret
: syntax_element
270 .find_map(ast
::Attr
::cast
),
277 return Some((analysis
, (None
, None
), QualifierCtx
::default()));
280 let expected
= expected_type_and_name(sema
, &self_token
, &name_like
);
281 let mut qual_ctx
= QualifierCtx
::default();
282 let analysis
= match name_like
{
283 ast
::NameLike
::Lifetime(lifetime
) => {
284 CompletionAnalysis
::Lifetime(classify_lifetime(sema
, &original_file
, lifetime
)?
)
286 ast
::NameLike
::NameRef(name_ref
) => {
287 let parent
= name_ref
.syntax().parent()?
;
288 let (nameref_ctx
, qualifier_ctx
) =
289 classify_name_ref(sema
, &original_file
, name_ref
, parent
.clone())?
;
290 qual_ctx
= qualifier_ctx
;
291 CompletionAnalysis
::NameRef(nameref_ctx
)
293 ast
::NameLike
::Name(name
) => {
294 let name_ctx
= classify_name(sema
, &original_file
, name
)?
;
295 CompletionAnalysis
::Name(name_ctx
)
298 Some((analysis
, expected
, qual_ctx
))
301 /// Calculate the expected type and name of the cursor position.
302 fn expected_type_and_name(
303 sema
: &Semantics
<'_
, RootDatabase
>,
305 name_like
: &ast
::NameLike
,
306 ) -> (Option
<Type
>, Option
<NameOrNameRef
>) {
307 let mut node
= match token
.parent() {
309 None
=> return (None
, None
),
312 let strip_refs
= |mut ty
: Type
| match name_like
{
313 ast
::NameLike
::NameRef(n
) => {
314 let p
= match n
.syntax().parent() {
318 let top_syn
= match_ast
! {
320 ast
::FieldExpr(e
) => e
323 .map_while(ast
::FieldExpr
::cast
)
325 .map(|it
| it
.syntax().clone()),
326 ast
::PathSegment(e
) => e
330 .take_while(|it
| ast
::Path
::can_cast(it
.kind()) || ast
::PathExpr
::can_cast(it
.kind()))
331 .find_map(ast
::PathExpr
::cast
)
332 .map(|it
| it
.syntax().clone()),
336 let top_syn
= match top_syn
{
340 for _
in top_syn
.ancestors().skip(1).map_while(ast
::RefExpr
::cast
) {
341 cov_mark
::hit
!(expected_type_fn_param_ref
);
342 ty
= ty
.strip_reference();
352 ast
::LetStmt(it
) => {
353 cov_mark
::hit
!(expected_type_let_with_leading_char
);
354 cov_mark
::hit
!(expected_type_let_without_leading_char
);
356 .and_then(|pat
| sema
.type_of_pat(&pat
))
357 .or_else(|| it
.initializer().and_then(|it
| sema
.type_of_expr(&it
)))
358 .map(TypeInfo
::original
);
359 let name
= match it
.pat() {
360 Some(ast
::Pat
::IdentPat(ident
)) => ident
.name().map(NameOrNameRef
::Name
),
361 Some(_
) | None
=> None
,
366 ast
::LetExpr(it
) => {
367 cov_mark
::hit
!(expected_type_if_let_without_leading_char
);
369 .and_then(|pat
| sema
.type_of_pat(&pat
))
370 .or_else(|| it
.expr().and_then(|it
| sema
.type_of_expr(&it
)))
371 .map(TypeInfo
::original
);
375 cov_mark
::hit
!(expected_type_fn_param
);
376 ActiveParameter
::at_token(
380 let name
= ap
.ident().map(NameOrNameRef
::Name
);
382 let ty
= strip_refs(ap
.ty
);
385 .unwrap_or((None
, None
))
387 ast
::RecordExprFieldList(it
) => {
388 // wouldn't try {} be nice...
390 if token
.kind() == T
![..]
391 ||token
.prev_token().map(|t
| t
.kind()) == Some(T
![..])
393 cov_mark
::hit
!(expected_type_struct_func_update
);
394 let record_expr
= it
.syntax().parent().and_then(ast
::RecordExpr
::cast
)?
;
395 let ty
= sema
.type_of_expr(&record_expr
.into())?
;
401 cov_mark
::hit
!(expected_type_struct_field_without_leading_char
);
402 let expr_field
= token
.prev_sibling_or_token()?
404 .and_then(ast
::RecordExprField
::cast
)?
;
405 let (_
, _
, ty
) = sema
.resolve_record_field(&expr_field
)?
;
408 expr_field
.field_name().map(NameOrNameRef
::NameRef
),
411 })().unwrap_or((None
, None
))
413 ast
::RecordExprField(it
) => {
414 if let Some(expr
) = it
.expr() {
415 cov_mark
::hit
!(expected_type_struct_field_with_leading_char
);
417 sema
.type_of_expr(&expr
).map(TypeInfo
::original
),
418 it
.field_name().map(NameOrNameRef
::NameRef
),
421 cov_mark
::hit
!(expected_type_struct_field_followed_by_comma
);
422 let ty
= sema
.resolve_record_field(&it
)
423 .map(|(_
, _
, ty
)| ty
);
426 it
.field_name().map(NameOrNameRef
::NameRef
),
431 // match foo { ..., pat => $0 }
432 ast
::MatchExpr(it
) => {
433 let on_arrow
= previous_non_trivia_token(token
.clone()).map_or(false, |it
| T
![=>] == it
.kind());
435 let ty
= if on_arrow
{
436 // match foo { ..., pat => $0 }
437 cov_mark
::hit
!(expected_type_match_arm_body_without_leading_char
);
438 cov_mark
::hit
!(expected_type_match_arm_body_with_leading_char
);
439 sema
.type_of_expr(&it
.into())
442 cov_mark
::hit
!(expected_type_match_arm_without_leading_char
);
443 it
.expr().and_then(|e
| sema
.type_of_expr(&e
))
444 }.map(TypeInfo
::original
);
448 let ty
= it
.condition()
449 .and_then(|e
| sema
.type_of_expr(&e
))
450 .map(TypeInfo
::original
);
453 ast
::IdentPat(it
) => {
454 cov_mark
::hit
!(expected_type_if_let_with_leading_char
);
455 cov_mark
::hit
!(expected_type_match_arm_with_leading_char
);
456 let ty
= sema
.type_of_pat(&ast
::Pat
::from(it
)).map(TypeInfo
::original
);
460 cov_mark
::hit
!(expected_type_fn_ret_with_leading_char
);
461 cov_mark
::hit
!(expected_type_fn_ret_without_leading_char
);
462 let def
= sema
.to_def(&it
);
463 (def
.map(|def
| def
.ret_type(sema
.db
)), None
)
465 ast
::ClosureExpr(it
) => {
466 let ty
= sema
.type_of_expr(&it
.into());
467 ty
.and_then(|ty
| ty
.original
.as_callable(sema
.db
))
468 .map(|c
| (Some(c
.return_type()), None
))
469 .unwrap_or((None
, None
))
471 ast
::ParamList(_
) => (None
, None
),
472 ast
::Stmt(_
) => (None
, None
),
473 ast
::Item(_
) => (None
, None
),
475 match node
.parent() {
480 None
=> (None
, None
),
488 fn classify_lifetime(
489 _sema
: &Semantics
<'_
, RootDatabase
>,
490 original_file
: &SyntaxNode
,
491 lifetime
: ast
::Lifetime
,
492 ) -> Option
<LifetimeContext
> {
493 let parent
= lifetime
.syntax().parent()?
;
494 if parent
.kind() == SyntaxKind
::ERROR
{
498 let kind
= match_ast
! {
500 ast
::LifetimeParam(param
) => LifetimeKind
::LifetimeParam
{
501 is_decl
: param
.lifetime().as_ref() == Some(&lifetime
),
504 ast
::BreakExpr(_
) => LifetimeKind
::LabelRef
,
505 ast
::ContinueExpr(_
) => LifetimeKind
::LabelRef
,
506 ast
::Label(_
) => LifetimeKind
::LabelDef
,
507 _
=> LifetimeKind
::Lifetime
,
510 let lifetime
= find_node_at_offset(&original_file
, lifetime
.syntax().text_range().start());
512 Some(LifetimeContext { lifetime, kind }
)
516 sema
: &Semantics
<'_
, RootDatabase
>,
517 original_file
: &SyntaxNode
,
519 ) -> Option
<NameContext
> {
520 let parent
= name
.syntax().parent()?
;
521 let kind
= match_ast
! {
523 ast
::Const(_
) => NameKind
::Const
,
524 ast
::ConstParam(_
) => NameKind
::ConstParam
,
525 ast
::Enum(_
) => NameKind
::Enum
,
526 ast
::Fn(_
) => NameKind
::Function
,
527 ast
::IdentPat(bind_pat
) => {
528 let mut pat_ctx
= pattern_context_for(sema
, original_file
, bind_pat
.into());
529 if let Some(record_field
) = ast
::RecordPatField
::for_field_name(&name
) {
530 pat_ctx
.record_pat
= find_node_in_file_compensated(sema
, original_file
, &record_field
.parent_record_pat());
533 NameKind
::IdentPat(pat_ctx
)
535 ast
::MacroDef(_
) => NameKind
::MacroDef
,
536 ast
::MacroRules(_
) => NameKind
::MacroRules
,
537 ast
::Module(module
) => NameKind
::Module(module
),
538 ast
::RecordField(_
) => NameKind
::RecordField
,
539 ast
::Rename(_
) => NameKind
::Rename
,
540 ast
::SelfParam(_
) => NameKind
::SelfParam
,
541 ast
::Static(_
) => NameKind
::Static
,
542 ast
::Struct(_
) => NameKind
::Struct
,
543 ast
::Trait(_
) => NameKind
::Trait
,
544 ast
::TypeAlias(_
) => NameKind
::TypeAlias
,
545 ast
::TypeParam(_
) => NameKind
::TypeParam
,
546 ast
::Union(_
) => NameKind
::Union
,
547 ast
::Variant(_
) => NameKind
::Variant
,
551 let name
= find_node_at_offset(&original_file
, name
.syntax().text_range().start());
552 Some(NameContext { name, kind }
)
555 fn classify_name_ref(
556 sema
: &Semantics
<'_
, RootDatabase
>,
557 original_file
: &SyntaxNode
,
558 name_ref
: ast
::NameRef
,
560 ) -> Option
<(NameRefContext
, QualifierCtx
)> {
561 let nameref
= find_node_at_offset(&original_file
, name_ref
.syntax().text_range().start());
563 let make_res
= |kind
| (NameRefContext { nameref: nameref.clone(), kind }
, Default
::default());
565 if let Some(record_field
) = ast
::RecordExprField
::for_field_name(&name_ref
) {
566 let dot_prefix
= previous_non_trivia_token(name_ref
.syntax().clone())
567 .map_or(false, |it
| T
![.] == it
.kind());
569 return find_node_in_file_compensated(
572 &record_field
.parent_record_lit(),
574 .map(|expr
| NameRefKind
::RecordExpr { expr, dot_prefix }
)
577 if let Some(record_field
) = ast
::RecordPatField
::for_field_name_ref(&name_ref
) {
578 let kind
= NameRefKind
::Pattern(PatternContext
{
580 has_type_ascription
: false,
583 record_pat
: find_node_in_file_compensated(
586 &record_field
.parent_record_pat(),
588 ..pattern_context_for(
591 record_field
.parent_record_pat().clone().into(),
594 return Some(make_res(kind
));
597 let segment
= match_ast
! {
599 ast
::PathSegment(segment
) => segment
,
600 ast
::FieldExpr(field
) => {
601 let receiver
= find_opt_node_in_file(original_file
, field
.expr());
602 let receiver_is_ambiguous_float_literal
= match &receiver
{
603 Some(ast
::Expr
::Literal(l
)) => matches
! {
605 ast
::LiteralKind
::FloatNumber { .. }
if l
.syntax().last_token().map_or(false, |it
| it
.text().ends_with('
.'
))
609 let kind
= NameRefKind
::DotAccess(DotAccess
{
610 receiver_ty
: receiver
.as_ref().and_then(|it
| sema
.type_of_expr(it
)),
611 kind
: DotAccessKind
::Field { receiver_is_ambiguous_float_literal }
,
614 return Some(make_res(kind
));
616 ast
::MethodCallExpr(method
) => {
617 let receiver
= find_opt_node_in_file(original_file
, method
.receiver());
618 let kind
= NameRefKind
::DotAccess(DotAccess
{
619 receiver_ty
: receiver
.as_ref().and_then(|it
| sema
.type_of_expr(it
)),
620 kind
: DotAccessKind
::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) }
,
623 return Some(make_res(kind
));
629 let path
= segment
.parent_path();
630 let original_path
= find_node_in_file_compensated(sema
, original_file
, &path
);
632 let mut path_ctx
= PathCompletionCtx
{
633 has_call_parens
: false,
634 has_macro_bang
: false,
635 qualified
: Qualified
::No
,
639 kind
: PathKind
::Item { kind: ItemListKind::SourceFile }
,
640 has_type_args
: false,
641 use_tree_parent
: false,
644 let is_in_block
= |it
: &SyntaxNode
| {
647 ast
::ExprStmt
::can_cast(node
.kind()) || ast
::StmtList
::can_cast(node
.kind())
651 let func_update_record
= |syn
: &SyntaxNode
| {
652 if let Some(record_expr
) = syn
.ancestors().nth(2).and_then(ast
::RecordExpr
::cast
) {
653 find_node_in_file_compensated(sema
, original_file
, &record_expr
)
658 let after_if_expr
= |node
: SyntaxNode
| {
659 let prev_expr
= (|| {
660 let prev_sibling
= non_trivia_sibling(node
.into(), Direction
::Prev
)?
.into_node()?
;
661 ast
::ExprStmt
::cast(prev_sibling
)?
.expr()
663 matches
!(prev_expr
, Some(ast
::Expr
::IfExpr(_
)))
666 // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
667 // ex. trait Foo $0 {}
668 // in these cases parser recovery usually kicks in for our inserted identifier, causing it
669 // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
670 // expression or an item list.
671 // The following code checks if the body is missing, if it is we either cut off the body
672 // from the item or it was missing in the first place
673 let inbetween_body_and_decl_check
= |node
: SyntaxNode
| {
674 if let Some(NodeOrToken
::Node(n
)) =
675 syntax
::algo
::non_trivia_sibling(node
.into(), syntax
::Direction
::Prev
)
677 if let Some(item
) = ast
::Item
::cast(n
) {
678 let is_inbetween
= match &item
{
679 ast
::Item
::Const(it
) => it
.body().is_none(),
680 ast
::Item
::Enum(it
) => it
.variant_list().is_none(),
681 ast
::Item
::ExternBlock(it
) => it
.extern_item_list().is_none(),
682 ast
::Item
::Fn(it
) => it
.body().is_none(),
683 ast
::Item
::Impl(it
) => it
.assoc_item_list().is_none(),
684 ast
::Item
::Module(it
) => {
685 it
.item_list().is_none() && it
.semicolon_token().is_none()
687 ast
::Item
::Static(it
) => it
.body().is_none(),
688 ast
::Item
::Struct(it
) => {
689 it
.field_list().is_none() && it
.semicolon_token().is_none()
691 ast
::Item
::Trait(it
) => it
.assoc_item_list().is_none(),
692 ast
::Item
::TypeAlias(it
) => it
.ty().is_none(),
693 ast
::Item
::Union(it
) => it
.record_field_list().is_none(),
704 let type_location
= |node
: &SyntaxNode
| {
705 let parent
= node
.parent()?
;
706 let res
= match_ast
! {
709 let name
= find_opt_node_in_file(original_file
, it
.name())?
;
710 let original
= ast
::Const
::cast(name
.syntax().parent()?
)?
;
711 TypeLocation
::TypeAscription(TypeAscriptionTarget
::Const(original
.body()))
713 ast
::RetType(it
) => {
714 if it
.thin_arrow_token().is_none() {
717 let parent
= match ast
::Fn
::cast(parent
.parent()?
) {
718 Some(x
) => x
.param_list(),
719 None
=> ast
::ClosureExpr
::cast(parent
.parent()?
)?
.param_list(),
722 let parent
= find_opt_node_in_file(original_file
, parent
)?
.syntax().parent()?
;
723 TypeLocation
::TypeAscription(TypeAscriptionTarget
::RetType(match_ast
! {
725 ast
::ClosureExpr(it
) => {
729 it
.body().map(ast
::Expr
::BlockExpr
)
736 if it
.colon_token().is_none() {
739 TypeLocation
::TypeAscription(TypeAscriptionTarget
::FnParam(find_opt_node_in_file(original_file
, it
.pat())))
741 ast
::LetStmt(it
) => {
742 if it
.colon_token().is_none() {
745 TypeLocation
::TypeAscription(TypeAscriptionTarget
::Let(find_opt_node_in_file(original_file
, it
.pat())))
749 Some(t
) if t
.syntax() == node
=> TypeLocation
::ImplTrait
,
750 _
=> match it
.self_ty() {
751 Some(t
) if t
.syntax() == node
=> TypeLocation
::ImplTarget
,
756 ast
::TypeBound(_
) => TypeLocation
::TypeBound
,
757 // is this case needed?
758 ast
::TypeBoundList(_
) => TypeLocation
::TypeBound
,
759 ast
::GenericArg(it
) => TypeLocation
::GenericArgList(find_opt_node_in_file_compensated(sema
, original_file
, it
.syntax().parent().and_then(ast
::GenericArgList
::cast
))),
760 // is this case needed?
761 ast
::GenericArgList(it
) => TypeLocation
::GenericArgList(find_opt_node_in_file_compensated(sema
, original_file
, Some(it
))),
762 ast
::TupleField(_
) => TypeLocation
::TupleField
,
769 let is_in_condition
= |it
: &ast
::Expr
| {
771 let parent
= it
.syntax().parent()?
;
772 if let Some(expr
) = ast
::WhileExpr
::cast(parent
.clone()) {
773 Some(expr
.condition()?
== *it
)
774 } else if let Some(expr
) = ast
::IfExpr
::cast(parent
) {
775 Some(expr
.condition()?
== *it
)
783 let make_path_kind_expr
= |expr
: ast
::Expr
| {
784 let it
= expr
.syntax();
785 let in_block_expr
= is_in_block(it
);
786 let in_loop_body
= is_in_loop_body(it
);
787 let after_if_expr
= after_if_expr(it
.clone());
788 let ref_expr_parent
=
789 path
.as_single_name_ref().and_then(|_
| it
.parent()).and_then(ast
::RefExpr
::cast
);
790 let (innermost_ret_ty
, self_param
) = {
791 let find_ret_ty
= |it
: SyntaxNode
| {
792 if let Some(item
) = ast
::Item
::cast(it
.clone()) {
794 ast
::Item
::Fn(f
) => Some(sema
.to_def(&f
).map(|it
| it
.ret_type(sema
.db
))),
795 ast
::Item
::MacroCall(_
) => None
,
799 let expr
= ast
::Expr
::cast(it
)?
;
800 let callable
= match expr
{
802 // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
803 ast
::Expr
::ClosureExpr(_
) => sema
.type_of_expr(&expr
),
808 .and_then(|c
| c
.adjusted().as_callable(sema
.db
))
809 .map(|it
| it
.return_type()),
813 let find_fn_self_param
= |it
| match it
{
814 ast
::Item
::Fn(fn_
) => Some(sema
.to_def(&fn_
).and_then(|it
| it
.self_param(sema
.db
))),
815 ast
::Item
::MacroCall(_
) => None
,
819 match find_node_in_file_compensated(sema
, original_file
, &expr
) {
821 let innermost_ret_ty
= sema
822 .ancestors_with_macros(it
.syntax().clone())
823 .find_map(find_ret_ty
)
826 let self_param
= sema
827 .ancestors_with_macros(it
.syntax().clone())
828 .filter_map(ast
::Item
::cast
)
829 .find_map(find_fn_self_param
)
831 (innermost_ret_ty
, self_param
)
833 None
=> (None
, None
),
836 let is_func_update
= func_update_record(it
);
837 let in_condition
= is_in_condition(&expr
);
838 let incomplete_let
= it
840 .and_then(ast
::LetStmt
::cast
)
841 .map_or(false, |it
| it
.semicolon_token().is_none());
842 let impl_
= fetch_immediate_impl(sema
, original_file
, expr
.syntax());
844 let in_match_guard
= match it
.parent().and_then(ast
::MatchArm
::cast
) {
847 .map_or(true, |arrow
| it
.text_range().start() < arrow
.text_range().start()),
867 let make_path_kind_type
= |ty
: ast
::Type
| {
868 let location
= type_location(ty
.syntax());
869 PathKind
::Type { location: location.unwrap_or(TypeLocation::Other) }
872 let mut kind_macro_call
= |it
: ast
::MacroCall
| {
873 path_ctx
.has_macro_bang
= it
.excl_token().is_some();
874 let parent
= it
.syntax().parent()?
;
875 // Any path in an item list will be treated as a macro call by the parser
876 let kind
= match_ast
! {
878 ast
::MacroExpr(expr
) => make_path_kind_expr(expr
.into()),
879 ast
::MacroPat(it
) => PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
,
880 ast
::MacroType(ty
) => make_path_kind_type(ty
.into()),
881 ast
::ItemList(_
) => PathKind
::Item { kind: ItemListKind::Module }
,
882 ast
::AssocItemList(_
) => PathKind
::Item
{ kind
: match parent
.parent() {
883 Some(it
) => match_ast
! {
885 ast
::Trait(_
) => ItemListKind
::Trait
,
886 ast
::Impl(it
) => if it
.trait_().is_some() {
887 ItemListKind
::TraitImpl(find_node_in_file_compensated(sema
, original_file
, &it
))
896 ast
::ExternItemList(_
) => PathKind
::Item { kind: ItemListKind::ExternBlock }
,
897 ast
::SourceFile(_
) => PathKind
::Item { kind: ItemListKind::SourceFile }
,
903 let make_path_kind_attr
= |meta
: ast
::Meta
| {
904 let attr
= meta
.parent_attr()?
;
905 let kind
= attr
.kind();
906 let attached
= attr
.syntax().parent()?
;
907 let is_trailing_outer_attr
= kind
!= AttrKind
::Inner
908 && non_trivia_sibling(attr
.syntax().clone().into(), syntax
::Direction
::Next
).is_none();
909 let annotated_item_kind
= if is_trailing_outer_attr { None }
else { Some(attached.kind()) }
;
910 Some(PathKind
::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind }
})
913 // Infer the path kind
914 let parent
= path
.syntax().parent()?
;
915 let kind
= match_ast
! {
917 ast
::PathType(it
) => make_path_kind_type(it
.into()),
918 ast
::PathExpr(it
) => {
919 if let Some(p
) = it
.syntax().parent() {
920 if ast
::ExprStmt
::can_cast(p
.kind()) {
921 if let Some(kind
) = inbetween_body_and_decl_check(p
) {
922 return Some(make_res(NameRefKind
::Keyword(kind
)));
927 path_ctx
.has_call_parens
= it
.syntax().parent().map_or(false, |it
| ast
::CallExpr
::can_cast(it
.kind()));
929 make_path_kind_expr(it
.into())
931 ast
::TupleStructPat(it
) => {
932 path_ctx
.has_call_parens
= true;
933 PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
935 ast
::RecordPat(it
) => {
936 path_ctx
.has_call_parens
= true;
937 PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
939 ast
::PathPat(it
) => {
940 PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
942 ast
::MacroCall(it
) => {
943 // A macro call in this position is usually a result of parsing recovery, so check that
944 if let Some(kind
) = inbetween_body_and_decl_check(it
.syntax().clone()) {
945 return Some(make_res(NameRefKind
::Keyword(kind
)));
950 ast
::Meta(meta
) => make_path_kind_attr(meta
)?
,
951 ast
::Visibility(it
) => PathKind
::Vis { has_in_token: it.in_token().is_some() }
,
952 ast
::UseTree(_
) => PathKind
::Use
,
953 // completing inside a qualifier
954 ast
::Path(parent
) => {
955 path_ctx
.parent
= Some(parent
.clone());
956 let parent
= iter
::successors(Some(parent
), |it
| it
.parent_path()).last()?
.syntax().parent()?
;
959 ast
::PathType(it
) => make_path_kind_type(it
.into()),
960 ast
::PathExpr(it
) => {
961 path_ctx
.has_call_parens
= it
.syntax().parent().map_or(false, |it
| ast
::CallExpr
::can_cast(it
.kind()));
963 make_path_kind_expr(it
.into())
965 ast
::TupleStructPat(it
) => {
966 path_ctx
.has_call_parens
= true;
967 PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
969 ast
::RecordPat(it
) => {
970 path_ctx
.has_call_parens
= true;
971 PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
973 ast
::PathPat(it
) => {
974 PathKind
::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
976 ast
::MacroCall(it
) => {
979 ast
::Meta(meta
) => make_path_kind_attr(meta
)?
,
980 ast
::Visibility(it
) => PathKind
::Vis { has_in_token: it.in_token().is_some() }
,
981 ast
::UseTree(_
) => PathKind
::Use
,
982 ast
::RecordExpr(it
) => make_path_kind_expr(it
.into()),
987 ast
::RecordExpr(it
) => make_path_kind_expr(it
.into()),
992 path_ctx
.kind
= kind
;
993 path_ctx
.has_type_args
= segment
.generic_arg_list().is_some();
995 // calculate the qualifier context
996 if let Some((qualifier
, use_tree_parent
)) = path_or_use_tree_qualifier(&path
) {
997 path_ctx
.use_tree_parent
= use_tree_parent
;
998 if !use_tree_parent
&& segment
.coloncolon_token().is_some() {
999 path_ctx
.qualified
= Qualified
::Absolute
;
1001 let qualifier
= qualifier
1003 .and_then(|it
| find_node_in_file(original_file
, &it
))
1004 .map(|it
| it
.parent_path());
1005 if let Some(qualifier
) = qualifier
{
1006 let type_anchor
= match qualifier
.segment().and_then(|it
| it
.kind()) {
1007 Some(ast
::PathSegmentKind
::Type { type_ref: Some(type_ref), trait_ref }
)
1008 if qualifier
.qualifier().is_none() =>
1010 Some((type_ref
, trait_ref
))
1015 path_ctx
.qualified
= if let Some((ty
, trait_ref
)) = type_anchor
{
1017 ast
::Type
::InferType(_
) => None
,
1018 ty
=> sema
.resolve_type(&ty
),
1020 let trait_
= trait_ref
.and_then(|it
| sema
.resolve_trait(&it
.path()?
));
1021 Qualified
::TypeAnchor { ty, trait_ }
1023 let res
= sema
.resolve_path(&qualifier
);
1025 // For understanding how and why super_chain_len is calculated the way it
1026 // is check the documentation at it's definition
1027 let mut segment_count
= 0;
1028 let super_count
= iter
::successors(Some(qualifier
.clone()), |p
| p
.qualifier())
1039 let super_chain_len
=
1040 if segment_count
> super_count { None }
else { Some(super_count) }
;
1042 Qualified
::With { path: qualifier, resolution: res, super_chain_len }
1046 } else if let Some(segment
) = path
.segment() {
1047 if segment
.coloncolon_token().is_some() {
1048 path_ctx
.qualified
= Qualified
::Absolute
;
1052 let mut qualifier_ctx
= QualifierCtx
::default();
1053 if path_ctx
.is_trivial_path() {
1054 // fetch the full expression that may have qualifiers attached to it
1055 let top_node
= match path_ctx
.kind
{
1056 PathKind
::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. }
} => {
1057 parent
.ancestors().find(|it
| ast
::PathExpr
::can_cast(it
.kind())).and_then(|p
| {
1058 let parent
= p
.parent()?
;
1059 if ast
::StmtList
::can_cast(parent
.kind()) {
1061 } else if ast
::ExprStmt
::can_cast(parent
.kind()) {
1068 PathKind
::Item { .. }
=> {
1069 parent
.ancestors().find(|it
| ast
::MacroCall
::can_cast(it
.kind()))
1073 if let Some(top
) = top_node
{
1074 if let Some(NodeOrToken
::Node(error_node
)) =
1075 syntax
::algo
::non_trivia_sibling(top
.clone().into(), syntax
::Direction
::Prev
)
1077 if error_node
.kind() == SyntaxKind
::ERROR
{
1078 qualifier_ctx
.unsafe_tok
= error_node
1079 .children_with_tokens()
1080 .filter_map(NodeOrToken
::into_token
)
1081 .find(|it
| it
.kind() == T
![unsafe]);
1082 qualifier_ctx
.vis_node
= error_node
.children().find_map(ast
::Visibility
::cast
);
1086 if let PathKind
::Item { .. }
= path_ctx
.kind
{
1087 if qualifier_ctx
.none() {
1088 if let Some(t
) = top
.first_token() {
1089 if let Some(prev
) = t
1091 .and_then(|t
| syntax
::algo
::skip_trivia_token(t
, Direction
::Prev
))
1093 if ![T
![;], T
!['
}'
], T
!['
{'
]].contains(&prev
.kind()) {
1094 // This was inferred to be an item position path, but it seems
1095 // to be part of some other broken node which leaked into an item
1105 Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }
, qualifier_ctx
))
1108 fn pattern_context_for(
1109 sema
: &Semantics
<'_
, RootDatabase
>,
1110 original_file
: &SyntaxNode
,
1112 ) -> PatternContext
{
1113 let mut param_ctx
= None
;
1114 let (refutability
, has_type_ascription
) =
1118 .skip_while(|it
| ast
::Pat
::can_cast(it
.kind()))
1120 .map_or((PatternRefutability
::Irrefutable
, false), |node
| {
1121 let refutability
= match_ast
! {
1123 ast
::LetStmt(let_
) => return (PatternRefutability
::Irrefutable
, let_
.ty().is_some()),
1124 ast
::Param(param
) => {
1125 let has_type_ascription
= param
.ty().is_some();
1127 let fake_param_list
= param
.syntax().parent().and_then(ast
::ParamList
::cast
)?
;
1128 let param_list
= find_node_in_file_compensated(sema
, original_file
, &fake_param_list
)?
;
1129 let param_list_owner
= param_list
.syntax().parent()?
;
1130 let kind
= match_ast
! {
1131 match param_list_owner
{
1132 ast
::ClosureExpr(closure
) => ParamKind
::Closure(closure
),
1133 ast
::Fn(fn_
) => ParamKind
::Function(fn_
),
1138 param_list
, param
, kind
1141 return (PatternRefutability
::Irrefutable
, has_type_ascription
)
1143 ast
::MatchArm(_
) => PatternRefutability
::Refutable
,
1144 ast
::LetExpr(_
) => PatternRefutability
::Refutable
,
1145 ast
::ForExpr(_
) => PatternRefutability
::Irrefutable
,
1146 _
=> PatternRefutability
::Irrefutable
,
1149 (refutability
, false)
1151 let (ref_token
, mut_token
) = match &pat
{
1152 ast
::Pat
::IdentPat(it
) => (it
.ref_token(), it
.mut_token()),
1159 has_type_ascription
,
1160 parent_pat
: pat
.syntax().parent().and_then(ast
::Pat
::cast
),
1164 impl_
: fetch_immediate_impl(sema
, original_file
, pat
.syntax()),
1168 fn fetch_immediate_impl(
1169 sema
: &Semantics
<'_
, RootDatabase
>,
1170 original_file
: &SyntaxNode
,
1172 ) -> Option
<ast
::Impl
> {
1173 let mut ancestors
= ancestors_in_file_compensated(sema
, original_file
, node
)?
1174 .filter_map(ast
::Item
::cast
)
1175 .filter(|it
| !matches
!(it
, ast
::Item
::MacroCall(_
)));
1177 match ancestors
.next()?
{
1178 ast
::Item
::Const(_
) | ast
::Item
::Fn(_
) | ast
::Item
::TypeAlias(_
) => (),
1179 ast
::Item
::Impl(it
) => return Some(it
),
1182 match ancestors
.next()?
{
1183 ast
::Item
::Impl(it
) => Some(it
),
1188 /// Attempts to find `node` inside `syntax` via `node`'s text range.
1189 /// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1190 fn find_opt_node_in_file
<N
: AstNode
>(syntax
: &SyntaxNode
, node
: Option
<N
>) -> Option
<N
> {
1191 find_node_in_file(syntax
, &node?
)
1194 /// Attempts to find `node` inside `syntax` via `node`'s text range.
1195 /// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1196 fn find_node_in_file
<N
: AstNode
>(syntax
: &SyntaxNode
, node
: &N
) -> Option
<N
> {
1197 let syntax_range
= syntax
.text_range();
1198 let range
= node
.syntax().text_range();
1199 let intersection
= range
.intersect(syntax_range
)?
;
1200 syntax
.covering_element(intersection
).ancestors().find_map(N
::cast
)
1203 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1204 /// for the offset introduced by the fake ident.
1205 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1206 fn find_node_in_file_compensated
<N
: AstNode
>(
1207 sema
: &Semantics
<'_
, RootDatabase
>,
1208 in_file
: &SyntaxNode
,
1211 ancestors_in_file_compensated(sema
, in_file
, node
.syntax())?
.find_map(N
::cast
)
1214 fn ancestors_in_file_compensated
<'sema
>(
1215 sema
: &'sema Semantics
<'_
, RootDatabase
>,
1216 in_file
: &SyntaxNode
,
1218 ) -> Option
<impl Iterator
<Item
= SyntaxNode
> + 'sema
> {
1219 let syntax_range
= in_file
.text_range();
1220 let range
= node
.text_range();
1221 let end
= range
.end().checked_sub(TextSize
::try_from(COMPLETION_MARKER
.len()).ok()?
)?
;
1222 if end
< range
.start() {
1225 let range
= TextRange
::new(range
.start(), end
);
1226 // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1227 let intersection
= range
.intersect(syntax_range
)?
;
1228 let node
= match in_file
.covering_element(intersection
) {
1229 NodeOrToken
::Node(node
) => node
,
1230 NodeOrToken
::Token(tok
) => tok
.parent()?
,
1232 Some(sema
.ancestors_with_macros(node
))
1235 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1236 /// for the offset introduced by the fake ident..
1237 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1238 fn find_opt_node_in_file_compensated
<N
: AstNode
>(
1239 sema
: &Semantics
<'_
, RootDatabase
>,
1240 syntax
: &SyntaxNode
,
1243 find_node_in_file_compensated(sema
, syntax
, &node?
)
1246 fn path_or_use_tree_qualifier(path
: &ast
::Path
) -> Option
<(ast
::Path
, bool
)> {
1247 if let Some(qual
) = path
.qualifier() {
1248 return Some((qual
, false));
1250 let use_tree_list
= path
.syntax().ancestors().find_map(ast
::UseTreeList
::cast
)?
;
1251 let use_tree
= use_tree_list
.syntax().parent().and_then(ast
::UseTree
::cast
)?
;
1252 Some((use_tree
.path()?
, true))
1255 pub(crate) fn is_in_token_of_for_loop(element
: SyntaxElement
) -> bool
{
1258 let syntax_token
= element
.into_token()?
;
1259 let range
= syntax_token
.text_range();
1260 let for_expr
= syntax_token
.parent_ancestors().find_map(ast
::ForExpr
::cast
)?
;
1262 // check if the current token is the `in` token of a for loop
1263 if let Some(token
) = for_expr
.in_token() {
1264 return Some(syntax_token
== token
);
1266 let pat
= for_expr
.pat()?
;
1267 if range
.end() < pat
.syntax().text_range().end() {
1268 // if we are inside or before the pattern we can't be at the `in` token position
1271 let next_sibl
= next_non_trivia_sibling(pat
.syntax().clone().into())?
;
1272 Some(match next_sibl
{
1273 // the loop body is some node, if our token is at the start we are at the `in` position,
1274 // otherwise we could be in a recovered expression, we don't wanna ruin completions there
1275 syntax
::NodeOrToken
::Node(n
) => n
.text_range().start() == range
.start(),
1276 // the loop body consists of a single token, if we are this we are certainly at the `in` token position
1277 syntax
::NodeOrToken
::Token(t
) => t
== syntax_token
,
#[test]
fn test_for_is_prev2() {
    // The ident right after `for i ` must be recognized as the `in`-token position.
    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
}
1288 pub(crate) fn is_in_loop_body(node
: &SyntaxNode
) -> bool
{
1290 .take_while(|it
| it
.kind() != SyntaxKind
::FN
&& it
.kind() != SyntaxKind
::CLOSURE_EXPR
)
1292 let loop_body
= match_ast
! {
1294 ast
::ForExpr(it
) => it
.loop_body(),
1295 ast
::WhileExpr(it
) => it
.loop_body(),
1296 ast
::LoopExpr(it
) => it
.loop_body(),
1300 loop_body
.filter(|it
| it
.syntax().text_range().contains_range(node
.text_range()))
1305 fn previous_non_trivia_token(e
: impl Into
<SyntaxElement
>) -> Option
<SyntaxToken
> {
1306 let mut token
= match e
.into() {
1307 SyntaxElement
::Node(n
) => n
.first_token()?
,
1308 SyntaxElement
::Token(t
) => t
,
1311 while let Some(inner
) = token
{
1312 if !inner
.kind().is_trivia() {
1315 token
= inner
.prev_token();
1321 fn next_non_trivia_sibling(ele
: SyntaxElement
) -> Option
<SyntaxElement
> {
1322 let mut e
= ele
.next_sibling_or_token();
1323 while let Some(inner
) = e
{
1324 if !inner
.kind().is_trivia() {
1327 e
= inner
.next_sibling_or_token();