]> git.proxmox.com Git - rustc.git/blame - src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
New upstream version 1.66.0+dfsg1
[rustc.git] / src / tools / rust-analyzer / crates / ide-completion / src / context / analysis.rs
CommitLineData
064997fb
FG
1//! Module responsible for analyzing the code surrounding the cursor for completion.
2use std::iter;
3
4use hir::{Semantics, Type, TypeInfo};
5use ide_db::{active_parameter::ActiveParameter, RootDatabase};
6use syntax::{
7 algo::{find_node_at_offset, non_trivia_sibling},
8 ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
9 match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
10 SyntaxToken, TextRange, TextSize, T,
11};
12
13use crate::context::{
2b03887a
FG
14 AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
15 LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
16 PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
17 TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
064997fb
FG
18};
19
2b03887a
FG
/// State carried through the repeated macro/attribute expansion performed by
/// [`expand`]: the "real" file and the speculative file (which contains the
/// inserted fake identifier) advanced in lockstep.
struct ExpansionResult {
    /// The (possibly macro-expanded) syntax tree of the real input.
    original_file: SyntaxNode,
    /// The matching tree containing the fake completion identifier.
    speculative_file: SyntaxNode,
    /// Offset of the completion position inside the expanded trees.
    offset: TextSize,
    /// The fake identifier token, mapped into the deepest expansion reached.
    fake_ident_token: SyntaxToken,
    /// Set when the cursor sits inside a `#[derive(...)]` pseudo-expansion:
    /// (expanded original, expanded speculative, offset, the derive attribute).
    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
064997fb 27
2b03887a
FG
/// Everything [`expand_and_analyze`] produces for the completion machinery:
/// the classified analysis, the expected type/name at the cursor, qualifier
/// information, plus the concrete token and offset the analysis settled on.
pub(super) struct AnalysisResult {
    pub(super) analysis: CompletionAnalysis,
    /// Expected type and (optionally) the name being ascribed at the position.
    pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
    pub(super) qualifier_ctx: QualifierCtx,
    /// The token in the original (expanded) file the completion is anchored to.
    pub(super) token: SyntaxToken,
    /// Offset of that token, with the cursor's relative offset re-applied.
    pub(super) offset: TextSize,
}
064997fb 35
2b03887a
FG
/// Entry point for context analysis: expands macros around the cursor via
/// [`expand`], then classifies the resulting position via `analyze`.
///
/// The offset juggling below exists because expansion offsets always point at
/// the *start* of the token being completed, while the caller's `offset` is
/// the actual cursor position (possibly mid-identifier).
pub(super) fn expand_and_analyze(
    sema: &Semantics<'_, RootDatabase>,
    original_file: SyntaxNode,
    speculative_file: SyntaxNode,
    offset: TextSize,
    original_token: &SyntaxToken,
) -> Option<AnalysisResult> {
    // as we insert after the offset, right biased will *always* pick the identifier no matter
    // if there is an ident already typed or not
    let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
    // the relative offset between the cursor and the *identifier* token we are completing on
    let relative_offset = offset - fake_ident_token.text_range().start();
    // make the offset point to the start of the original token, as that is what the
    // intermediate offsets calculated in expansion always points to
    let offset = offset - relative_offset;
    let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
    // add the relative offset back, so that left_biased finds the proper token
    let offset = expansion.offset + relative_offset;
    let token = expansion.original_file.token_at_offset(offset).left_biased()?;

    analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
        AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
    })
}
60
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
/// and speculative states stay in sync.
///
/// Each loop iteration tries, in order: attribute macros (outermost), derive
/// pseudo-expansion (terminal), then fn-like macro calls. Only a successful
/// *pair* of expansions (real + speculative) advances the state.
fn expand(
    sema: &Semantics<'_, RootDatabase>,
    mut original_file: SyntaxNode,
    mut speculative_file: SyntaxNode,
    mut offset: TextSize,
    mut fake_ident_token: SyntaxToken,
) -> ExpansionResult {
    let _p = profile::span("CompletionContext::expand");
    let mut derive_ctx = None;

    'expansion: loop {
        let parent_item =
            |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
        // Walk matching item ancestors of both files in lockstep; `zip` ensures we
        // stop as soon as either side runs out, keeping the two trees in sync.
        let ancestor_items = iter::successors(
            Option::zip(
                find_node_at_offset::<ast::Item>(&original_file, offset),
                find_node_at_offset::<ast::Item>(&speculative_file, offset),
            ),
            |(a, b)| parent_item(a).zip(parent_item(b)),
        );

        // first try to expand attributes as these are always the outermost macro calls
        'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
            match (
                sema.expand_attr_macro(&actual_item),
                sema.speculative_expand_attr_macro(
                    &actual_item,
                    &item_with_fake_ident,
                    fake_ident_token.clone(),
                ),
            ) {
                // maybe parent items have attributes, so continue walking the ancestors
                (None, None) => continue 'ancestors,
                // successful expansions
                (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                    let new_offset = fake_mapped_token.text_range().start();
                    if new_offset > actual_expansion.text_range().end() {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        break 'expansion;
                    }
                    original_file = actual_expansion;
                    speculative_file = fake_expansion;
                    fake_ident_token = fake_mapped_token;
                    offset = new_offset;
                    continue 'expansion;
                }
                // exactly one expansion failed, inconsistent state so stop expanding completely
                _ => break 'expansion,
            }
        }

        // No attributes have been expanded, so look for macro_call! token trees or derive token trees
        let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
            Some(it) => it,
            None => break 'expansion,
        };
        let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
            Some(it) => it,
            None => break 'expansion,
        };

        // Expand pseudo-derive expansion
        if let (Some(orig_attr), Some(spec_attr)) = (
            orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
            spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
        ) {
            if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
                sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
                sema.speculative_expand_derive_as_pseudo_attr_macro(
                    &orig_attr,
                    &spec_attr,
                    fake_ident_token.clone(),
                ),
            ) {
                derive_ctx = Some((
                    actual_expansion,
                    fake_expansion,
                    fake_mapped_token.text_range().start(),
                    orig_attr,
                ));
            }
            // at this point we won't have any more successful expansions, so stop
            break 'expansion;
        }

        // Expand fn-like macro calls
        if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
            orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
            spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
        ) {
            let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
            let mac_call_path1 =
                macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());

            // inconsistent state, stop expanding
            if mac_call_path0 != mac_call_path1 {
                break 'expansion;
            }
            let speculative_args = match macro_call_with_fake_ident.token_tree() {
                Some(tt) => tt,
                None => break 'expansion,
            };

            match (
                sema.expand(&actual_macro_call),
                sema.speculative_expand(
                    &actual_macro_call,
                    &speculative_args,
                    fake_ident_token.clone(),
                ),
            ) {
                // successful expansions
                (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                    let new_offset = fake_mapped_token.text_range().start();
                    if new_offset > actual_expansion.text_range().end() {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        break 'expansion;
                    }
                    original_file = actual_expansion;
                    speculative_file = fake_expansion;
                    fake_ident_token = fake_mapped_token;
                    offset = new_offset;
                    continue 'expansion;
                }
                // at least one expansion failed, we won't have anything to expand from this point
                // onwards so break out
                _ => break 'expansion,
            }
        }

        // none of our states have changed so stop the loop
        break 'expansion;
    }
    ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}
064997fb 201
2b03887a
FG
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
/// of the completion location.
///
/// Returns `None` when there is nothing sensible to complete (e.g. between a
/// `for` pattern and its `in` keyword), otherwise the classified analysis plus
/// the expected type/name and qualifier context.
fn analyze(
    sema: &Semantics<'_, RootDatabase>,
    expansion_result: ExpansionResult,
    original_token: &SyntaxToken,
    self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
    let _p = profile::span("CompletionContext::analyze");
    let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
        expansion_result;
    let syntax_element = NodeOrToken::Token(fake_ident_token);
    if is_in_token_of_for_loop(syntax_element.clone()) {
        // for pat $0
        // there is nothing to complete here except `in` keyword
        // don't bother populating the context
        // FIXME: the completion calculations should end up good enough
        // such that this special case becomes unnecessary
        return None;
    }

    // Overwrite the path kind for derives
    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
        if let Some(ast::NameLike::NameRef(name_ref)) =
            find_node_at_offset(&file_with_fake_ident, offset)
        {
            let parent = name_ref.syntax().parent()?;
            let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                // Rewrite the classified path into a derive path, carrying the
                // derives that are already applied so they are not re-suggested.
                path_ctx.kind = PathKind::Derive {
                    existing_derives: sema
                        .resolve_derive_macro(&origin_attr)
                        .into_iter()
                        .flatten()
                        .flatten()
                        .collect(),
                };
            }
            return Some((
                CompletionAnalysis::NameRef(nameref_ctx),
                (None, None),
                QualifierCtx::default(),
            ));
        }
        return None;
    }

    // No name-like node at the offset: either we are inside a string literal,
    // or inside an unexpanded attribute token tree (parser recovery cases).
    let name_like = match find_node_at_offset(&speculative_file, offset) {
        Some(it) => it,
        None => {
            let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
                CompletionAnalysis::String {
                    original,
                    expanded: ast::String::cast(self_token.clone()),
                }
            } else {
                // Fix up trailing whitespace problem
                // #[attr(foo = $0
                let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
                let p = token.parent()?;
                if p.kind() == SyntaxKind::TOKEN_TREE
                    && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
                {
                    let colon_prefix = previous_non_trivia_token(self_token.clone())
                        .map_or(false, |it| T![:] == it.kind());
                    CompletionAnalysis::UnexpandedAttrTT {
                        fake_attribute_under_caret: syntax_element
                            .ancestors()
                            .find_map(ast::Attr::cast),
                        colon_prefix,
                    }
                } else {
                    return None;
                }
            };
            return Some((analysis, (None, None), QualifierCtx::default()));
        }
    };
    let expected = expected_type_and_name(sema, &self_token, &name_like);
    let mut qual_ctx = QualifierCtx::default();
    // Dispatch on what kind of name-like node the cursor is on.
    let analysis = match name_like {
        ast::NameLike::Lifetime(lifetime) => {
            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
        }
        ast::NameLike::NameRef(name_ref) => {
            let parent = name_ref.syntax().parent()?;
            let (nameref_ctx, qualifier_ctx) =
                classify_name_ref(sema, &original_file, name_ref, parent.clone())?;
            qual_ctx = qualifier_ctx;
            CompletionAnalysis::NameRef(nameref_ctx)
        }
        ast::NameLike::Name(name) => {
            let name_ctx = classify_name(sema, &original_file, name)?;
            CompletionAnalysis::Name(name_ctx)
        }
    };
    Some((analysis, expected, qual_ctx))
}
064997fb 300
2b03887a
FG
/// Calculate the expected type and name of the cursor position.
///
/// Walks up from the token's parent node until it hits a construct it knows
/// how to derive an expectation from (let bindings, call arguments, record
/// fields, match arms, return positions, ...). The `cov_mark` hits tie each
/// branch to its completion tests.
fn expected_type_and_name(
    sema: &Semantics<'_, RootDatabase>,
    token: &SyntaxToken,
    name_like: &ast::NameLike,
) -> (Option<Type>, Option<NameOrNameRef>) {
    let mut node = match token.parent() {
        Some(it) => it,
        None => return (None, None),
    };

    // When completing behind `&`/`&&` chains (e.g. `f(&&$0)` against a `T`
    // parameter) strip one reference per surrounding `RefExpr`.
    let strip_refs = |mut ty: Type| match name_like {
        ast::NameLike::NameRef(n) => {
            let p = match n.syntax().parent() {
                Some(it) => it,
                None => return ty,
            };
            // Find the outermost field-access / path expression the name-ref
            // belongs to; RefExprs wrapping *that* node are what count.
            let top_syn = match_ast! {
                match p {
                    ast::FieldExpr(e) => e
                        .syntax()
                        .ancestors()
                        .map_while(ast::FieldExpr::cast)
                        .last()
                        .map(|it| it.syntax().clone()),
                    ast::PathSegment(e) => e
                        .syntax()
                        .ancestors()
                        .skip(1)
                        .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                        .find_map(ast::PathExpr::cast)
                        .map(|it| it.syntax().clone()),
                    _ => None
                }
            };
            let top_syn = match top_syn {
                Some(it) => it,
                None => return ty,
            };
            for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
                cov_mark::hit!(expected_type_fn_param_ref);
                ty = ty.strip_reference();
            }
            ty
        }
        _ => ty,
    };

    loop {
        break match_ast! {
            match node {
                ast::LetStmt(it) => {
                    cov_mark::hit!(expected_type_let_with_leading_char);
                    cov_mark::hit!(expected_type_let_without_leading_char);
                    // Prefer the pattern's type; fall back to the initializer's.
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    let name = match it.pat() {
                        Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                        Some(_) | None => None,
                    };

                    (ty, name)
                },
                ast::LetExpr(it) => {
                    cov_mark::hit!(expected_type_if_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::ArgList(_) => {
                    cov_mark::hit!(expected_type_fn_param);
                    // Inside a call: expectation comes from the active parameter.
                    ActiveParameter::at_token(
                        &sema,
                        token.clone(),
                    ).map(|ap| {
                        let name = ap.ident().map(NameOrNameRef::Name);

                        let ty = strip_refs(ap.ty);
                        (Some(ty), name)
                    })
                    .unwrap_or((None, None))
                },
                ast::RecordExprFieldList(it) => {
                    // wouldn't try {} be nice...
                    (|| {
                        if token.kind() == T![..]
                            ||token.prev_token().map(|t| t.kind()) == Some(T![..])
                        {
                            // `Struct { ..$0 }` — functional-update base expr.
                            cov_mark::hit!(expected_type_struct_func_update);
                            let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                            let ty = sema.type_of_expr(&record_expr.into())?;
                            Some((
                                Some(ty.original),
                                None
                            ))
                        } else {
                            cov_mark::hit!(expected_type_struct_field_without_leading_char);
                            let expr_field = token.prev_sibling_or_token()?
                                .into_node()
                                .and_then(ast::RecordExprField::cast)?;
                            let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                            Some((
                                Some(ty),
                                expr_field.field_name().map(NameOrNameRef::NameRef),
                            ))
                        }
                    })().unwrap_or((None, None))
                },
                ast::RecordExprField(it) => {
                    if let Some(expr) = it.expr() {
                        cov_mark::hit!(expected_type_struct_field_with_leading_char);
                        (
                            sema.type_of_expr(&expr).map(TypeInfo::original),
                            it.field_name().map(NameOrNameRef::NameRef),
                        )
                    } else {
                        cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                        let ty = sema.resolve_record_field(&it)
                            .map(|(_, _, ty)| ty);
                        (
                            ty,
                            it.field_name().map(NameOrNameRef::NameRef),
                        )
                    }
                },
                // match foo { $0 }
                // match foo { ..., pat => $0 }
                ast::MatchExpr(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());

                    let ty = if on_arrow {
                        // match foo { ..., pat => $0 }
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&it.into())
                    } else {
                        // match foo { $0 }
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        it.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IfExpr(it) => {
                    let ty = it.condition()
                        .and_then(|e| sema.type_of_expr(&e))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::IdentPat(it) => {
                    cov_mark::hit!(expected_type_if_let_with_leading_char);
                    cov_mark::hit!(expected_type_match_arm_with_leading_char);
                    let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                    (ty, None)
                },
                ast::Fn(it) => {
                    cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                    cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                    let def = sema.to_def(&it);
                    (def.map(|def| def.ret_type(sema.db)), None)
                },
                ast::ClosureExpr(it) => {
                    let ty = sema.type_of_expr(&it.into());
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                // Nothing to expect at these boundaries — stop the walk.
                ast::ParamList(_) => (None, None),
                ast::Stmt(_) => (None, None),
                ast::Item(_) => (None, None),
                _ => {
                    match node.parent() {
                        Some(n) => {
                            node = n;
                            continue;
                        },
                        None => (None, None),
                    }
                },
            }
        };
    }
}
064997fb 487
2b03887a
FG
488fn classify_lifetime(
489 _sema: &Semantics<'_, RootDatabase>,
490 original_file: &SyntaxNode,
491 lifetime: ast::Lifetime,
492) -> Option<LifetimeContext> {
493 let parent = lifetime.syntax().parent()?;
494 if parent.kind() == SyntaxKind::ERROR {
495 return None;
496 }
064997fb 497
2b03887a
FG
498 let kind = match_ast! {
499 match parent {
500 ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
501 is_decl: param.lifetime().as_ref() == Some(&lifetime),
502 param
503 },
504 ast::BreakExpr(_) => LifetimeKind::LabelRef,
505 ast::ContinueExpr(_) => LifetimeKind::LabelRef,
506 ast::Label(_) => LifetimeKind::LabelDef,
507 _ => LifetimeKind::Lifetime,
508 }
509 };
510 let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
064997fb 511
2b03887a
FG
512 Some(LifetimeContext { lifetime, kind })
513}
064997fb 514
2b03887a
FG
515fn classify_name(
516 sema: &Semantics<'_, RootDatabase>,
517 original_file: &SyntaxNode,
518 name: ast::Name,
519) -> Option<NameContext> {
520 let parent = name.syntax().parent()?;
521 let kind = match_ast! {
522 match parent {
523 ast::Const(_) => NameKind::Const,
524 ast::ConstParam(_) => NameKind::ConstParam,
525 ast::Enum(_) => NameKind::Enum,
526 ast::Fn(_) => NameKind::Function,
527 ast::IdentPat(bind_pat) => {
528 let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
529 if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
530 pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
531 }
064997fb 532
2b03887a
FG
533 NameKind::IdentPat(pat_ctx)
534 },
535 ast::MacroDef(_) => NameKind::MacroDef,
536 ast::MacroRules(_) => NameKind::MacroRules,
537 ast::Module(module) => NameKind::Module(module),
538 ast::RecordField(_) => NameKind::RecordField,
539 ast::Rename(_) => NameKind::Rename,
540 ast::SelfParam(_) => NameKind::SelfParam,
541 ast::Static(_) => NameKind::Static,
542 ast::Struct(_) => NameKind::Struct,
543 ast::Trait(_) => NameKind::Trait,
544 ast::TypeAlias(_) => NameKind::TypeAlias,
545 ast::TypeParam(_) => NameKind::TypeParam,
546 ast::Union(_) => NameKind::Union,
547 ast::Variant(_) => NameKind::Variant,
548 _ => return None,
549 }
550 };
551 let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
552 Some(NameContext { name, kind })
553}
064997fb 554
2b03887a
FG
555fn classify_name_ref(
556 sema: &Semantics<'_, RootDatabase>,
557 original_file: &SyntaxNode,
558 name_ref: ast::NameRef,
559 parent: SyntaxNode,
560) -> Option<(NameRefContext, QualifierCtx)> {
561 let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
064997fb 562
2b03887a 563 let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
064997fb 564
2b03887a
FG
565 if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
566 let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
567 .map_or(false, |it| T![.] == it.kind());
064997fb 568
2b03887a
FG
569 return find_node_in_file_compensated(
570 sema,
571 original_file,
572 &record_field.parent_record_lit(),
573 )
574 .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
575 .map(make_res);
576 }
577 if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
578 let kind = NameRefKind::Pattern(PatternContext {
579 param_ctx: None,
580 has_type_ascription: false,
581 ref_token: None,
582 mut_token: None,
583 record_pat: find_node_in_file_compensated(
064997fb
FG
584 sema,
585 original_file,
2b03887a
FG
586 &record_field.parent_record_pat(),
587 ),
588 ..pattern_context_for(
589 sema,
590 original_file,
591 record_field.parent_record_pat().clone().into(),
064997fb 592 )
2b03887a
FG
593 });
594 return Some(make_res(kind));
595 }
596
597 let segment = match_ast! {
598 match parent {
599 ast::PathSegment(segment) => segment,
600 ast::FieldExpr(field) => {
601 let receiver = find_opt_node_in_file(original_file, field.expr());
602 let receiver_is_ambiguous_float_literal = match &receiver {
603 Some(ast::Expr::Literal(l)) => matches! {
604 l.kind(),
605 ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
606 },
607 _ => false,
608 };
609 let kind = NameRefKind::DotAccess(DotAccess {
610 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
611 kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
612 receiver
613 });
614 return Some(make_res(kind));
615 },
616 ast::MethodCallExpr(method) => {
617 let receiver = find_opt_node_in_file(original_file, method.receiver());
618 let kind = NameRefKind::DotAccess(DotAccess {
619 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
620 kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
621 receiver
622 });
623 return Some(make_res(kind));
624 },
625 _ => return None,
064997fb 626 }
2b03887a
FG
627 };
628
629 let path = segment.parent_path();
630 let original_path = find_node_in_file_compensated(sema, original_file, &path);
631
632 let mut path_ctx = PathCompletionCtx {
633 has_call_parens: false,
634 has_macro_bang: false,
635 qualified: Qualified::No,
636 parent: None,
637 path: path.clone(),
638 original_path,
639 kind: PathKind::Item { kind: ItemListKind::SourceFile },
640 has_type_args: false,
641 use_tree_parent: false,
642 };
643
644 let is_in_block = |it: &SyntaxNode| {
645 it.parent()
646 .map(|node| {
647 ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
648 })
649 .unwrap_or(false)
650 };
651 let func_update_record = |syn: &SyntaxNode| {
652 if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
653 find_node_in_file_compensated(sema, original_file, &record_expr)
654 } else {
655 None
656 }
657 };
658 let after_if_expr = |node: SyntaxNode| {
659 let prev_expr = (|| {
660 let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
661 ast::ExprStmt::cast(prev_sibling)?.expr()
662 })();
663 matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
664 };
665
666 // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
667 // ex. trait Foo $0 {}
668 // in these cases parser recovery usually kicks in for our inserted identifier, causing it
669 // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
670 // expression or an item list.
671 // The following code checks if the body is missing, if it is we either cut off the body
672 // from the item or it was missing in the first place
673 let inbetween_body_and_decl_check = |node: SyntaxNode| {
674 if let Some(NodeOrToken::Node(n)) =
675 syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
676 {
677 if let Some(item) = ast::Item::cast(n) {
678 let is_inbetween = match &item {
679 ast::Item::Const(it) => it.body().is_none(),
680 ast::Item::Enum(it) => it.variant_list().is_none(),
681 ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
682 ast::Item::Fn(it) => it.body().is_none(),
683 ast::Item::Impl(it) => it.assoc_item_list().is_none(),
684 ast::Item::Module(it) => it.item_list().is_none(),
685 ast::Item::Static(it) => it.body().is_none(),
686 ast::Item::Struct(it) => it.field_list().is_none(),
687 ast::Item::Trait(it) => it.assoc_item_list().is_none(),
688 ast::Item::TypeAlias(it) => it.ty().is_none(),
689 ast::Item::Union(it) => it.record_field_list().is_none(),
690 _ => false,
691 };
692 if is_inbetween {
693 return Some(item);
694 }
695 }
064997fb 696 }
2b03887a
FG
697 None
698 };
064997fb 699
2b03887a
FG
700 let type_location = |node: &SyntaxNode| {
701 let parent = node.parent()?;
702 let res = match_ast! {
064997fb 703 match parent {
2b03887a
FG
704 ast::Const(it) => {
705 let name = find_opt_node_in_file(original_file, it.name())?;
706 let original = ast::Const::cast(name.syntax().parent()?)?;
707 TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
708 },
709 ast::RetType(it) => {
710 if it.thin_arrow_token().is_none() {
711 return None;
712 }
713 let parent = match ast::Fn::cast(parent.parent()?) {
714 Some(x) => x.param_list(),
715 None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
064997fb 716 };
2b03887a
FG
717
718 let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
719 TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
720 match parent {
721 ast::ClosureExpr(it) => {
722 it.body()
723 },
724 ast::Fn(it) => {
725 it.body().map(ast::Expr::BlockExpr)
726 },
727 _ => return None,
728 }
729 }))
730 },
731 ast::Param(it) => {
732 if it.colon_token().is_none() {
733 return None;
734 }
735 TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
064997fb 736 },
2b03887a
FG
737 ast::LetStmt(it) => {
738 if it.colon_token().is_none() {
739 return None;
740 }
741 TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
742 },
743 ast::Impl(it) => {
744 match it.trait_() {
745 Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
746 _ => match it.self_ty() {
747 Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
748 _ => return None,
749 },
750 }
064997fb 751 },
2b03887a
FG
752 ast::TypeBound(_) => TypeLocation::TypeBound,
753 // is this case needed?
754 ast::TypeBoundList(_) => TypeLocation::TypeBound,
755 ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
756 // is this case needed?
757 ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
758 ast::TupleField(_) => TypeLocation::TupleField,
064997fb
FG
759 _ => return None,
760 }
761 };
2b03887a
FG
762 Some(res)
763 };
064997fb 764
2b03887a
FG
765 let is_in_condition = |it: &ast::Expr| {
766 (|| {
767 let parent = it.syntax().parent()?;
768 if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
769 Some(expr.condition()? == *it)
770 } else if let Some(expr) = ast::IfExpr::cast(parent) {
771 Some(expr.condition()? == *it)
064997fb
FG
772 } else {
773 None
774 }
2b03887a
FG
775 })()
776 .unwrap_or(false)
777 };
064997fb 778
2b03887a
FG
779 let make_path_kind_expr = |expr: ast::Expr| {
780 let it = expr.syntax();
781 let in_block_expr = is_in_block(it);
782 let in_loop_body = is_in_loop_body(it);
783 let after_if_expr = after_if_expr(it.clone());
784 let ref_expr_parent =
785 path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
786 let (innermost_ret_ty, self_param) = {
787 let find_ret_ty = |it: SyntaxNode| {
788 if let Some(item) = ast::Item::cast(it.clone()) {
789 match item {
790 ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
791 ast::Item::MacroCall(_) => None,
792 _ => Some(None),
064997fb 793 }
2b03887a
FG
794 } else {
795 let expr = ast::Expr::cast(it)?;
796 let callable = match expr {
797 // FIXME
798 // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
799 ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
800 _ => return None,
801 };
802 Some(
803 callable
804 .and_then(|c| c.adjusted().as_callable(sema.db))
805 .map(|it| it.return_type()),
806 )
807 }
808 };
809 let find_fn_self_param = |it| match it {
810 ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
811 ast::Item::MacroCall(_) => None,
812 _ => Some(None),
813 };
814
815 match find_node_in_file_compensated(sema, original_file, &expr) {
816 Some(it) => {
817 let innermost_ret_ty = sema
818 .ancestors_with_macros(it.syntax().clone())
819 .find_map(find_ret_ty)
820 .flatten();
821
822 let self_param = sema
823 .ancestors_with_macros(it.syntax().clone())
824 .filter_map(ast::Item::cast)
825 .find_map(find_fn_self_param)
826 .flatten();
827 (innermost_ret_ty, self_param)
064997fb 828 }
2b03887a 829 None => (None, None),
064997fb 830 }
064997fb 831 };
2b03887a
FG
832 let is_func_update = func_update_record(it);
833 let in_condition = is_in_condition(&expr);
834 let incomplete_let = it
835 .parent()
836 .and_then(ast::LetStmt::cast)
837 .map_or(false, |it| it.semicolon_token().is_none());
838 let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
064997fb 839
2b03887a
FG
840 let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
841 Some(arm) => arm
842 .fat_arrow_token()
843 .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
844 None => false,
845 };
064997fb 846
2b03887a
FG
847 PathKind::Expr {
848 expr_ctx: ExprCtx {
849 in_block_expr,
850 in_loop_body,
851 after_if_expr,
852 in_condition,
853 ref_expr_parent,
854 is_func_update,
855 innermost_ret_ty,
856 self_param,
857 incomplete_let,
858 impl_,
859 in_match_guard,
860 },
861 }
862 };
863 let make_path_kind_type = |ty: ast::Type| {
864 let location = type_location(ty.syntax());
865 PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
866 };
867
868 let mut kind_macro_call = |it: ast::MacroCall| {
869 path_ctx.has_macro_bang = it.excl_token().is_some();
870 let parent = it.syntax().parent()?;
871 // Any path in an item list will be treated as a macro call by the parser
872 let kind = match_ast! {
873 match parent {
874 ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
875 ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
876 ast::MacroType(ty) => make_path_kind_type(ty.into()),
877 ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
878 ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
879 Some(it) => match_ast! {
880 match it {
881 ast::Trait(_) => ItemListKind::Trait,
882 ast::Impl(it) => if it.trait_().is_some() {
883 ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
884 } else {
885 ItemListKind::Impl
064997fb 886 },
2b03887a 887 _ => return None
064997fb
FG
888 }
889 },
2b03887a
FG
890 None => return None,
891 } },
892 ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
893 ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
894 _ => return None,
895 }
064997fb 896 };
2b03887a
FG
897 Some(kind)
898 };
899 let make_path_kind_attr = |meta: ast::Meta| {
900 let attr = meta.parent_attr()?;
901 let kind = attr.kind();
902 let attached = attr.syntax().parent()?;
903 let is_trailing_outer_attr = kind != AttrKind::Inner
904 && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
905 let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
906 Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
907 };
064997fb 908
2b03887a
FG
909 // Infer the path kind
910 let parent = path.syntax().parent()?;
911 let kind = match_ast! {
912 match parent {
913 ast::PathType(it) => make_path_kind_type(it.into()),
914 ast::PathExpr(it) => {
915 if let Some(p) = it.syntax().parent() {
916 if ast::ExprStmt::can_cast(p.kind()) {
917 if let Some(kind) = inbetween_body_and_decl_check(p) {
918 return Some(make_res(NameRefKind::Keyword(kind)));
064997fb 919 }
064997fb 920 }
2b03887a 921 }
064997fb 922
2b03887a 923 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
064997fb 924
2b03887a
FG
925 make_path_kind_expr(it.into())
926 },
927 ast::TupleStructPat(it) => {
928 path_ctx.has_call_parens = true;
929 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
930 },
931 ast::RecordPat(it) => {
932 path_ctx.has_call_parens = true;
933 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
934 },
935 ast::PathPat(it) => {
936 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
937 },
938 ast::MacroCall(it) => {
939 // A macro call in this position is usually a result of parsing recovery, so check that
940 if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
941 return Some(make_res(NameRefKind::Keyword(kind)));
064997fb 942 }
064997fb 943
2b03887a
FG
944 kind_macro_call(it)?
945 },
946 ast::Meta(meta) => make_path_kind_attr(meta)?,
947 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
948 ast::UseTree(_) => PathKind::Use,
949 // completing inside a qualifier
950 ast::Path(parent) => {
951 path_ctx.parent = Some(parent.clone());
952 let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
953 match_ast! {
954 match parent {
955 ast::PathType(it) => make_path_kind_type(it.into()),
956 ast::PathExpr(it) => {
957 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
064997fb 958
2b03887a 959 make_path_kind_expr(it.into())
064997fb 960 },
2b03887a
FG
961 ast::TupleStructPat(it) => {
962 path_ctx.has_call_parens = true;
963 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
964 },
965 ast::RecordPat(it) => {
966 path_ctx.has_call_parens = true;
967 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
968 },
969 ast::PathPat(it) => {
970 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
971 },
972 ast::MacroCall(it) => {
973 kind_macro_call(it)?
974 },
975 ast::Meta(meta) => make_path_kind_attr(meta)?,
976 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
977 ast::UseTree(_) => PathKind::Use,
978 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
979 _ => return None,
064997fb 980 }
2b03887a
FG
981 }
982 },
983 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
984 _ => return None,
985 }
986 };
064997fb 987
2b03887a
FG
988 path_ctx.kind = kind;
989 path_ctx.has_type_args = segment.generic_arg_list().is_some();
064997fb 990
2b03887a
FG
991 // calculate the qualifier context
992 if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
993 path_ctx.use_tree_parent = use_tree_parent;
994 if !use_tree_parent && segment.coloncolon_token().is_some() {
995 path_ctx.qualified = Qualified::Absolute;
996 } else {
997 let qualifier = qualifier
998 .segment()
999 .and_then(|it| find_node_in_file(original_file, &it))
1000 .map(|it| it.parent_path());
1001 if let Some(qualifier) = qualifier {
1002 let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
1003 Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
1004 if qualifier.qualifier().is_none() =>
1005 {
1006 Some((type_ref, trait_ref))
064997fb 1007 }
2b03887a
FG
1008 _ => None,
1009 };
064997fb 1010
2b03887a
FG
1011 path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
1012 let ty = match ty {
1013 ast::Type::InferType(_) => None,
1014 ty => sema.resolve_type(&ty),
064997fb 1015 };
2b03887a
FG
1016 let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
1017 Qualified::TypeAnchor { ty, trait_ }
1018 } else {
1019 let res = sema.resolve_path(&qualifier);
064997fb 1020
2b03887a
FG
1021 // For understanding how and why super_chain_len is calculated the way it
1022 // is check the documentation at it's definition
1023 let mut segment_count = 0;
1024 let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
1025 .take_while(|p| {
1026 p.segment()
1027 .and_then(|s| {
1028 segment_count += 1;
1029 s.super_token()
064997fb 1030 })
2b03887a
FG
1031 .is_some()
1032 })
1033 .count();
064997fb 1034
2b03887a
FG
1035 let super_chain_len =
1036 if segment_count > super_count { None } else { Some(super_count) };
064997fb 1037
2b03887a 1038 Qualified::With { path: qualifier, resolution: res, super_chain_len }
064997fb 1039 }
064997fb 1040 };
2b03887a
FG
1041 }
1042 } else if let Some(segment) = path.segment() {
1043 if segment.coloncolon_token().is_some() {
1044 path_ctx.qualified = Qualified::Absolute;
1045 }
1046 }
1047
1048 let mut qualifier_ctx = QualifierCtx::default();
1049 if path_ctx.is_trivial_path() {
1050 // fetch the full expression that may have qualifiers attached to it
1051 let top_node = match path_ctx.kind {
1052 PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
1053 parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1054 let parent = p.parent()?;
1055 if ast::StmtList::can_cast(parent.kind()) {
1056 Some(p)
1057 } else if ast::ExprStmt::can_cast(parent.kind()) {
1058 Some(parent)
1059 } else {
1060 None
064997fb 1061 }
2b03887a
FG
1062 })
1063 }
1064 PathKind::Item { .. } => {
1065 parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
1066 }
1067 _ => None,
1068 };
1069 if let Some(top) = top_node {
1070 if let Some(NodeOrToken::Node(error_node)) =
1071 syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1072 {
1073 if error_node.kind() == SyntaxKind::ERROR {
1074 qualifier_ctx.unsafe_tok = error_node
1075 .children_with_tokens()
1076 .filter_map(NodeOrToken::into_token)
1077 .find(|it| it.kind() == T![unsafe]);
1078 qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
064997fb 1079 }
2b03887a 1080 }
064997fb 1081
2b03887a
FG
1082 if let PathKind::Item { .. } = path_ctx.kind {
1083 if qualifier_ctx.none() {
1084 if let Some(t) = top.first_token() {
1085 if let Some(prev) = t
1086 .prev_token()
1087 .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1088 {
1089 if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
1090 // This was inferred to be an item position path, but it seems
1091 // to be part of some other broken node which leaked into an item
1092 // list
1093 return None;
064997fb
FG
1094 }
1095 }
1096 }
1097 }
1098 }
1099 }
064997fb 1100 }
2b03887a 1101 Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
064997fb
FG
1102}
1103
/// Computes the [`PatternContext`] describing the pattern the completion cursor lies in.
///
/// The refutability and presence of a type ascription are determined from the node
/// that owns the outermost pattern (a `let` statement, fn/closure parameter, match
/// arm, `let` expression or `for` loop). For parameters, the surrounding
/// [`ParamContext`] is computed as well by mapping back into the original file.
fn pattern_context_for(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    pat: ast::Pat,
) -> PatternContext {
    let mut param_ctx = None;
    let (refutability, has_type_ascription) =
        pat
            .syntax()
            .ancestors()
            // skip out of any nested patterns to the node that owns the whole pattern
            .skip_while(|it| ast::Pat::can_cast(it.kind()))
            .next()
            .map_or((PatternRefutability::Irrefutable, false), |node| {
                let refutability = match_ast! {
                    match node {
                        // `let` bindings are irrefutable; ascription is the optional `: Ty`
                        ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
                        ast::Param(param) => {
                            let has_type_ascription = param.ty().is_some();
                            // determine whether this is a function or closure parameter,
                            // mapping the (fake) parameter list back into the original file
                            param_ctx = (|| {
                                let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
                                let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
                                let param_list_owner = param_list.syntax().parent()?;
                                let kind = match_ast! {
                                    match param_list_owner {
                                        ast::ClosureExpr(closure) => ParamKind::Closure(closure),
                                        ast::Fn(fn_) => ParamKind::Function(fn_),
                                        _ => return None,
                                    }
                                };
                                Some(ParamContext {
                                    param_list, param, kind
                                })
                            })();
                            return (PatternRefutability::Irrefutable, has_type_ascription)
                        },
                        ast::MatchArm(_) => PatternRefutability::Refutable,
                        ast::LetExpr(_) => PatternRefutability::Refutable,
                        ast::ForExpr(_) => PatternRefutability::Irrefutable,
                        _ => PatternRefutability::Irrefutable,
                    }
                };
                (refutability, false)
            });
    // only ident patterns can carry `ref` / `mut` tokens
    let (ref_token, mut_token) = match &pat {
        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
        _ => (None, None),
    };

    PatternContext {
        refutability,
        param_ctx,
        has_type_ascription,
        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
        mut_token,
        ref_token,
        // NOTE(review): `record_pat` appears to be filled in by callers where applicable — confirm
        record_pat: None,
        impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
    }
}
1163
1164fn fetch_immediate_impl(
1165 sema: &Semantics<'_, RootDatabase>,
1166 original_file: &SyntaxNode,
1167 node: &SyntaxNode,
1168) -> Option<ast::Impl> {
1169 let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1170 .filter_map(ast::Item::cast)
1171 .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1172
1173 match ancestors.next()? {
1174 ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1175 ast::Item::Impl(it) => return Some(it),
1176 _ => return None,
1177 }
1178 match ancestors.next()? {
1179 ast::Item::Impl(it) => Some(it),
1180 _ => None,
1181 }
1182}
1183
1184/// Attempts to find `node` inside `syntax` via `node`'s text range.
1185/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1186fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1187 find_node_in_file(syntax, &node?)
1188}
1189
1190/// Attempts to find `node` inside `syntax` via `node`'s text range.
1191/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1192fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1193 let syntax_range = syntax.text_range();
1194 let range = node.syntax().text_range();
1195 let intersection = range.intersect(syntax_range)?;
1196 syntax.covering_element(intersection).ancestors().find_map(N::cast)
1197}
1198
1199/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1200/// for the offset introduced by the fake ident.
1201/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1202fn find_node_in_file_compensated<N: AstNode>(
1203 sema: &Semantics<'_, RootDatabase>,
1204 in_file: &SyntaxNode,
1205 node: &N,
1206) -> Option<N> {
1207 ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1208}
1209
1210fn ancestors_in_file_compensated<'sema>(
1211 sema: &'sema Semantics<'_, RootDatabase>,
1212 in_file: &SyntaxNode,
1213 node: &SyntaxNode,
1214) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1215 let syntax_range = in_file.text_range();
1216 let range = node.text_range();
1217 let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1218 if end < range.start() {
1219 return None;
1220 }
1221 let range = TextRange::new(range.start(), end);
1222 // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1223 let intersection = range.intersect(syntax_range)?;
1224 let node = match in_file.covering_element(intersection) {
1225 NodeOrToken::Node(node) => node,
1226 NodeOrToken::Token(tok) => tok.parent()?,
1227 };
1228 Some(sema.ancestors_with_macros(node))
1229}
1230
1231/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1232/// for the offset introduced by the fake ident..
1233/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1234fn find_opt_node_in_file_compensated<N: AstNode>(
1235 sema: &Semantics<'_, RootDatabase>,
1236 syntax: &SyntaxNode,
1237 node: Option<N>,
1238) -> Option<N> {
1239 find_node_in_file_compensated(sema, syntax, &node?)
1240}
1241
1242fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1243 if let Some(qual) = path.qualifier() {
1244 return Some((qual, false));
1245 }
1246 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1247 let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1248 Some((use_tree.path()?, true))
1249}
1250
1251pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
1252 // oh my ...
1253 (|| {
1254 let syntax_token = element.into_token()?;
1255 let range = syntax_token.text_range();
1256 let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
1257
1258 // check if the current token is the `in` token of a for loop
1259 if let Some(token) = for_expr.in_token() {
1260 return Some(syntax_token == token);
1261 }
1262 let pat = for_expr.pat()?;
1263 if range.end() < pat.syntax().text_range().end() {
1264 // if we are inside or before the pattern we can't be at the `in` token position
1265 return None;
1266 }
1267 let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
1268 Some(match next_sibl {
1269 // the loop body is some node, if our token is at the start we are at the `in` position,
1270 // otherwise we could be in a recovered expression, we don't wanna ruin completions there
1271 syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
1272 // the loop body consists of a single token, if we are this we are certainly at the `in` token position
1273 syntax::NodeOrToken::Token(t) => t == syntax_token,
1274 })
1275 })()
1276 .unwrap_or(false)
1277}
1278
#[test]
fn test_for_is_prev2() {
    // cursor position `i$0` right after the loop pattern — presumably expected to be
    // classified as the `in` token position by `is_in_token_of_for_loop`
    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
}
1283
1284pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
1285 node.ancestors()
1286 .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
1287 .find_map(|it| {
1288 let loop_body = match_ast! {
1289 match it {
1290 ast::ForExpr(it) => it.loop_body(),
1291 ast::WhileExpr(it) => it.loop_body(),
1292 ast::LoopExpr(it) => it.loop_body(),
1293 _ => None,
1294 }
1295 };
1296 loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
1297 })
1298 .is_some()
1299}
1300
1301fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
1302 let mut token = match e.into() {
1303 SyntaxElement::Node(n) => n.first_token()?,
1304 SyntaxElement::Token(t) => t,
1305 }
1306 .prev_token();
1307 while let Some(inner) = token {
1308 if !inner.kind().is_trivia() {
1309 return Some(inner);
1310 } else {
1311 token = inner.prev_token();
1312 }
1313 }
1314 None
1315}
1316
1317fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
1318 let mut e = ele.next_sibling_or_token();
1319 while let Some(inner) = e {
1320 if !inner.kind().is_trivia() {
1321 return Some(inner);
1322 } else {
1323 e = inner.next_sibling_or_token();
1324 }
1325 }
1326 None
1327}