]> git.proxmox.com Git - rustc.git/blob - src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
New upstream version 1.67.1+dfsg1
[rustc.git] / src / tools / rust-analyzer / crates / ide-completion / src / context / analysis.rs
1 //! Module responsible for analyzing the code surrounding the cursor for completion.
2 use std::iter;
3
4 use hir::{Semantics, Type, TypeInfo};
5 use ide_db::{active_parameter::ActiveParameter, RootDatabase};
6 use syntax::{
7 algo::{find_node_at_offset, non_trivia_sibling},
8 ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
9 match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
10 SyntaxToken, TextRange, TextSize, T,
11 };
12
13 use crate::context::{
14 AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
15 LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
16 PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
17 TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
18 };
19
/// Result of [`expand`]: the original and speculative syntax trees after all
/// successful macro expansions around the cursor, kept in lockstep.
struct ExpansionResult {
    /// The original file, or the innermost successful expansion of it.
    original_file: SyntaxNode,
    /// The file with the fake identifier inserted at the cursor, expanded in
    /// lockstep with `original_file`.
    speculative_file: SyntaxNode,
    /// Offset of the completion position inside the two files above.
    offset: TextSize,
    /// The inserted fake identifier token, remapped into `speculative_file`.
    fake_ident_token: SyntaxToken,
    /// Set when the cursor lies inside a derive attribute that was expanded as a
    /// pseudo attribute macro: (expanded original, expanded speculative, offset
    /// into the expansions, the originating attribute).
    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
27
/// Output of [`expand_and_analyze`]: everything the completion machinery needs
/// to know about the position being completed.
pub(super) struct AnalysisResult {
    /// The classified completion context at the cursor.
    pub(super) analysis: CompletionAnalysis,
    /// Expected type and name at the cursor position, if either could be
    /// determined (see `expected_type_and_name`).
    pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
    /// Qualifier context collected while classifying a name reference.
    pub(super) qualifier_ctx: QualifierCtx,
    /// The token in the (expanded) original file that the cursor maps to.
    pub(super) token: SyntaxToken,
    /// Offset of `token` within the expanded original file.
    pub(super) offset: TextSize,
}
35
36 pub(super) fn expand_and_analyze(
37 sema: &Semantics<'_, RootDatabase>,
38 original_file: SyntaxNode,
39 speculative_file: SyntaxNode,
40 offset: TextSize,
41 original_token: &SyntaxToken,
42 ) -> Option<AnalysisResult> {
43 // as we insert after the offset, right biased will *always* pick the identifier no matter
44 // if there is an ident already typed or not
45 let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
46 // the relative offset between the cursor and the *identifier* token we are completing on
47 let relative_offset = offset - fake_ident_token.text_range().start();
48 // make the offset point to the start of the original token, as that is what the
49 // intermediate offsets calculated in expansion always points to
50 let offset = offset - relative_offset;
51 let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
52 // add the relative offset back, so that left_biased finds the proper token
53 let offset = expansion.offset + relative_offset;
54 let token = expansion.original_file.token_at_offset(offset).left_biased()?;
55
56 analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
57 AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
58 })
59 }
60
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
/// and speculative states stay in sync.
///
/// Each loop iteration tries, in order: attribute macros on the item ancestors,
/// derive attributes (as pseudo attribute macros), and fn-like macro calls.
/// A successful expansion replaces both files, remaps the fake ident token and
/// offset into the expansion, and restarts the loop; any failure or mismatch
/// between the two sides breaks out with the last consistent state.
fn expand(
    sema: &Semantics<'_, RootDatabase>,
    mut original_file: SyntaxNode,
    mut speculative_file: SyntaxNode,
    mut offset: TextSize,
    mut fake_ident_token: SyntaxToken,
) -> ExpansionResult {
    let _p = profile::span("CompletionContext::expand");
    let mut derive_ctx = None;

    'expansion: loop {
        let parent_item =
            |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
        // Walk matching item ancestors of both files in lockstep, innermost first.
        let ancestor_items = iter::successors(
            Option::zip(
                find_node_at_offset::<ast::Item>(&original_file, offset),
                find_node_at_offset::<ast::Item>(&speculative_file, offset),
            ),
            |(a, b)| parent_item(a).zip(parent_item(b)),
        );

        // first try to expand attributes as these are always the outermost macro calls
        'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
            match (
                sema.expand_attr_macro(&actual_item),
                sema.speculative_expand_attr_macro(
                    &actual_item,
                    &item_with_fake_ident,
                    fake_ident_token.clone(),
                ),
            ) {
                // maybe parent items have attributes, so continue walking the ancestors
                (None, None) => continue 'ancestors,
                // successful expansions
                (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                    let new_offset = fake_mapped_token.text_range().start();
                    if new_offset > actual_expansion.text_range().end() {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        break 'expansion;
                    }
                    original_file = actual_expansion;
                    speculative_file = fake_expansion;
                    fake_ident_token = fake_mapped_token;
                    offset = new_offset;
                    continue 'expansion;
                }
                // exactly one expansion failed, inconsistent state so stop expanding completely
                _ => break 'expansion,
            }
        }

        // No attributes have been expanded, so look for macro_call! token trees or derive token trees
        let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
            Some(it) => it,
            None => break 'expansion,
        };
        let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
            Some(it) => it,
            None => break 'expansion,
        };

        // Expand pseudo-derive expansion
        if let (Some(orig_attr), Some(spec_attr)) = (
            orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
            spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
        ) {
            if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
                sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
                sema.speculative_expand_derive_as_pseudo_attr_macro(
                    &orig_attr,
                    &spec_attr,
                    fake_ident_token.clone(),
                ),
            ) {
                // Record the derive expansion for `analyze` to overwrite the path
                // kind later; unlike the other arms this does not loop again.
                derive_ctx = Some((
                    actual_expansion,
                    fake_expansion,
                    fake_mapped_token.text_range().start(),
                    orig_attr,
                ));
            }
            // at this point we won't have any more successful expansions, so stop
            break 'expansion;
        }

        // Expand fn-like macro calls
        if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
            orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
            spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
        ) {
            let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
            let mac_call_path1 =
                macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());

            // inconsistent state, stop expanding
            if mac_call_path0 != mac_call_path1 {
                break 'expansion;
            }
            let speculative_args = match macro_call_with_fake_ident.token_tree() {
                Some(tt) => tt,
                None => break 'expansion,
            };

            match (
                sema.expand(&actual_macro_call),
                sema.speculative_expand(
                    &actual_macro_call,
                    &speculative_args,
                    fake_ident_token.clone(),
                ),
            ) {
                // successful expansions
                (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                    let new_offset = fake_mapped_token.text_range().start();
                    if new_offset > actual_expansion.text_range().end() {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        break 'expansion;
                    }
                    original_file = actual_expansion;
                    speculative_file = fake_expansion;
                    fake_ident_token = fake_mapped_token;
                    offset = new_offset;
                    continue 'expansion;
                }
                // at least one expansion failed, we won't have anything to expand from this point
                // onwards so break out
                _ => break 'expansion,
            }
        }

        // none of our states have changed so stop the loop
        break 'expansion;
    }
    ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}
201
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
/// of the completion location.
///
/// Classification proceeds in order: bail out inside a `for` loop's `in` slot,
/// handle derive-attribute positions recorded by [`expand`], fall back to
/// string-literal / unexpanded-attribute recovery when no name-like node is
/// found, and otherwise classify the lifetime, name ref, or name at the cursor.
fn analyze(
    sema: &Semantics<'_, RootDatabase>,
    expansion_result: ExpansionResult,
    original_token: &SyntaxToken,
    self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
    let _p = profile::span("CompletionContext::analyze");
    let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
        expansion_result;
    let syntax_element = NodeOrToken::Token(fake_ident_token);
    if is_in_token_of_for_loop(syntax_element.clone()) {
        // for pat $0
        // there is nothing to complete here except `in` keyword
        // don't bother populating the context
        // FIXME: the completion calculations should end up good enough
        // such that this special case becomes unnecessary
        return None;
    }

    // Overwrite the path kind for derives
    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
        if let Some(ast::NameLike::NameRef(name_ref)) =
            find_node_at_offset(&file_with_fake_ident, offset)
        {
            let parent = name_ref.syntax().parent()?;
            let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                path_ctx.kind = PathKind::Derive {
                    // derives already applied by this attribute, so they can be
                    // filtered out of the completion list
                    existing_derives: sema
                        .resolve_derive_macro(&origin_attr)
                        .into_iter()
                        .flatten()
                        .flatten()
                        .collect(),
                };
            }
            return Some((
                CompletionAnalysis::NameRef(nameref_ctx),
                (None, None),
                QualifierCtx::default(),
            ));
        }
        return None;
    }

    // No name-like node under the cursor: try string-literal and attribute
    // token-tree recovery before giving up.
    let name_like = match find_node_at_offset(&speculative_file, offset) {
        Some(it) => it,
        None => {
            let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
                CompletionAnalysis::String {
                    original,
                    expanded: ast::String::cast(self_token.clone()),
                }
            } else {
                // Fix up trailing whitespace problem
                // #[attr(foo = $0
                let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
                let p = token.parent()?;
                if p.kind() == SyntaxKind::TOKEN_TREE
                    && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
                {
                    let colon_prefix = previous_non_trivia_token(self_token.clone())
                        .map_or(false, |it| T![:] == it.kind());
                    CompletionAnalysis::UnexpandedAttrTT {
                        fake_attribute_under_caret: syntax_element
                            .ancestors()
                            .find_map(ast::Attr::cast),
                        colon_prefix,
                    }
                } else {
                    return None;
                }
            };
            return Some((analysis, (None, None), QualifierCtx::default()));
        }
    };
    let expected = expected_type_and_name(sema, &self_token, &name_like);
    let mut qual_ctx = QualifierCtx::default();
    let analysis = match name_like {
        ast::NameLike::Lifetime(lifetime) => {
            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
        }
        ast::NameLike::NameRef(name_ref) => {
            let parent = name_ref.syntax().parent()?;
            let (nameref_ctx, qualifier_ctx) =
                classify_name_ref(sema, &original_file, name_ref, parent.clone())?;
            qual_ctx = qualifier_ctx;
            CompletionAnalysis::NameRef(nameref_ctx)
        }
        ast::NameLike::Name(name) => {
            let name_ctx = classify_name(sema, &original_file, name)?;
            CompletionAnalysis::Name(name_ctx)
        }
    };
    Some((analysis, expected, qual_ctx))
}
300
/// Calculate the expected type and name of the cursor position.
///
/// Walks up from the token's parent until it reaches a node kind it knows how
/// to extract an expected type from (let bindings, call arguments, record
/// fields, match arms, returns, ...); `ast::Stmt`/`ast::Item`/`ast::ParamList`
/// act as walk-up barriers yielding `(None, None)`. The `cov_mark` hits tie the
/// individual arms to the test suite.
fn expected_type_and_name(
    sema: &Semantics<'_, RootDatabase>,
    token: &SyntaxToken,
    name_like: &ast::NameLike,
) -> (Option<Type>, Option<NameOrNameRef>) {
    let mut node = match token.parent() {
        Some(it) => it,
        None => return (None, None),
    };

    // For positions like `foo(&$0)` the expected type of the argument is a
    // reference, but the completion target is the referent — strip one layer of
    // reference per enclosing `RefExpr`.
    let strip_refs = |mut ty: Type| match name_like {
        ast::NameLike::NameRef(n) => {
            let p = match n.syntax().parent() {
                Some(it) => it,
                None => return ty,
            };
            // Find the outermost field/path expression the name ref is part of,
            // so the `RefExpr` ancestors are counted from the full expression.
            let top_syn = match_ast! {
                match p {
                    ast::FieldExpr(e) => e
                        .syntax()
                        .ancestors()
                        .map_while(ast::FieldExpr::cast)
                        .last()
                        .map(|it| it.syntax().clone()),
                    ast::PathSegment(e) => e
                        .syntax()
                        .ancestors()
                        .skip(1)
                        .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                        .find_map(ast::PathExpr::cast)
                        .map(|it| it.syntax().clone()),
                    _ => None
                }
            };
            let top_syn = match top_syn {
                Some(it) => it,
                None => return ty,
            };
            for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
                cov_mark::hit!(expected_type_fn_param_ref);
                ty = ty.strip_reference();
            }
            ty
        }
        _ => ty,
    };

    loop {
        break match_ast! {
            match node {
                ast::LetStmt(it) => {
                    cov_mark::hit!(expected_type_let_with_leading_char);
                    cov_mark::hit!(expected_type_let_without_leading_char);
                    // Prefer the pattern's type; fall back to the initializer's.
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    let name = match it.pat() {
                        Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                        Some(_) | None => None,
                    };

                    (ty, name)
                },
                ast::LetExpr(it) => {
                    cov_mark::hit!(expected_type_if_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::ArgList(_) => {
                    cov_mark::hit!(expected_type_fn_param);
                    // Inside a call: the expected type/name come from the active
                    // parameter of the callee's signature.
                    ActiveParameter::at_token(
                        &sema,
                        token.clone(),
                    ).map(|ap| {
                        let name = ap.ident().map(NameOrNameRef::Name);

                        let ty = strip_refs(ap.ty);
                        (Some(ty), name)
                    })
                    .unwrap_or((None, None))
                },
                ast::RecordExprFieldList(it) => {
                    // wouldn't try {} be nice...
                    (|| {
                        if token.kind() == T![..]
                            ||token.prev_token().map(|t| t.kind()) == Some(T![..])
                        {
                            // `Foo { ..$0 }`: functional-update base, expect the
                            // record's own type.
                            cov_mark::hit!(expected_type_struct_func_update);
                            let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                            let ty = sema.type_of_expr(&record_expr.into())?;
                            Some((
                                Some(ty.original),
                                None
                            ))
                        } else {
                            cov_mark::hit!(expected_type_struct_field_without_leading_char);
                            let expr_field = token.prev_sibling_or_token()?
                                .into_node()
                                .and_then(ast::RecordExprField::cast)?;
                            let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                            Some((
                                Some(ty),
                                expr_field.field_name().map(NameOrNameRef::NameRef),
                            ))
                        }
                    })().unwrap_or((None, None))
                },
                ast::RecordExprField(it) => {
                    if let Some(expr) = it.expr() {
                        cov_mark::hit!(expected_type_struct_field_with_leading_char);
                        (
                            sema.type_of_expr(&expr).map(TypeInfo::original),
                            it.field_name().map(NameOrNameRef::NameRef),
                        )
                    } else {
                        cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                        let ty = sema.resolve_record_field(&it)
                            .map(|(_, _, ty)| ty);
                        (
                            ty,
                            it.field_name().map(NameOrNameRef::NameRef),
                        )
                    }
                },
                // match foo { $0 }
                // match foo { ..., pat => $0 }
                ast::MatchExpr(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());

                    let ty = if on_arrow {
                        // match foo { ..., pat => $0 }
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&it.into())
                    } else {
                        // match foo { $0 }
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        it.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IfExpr(it) => {
                    let ty = it.condition()
                        .and_then(|e| sema.type_of_expr(&e))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::IdentPat(it) => {
                    cov_mark::hit!(expected_type_if_let_with_leading_char);
                    cov_mark::hit!(expected_type_match_arm_with_leading_char);
                    let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                    (ty, None)
                },
                ast::Fn(it) => {
                    cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                    cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                    let def = sema.to_def(&it);
                    (def.map(|def| def.ret_type(sema.db)), None)
                },
                ast::ClosureExpr(it) => {
                    let ty = sema.type_of_expr(&it.into());
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                // Barriers: don't walk past these, nothing above them is relevant.
                ast::ParamList(_) => (None, None),
                ast::Stmt(_) => (None, None),
                ast::Item(_) => (None, None),
                _ => {
                    match node.parent() {
                        Some(n) => {
                            node = n;
                            continue;
                        },
                        None => (None, None),
                    }
                },
            }
        };
    }
}
487
488 fn classify_lifetime(
489 _sema: &Semantics<'_, RootDatabase>,
490 original_file: &SyntaxNode,
491 lifetime: ast::Lifetime,
492 ) -> Option<LifetimeContext> {
493 let parent = lifetime.syntax().parent()?;
494 if parent.kind() == SyntaxKind::ERROR {
495 return None;
496 }
497
498 let kind = match_ast! {
499 match parent {
500 ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
501 is_decl: param.lifetime().as_ref() == Some(&lifetime),
502 param
503 },
504 ast::BreakExpr(_) => LifetimeKind::LabelRef,
505 ast::ContinueExpr(_) => LifetimeKind::LabelRef,
506 ast::Label(_) => LifetimeKind::LabelDef,
507 _ => LifetimeKind::Lifetime,
508 }
509 };
510 let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
511
512 Some(LifetimeContext { lifetime, kind })
513 }
514
/// Classifies an `ast::Name` (a defining occurrence) at the completion position.
///
/// The kind is determined purely by the name's parent node; unrecognized
/// parents yield `None`. The returned `name` is remapped back into the
/// original (non-speculative) file.
fn classify_name(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    name: ast::Name,
) -> Option<NameContext> {
    let parent = name.syntax().parent()?;
    let kind = match_ast! {
        match parent {
            ast::Const(_) => NameKind::Const,
            ast::ConstParam(_) => NameKind::ConstParam,
            ast::Enum(_) => NameKind::Enum,
            ast::Fn(_) => NameKind::Function,
            ast::IdentPat(bind_pat) => {
                let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                // `Foo { field$0 }` in a pattern: also record the surrounding
                // record pattern, remapped to the original file.
                if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
                    pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                }

                NameKind::IdentPat(pat_ctx)
            },
            ast::MacroDef(_) => NameKind::MacroDef,
            ast::MacroRules(_) => NameKind::MacroRules,
            ast::Module(module) => NameKind::Module(module),
            ast::RecordField(_) => NameKind::RecordField,
            ast::Rename(_) => NameKind::Rename,
            ast::SelfParam(_) => NameKind::SelfParam,
            ast::Static(_) => NameKind::Static,
            ast::Struct(_) => NameKind::Struct,
            ast::Trait(_) => NameKind::Trait,
            ast::TypeAlias(_) => NameKind::TypeAlias,
            ast::TypeParam(_) => NameKind::TypeParam,
            ast::Union(_) => NameKind::Union,
            ast::Variant(_) => NameKind::Variant,
            _ => return None,
        }
    };
    // Remap the name node back into the original file.
    let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
    Some(NameContext { name, kind })
}
554
555 fn classify_name_ref(
556 sema: &Semantics<'_, RootDatabase>,
557 original_file: &SyntaxNode,
558 name_ref: ast::NameRef,
559 parent: SyntaxNode,
560 ) -> Option<(NameRefContext, QualifierCtx)> {
561 let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
562
563 let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
564
565 if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
566 let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
567 .map_or(false, |it| T![.] == it.kind());
568
569 return find_node_in_file_compensated(
570 sema,
571 original_file,
572 &record_field.parent_record_lit(),
573 )
574 .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
575 .map(make_res);
576 }
577 if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
578 let kind = NameRefKind::Pattern(PatternContext {
579 param_ctx: None,
580 has_type_ascription: false,
581 ref_token: None,
582 mut_token: None,
583 record_pat: find_node_in_file_compensated(
584 sema,
585 original_file,
586 &record_field.parent_record_pat(),
587 ),
588 ..pattern_context_for(
589 sema,
590 original_file,
591 record_field.parent_record_pat().clone().into(),
592 )
593 });
594 return Some(make_res(kind));
595 }
596
597 let segment = match_ast! {
598 match parent {
599 ast::PathSegment(segment) => segment,
600 ast::FieldExpr(field) => {
601 let receiver = find_opt_node_in_file(original_file, field.expr());
602 let receiver_is_ambiguous_float_literal = match &receiver {
603 Some(ast::Expr::Literal(l)) => matches! {
604 l.kind(),
605 ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
606 },
607 _ => false,
608 };
609 let kind = NameRefKind::DotAccess(DotAccess {
610 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
611 kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
612 receiver
613 });
614 return Some(make_res(kind));
615 },
616 ast::MethodCallExpr(method) => {
617 let receiver = find_opt_node_in_file(original_file, method.receiver());
618 let kind = NameRefKind::DotAccess(DotAccess {
619 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
620 kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
621 receiver
622 });
623 return Some(make_res(kind));
624 },
625 _ => return None,
626 }
627 };
628
629 let path = segment.parent_path();
630 let original_path = find_node_in_file_compensated(sema, original_file, &path);
631
632 let mut path_ctx = PathCompletionCtx {
633 has_call_parens: false,
634 has_macro_bang: false,
635 qualified: Qualified::No,
636 parent: None,
637 path: path.clone(),
638 original_path,
639 kind: PathKind::Item { kind: ItemListKind::SourceFile },
640 has_type_args: false,
641 use_tree_parent: false,
642 };
643
644 let is_in_block = |it: &SyntaxNode| {
645 it.parent()
646 .map(|node| {
647 ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
648 })
649 .unwrap_or(false)
650 };
651 let func_update_record = |syn: &SyntaxNode| {
652 if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
653 find_node_in_file_compensated(sema, original_file, &record_expr)
654 } else {
655 None
656 }
657 };
658 let after_if_expr = |node: SyntaxNode| {
659 let prev_expr = (|| {
660 let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
661 ast::ExprStmt::cast(prev_sibling)?.expr()
662 })();
663 matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
664 };
665
666 // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
667 // ex. trait Foo $0 {}
668 // in these cases parser recovery usually kicks in for our inserted identifier, causing it
669 // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
670 // expression or an item list.
671 // The following code checks if the body is missing, if it is we either cut off the body
672 // from the item or it was missing in the first place
673 let inbetween_body_and_decl_check = |node: SyntaxNode| {
674 if let Some(NodeOrToken::Node(n)) =
675 syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
676 {
677 if let Some(item) = ast::Item::cast(n) {
678 let is_inbetween = match &item {
679 ast::Item::Const(it) => it.body().is_none(),
680 ast::Item::Enum(it) => it.variant_list().is_none(),
681 ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
682 ast::Item::Fn(it) => it.body().is_none(),
683 ast::Item::Impl(it) => it.assoc_item_list().is_none(),
684 ast::Item::Module(it) => {
685 it.item_list().is_none() && it.semicolon_token().is_none()
686 }
687 ast::Item::Static(it) => it.body().is_none(),
688 ast::Item::Struct(it) => {
689 it.field_list().is_none() && it.semicolon_token().is_none()
690 }
691 ast::Item::Trait(it) => it.assoc_item_list().is_none(),
692 ast::Item::TypeAlias(it) => it.ty().is_none(),
693 ast::Item::Union(it) => it.record_field_list().is_none(),
694 _ => false,
695 };
696 if is_inbetween {
697 return Some(item);
698 }
699 }
700 }
701 None
702 };
703
704 let type_location = |node: &SyntaxNode| {
705 let parent = node.parent()?;
706 let res = match_ast! {
707 match parent {
708 ast::Const(it) => {
709 let name = find_opt_node_in_file(original_file, it.name())?;
710 let original = ast::Const::cast(name.syntax().parent()?)?;
711 TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
712 },
713 ast::RetType(it) => {
714 if it.thin_arrow_token().is_none() {
715 return None;
716 }
717 let parent = match ast::Fn::cast(parent.parent()?) {
718 Some(x) => x.param_list(),
719 None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
720 };
721
722 let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
723 TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
724 match parent {
725 ast::ClosureExpr(it) => {
726 it.body()
727 },
728 ast::Fn(it) => {
729 it.body().map(ast::Expr::BlockExpr)
730 },
731 _ => return None,
732 }
733 }))
734 },
735 ast::Param(it) => {
736 if it.colon_token().is_none() {
737 return None;
738 }
739 TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
740 },
741 ast::LetStmt(it) => {
742 if it.colon_token().is_none() {
743 return None;
744 }
745 TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
746 },
747 ast::Impl(it) => {
748 match it.trait_() {
749 Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
750 _ => match it.self_ty() {
751 Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
752 _ => return None,
753 },
754 }
755 },
756 ast::TypeBound(_) => TypeLocation::TypeBound,
757 // is this case needed?
758 ast::TypeBoundList(_) => TypeLocation::TypeBound,
759 ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
760 // is this case needed?
761 ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
762 ast::TupleField(_) => TypeLocation::TupleField,
763 _ => return None,
764 }
765 };
766 Some(res)
767 };
768
769 let is_in_condition = |it: &ast::Expr| {
770 (|| {
771 let parent = it.syntax().parent()?;
772 if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
773 Some(expr.condition()? == *it)
774 } else if let Some(expr) = ast::IfExpr::cast(parent) {
775 Some(expr.condition()? == *it)
776 } else {
777 None
778 }
779 })()
780 .unwrap_or(false)
781 };
782
783 let make_path_kind_expr = |expr: ast::Expr| {
784 let it = expr.syntax();
785 let in_block_expr = is_in_block(it);
786 let in_loop_body = is_in_loop_body(it);
787 let after_if_expr = after_if_expr(it.clone());
788 let ref_expr_parent =
789 path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
790 let (innermost_ret_ty, self_param) = {
791 let find_ret_ty = |it: SyntaxNode| {
792 if let Some(item) = ast::Item::cast(it.clone()) {
793 match item {
794 ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
795 ast::Item::MacroCall(_) => None,
796 _ => Some(None),
797 }
798 } else {
799 let expr = ast::Expr::cast(it)?;
800 let callable = match expr {
801 // FIXME
802 // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
803 ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
804 _ => return None,
805 };
806 Some(
807 callable
808 .and_then(|c| c.adjusted().as_callable(sema.db))
809 .map(|it| it.return_type()),
810 )
811 }
812 };
813 let find_fn_self_param = |it| match it {
814 ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
815 ast::Item::MacroCall(_) => None,
816 _ => Some(None),
817 };
818
819 match find_node_in_file_compensated(sema, original_file, &expr) {
820 Some(it) => {
821 let innermost_ret_ty = sema
822 .ancestors_with_macros(it.syntax().clone())
823 .find_map(find_ret_ty)
824 .flatten();
825
826 let self_param = sema
827 .ancestors_with_macros(it.syntax().clone())
828 .filter_map(ast::Item::cast)
829 .find_map(find_fn_self_param)
830 .flatten();
831 (innermost_ret_ty, self_param)
832 }
833 None => (None, None),
834 }
835 };
836 let is_func_update = func_update_record(it);
837 let in_condition = is_in_condition(&expr);
838 let incomplete_let = it
839 .parent()
840 .and_then(ast::LetStmt::cast)
841 .map_or(false, |it| it.semicolon_token().is_none());
842 let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
843
844 let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
845 Some(arm) => arm
846 .fat_arrow_token()
847 .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
848 None => false,
849 };
850
851 PathKind::Expr {
852 expr_ctx: ExprCtx {
853 in_block_expr,
854 in_loop_body,
855 after_if_expr,
856 in_condition,
857 ref_expr_parent,
858 is_func_update,
859 innermost_ret_ty,
860 self_param,
861 incomplete_let,
862 impl_,
863 in_match_guard,
864 },
865 }
866 };
867 let make_path_kind_type = |ty: ast::Type| {
868 let location = type_location(ty.syntax());
869 PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
870 };
871
872 let mut kind_macro_call = |it: ast::MacroCall| {
873 path_ctx.has_macro_bang = it.excl_token().is_some();
874 let parent = it.syntax().parent()?;
875 // Any path in an item list will be treated as a macro call by the parser
876 let kind = match_ast! {
877 match parent {
878 ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
879 ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
880 ast::MacroType(ty) => make_path_kind_type(ty.into()),
881 ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
882 ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
883 Some(it) => match_ast! {
884 match it {
885 ast::Trait(_) => ItemListKind::Trait,
886 ast::Impl(it) => if it.trait_().is_some() {
887 ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
888 } else {
889 ItemListKind::Impl
890 },
891 _ => return None
892 }
893 },
894 None => return None,
895 } },
896 ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
897 ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
898 _ => return None,
899 }
900 };
901 Some(kind)
902 };
903 let make_path_kind_attr = |meta: ast::Meta| {
904 let attr = meta.parent_attr()?;
905 let kind = attr.kind();
906 let attached = attr.syntax().parent()?;
907 let is_trailing_outer_attr = kind != AttrKind::Inner
908 && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
909 let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
910 Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
911 };
912
913 // Infer the path kind
914 let parent = path.syntax().parent()?;
915 let kind = match_ast! {
916 match parent {
917 ast::PathType(it) => make_path_kind_type(it.into()),
918 ast::PathExpr(it) => {
919 if let Some(p) = it.syntax().parent() {
920 if ast::ExprStmt::can_cast(p.kind()) {
921 if let Some(kind) = inbetween_body_and_decl_check(p) {
922 return Some(make_res(NameRefKind::Keyword(kind)));
923 }
924 }
925 }
926
927 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
928
929 make_path_kind_expr(it.into())
930 },
931 ast::TupleStructPat(it) => {
932 path_ctx.has_call_parens = true;
933 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
934 },
935 ast::RecordPat(it) => {
936 path_ctx.has_call_parens = true;
937 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
938 },
939 ast::PathPat(it) => {
940 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
941 },
942 ast::MacroCall(it) => {
943 // A macro call in this position is usually a result of parsing recovery, so check that
944 if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
945 return Some(make_res(NameRefKind::Keyword(kind)));
946 }
947
948 kind_macro_call(it)?
949 },
950 ast::Meta(meta) => make_path_kind_attr(meta)?,
951 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
952 ast::UseTree(_) => PathKind::Use,
953 // completing inside a qualifier
954 ast::Path(parent) => {
955 path_ctx.parent = Some(parent.clone());
956 let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
957 match_ast! {
958 match parent {
959 ast::PathType(it) => make_path_kind_type(it.into()),
960 ast::PathExpr(it) => {
961 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
962
963 make_path_kind_expr(it.into())
964 },
965 ast::TupleStructPat(it) => {
966 path_ctx.has_call_parens = true;
967 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
968 },
969 ast::RecordPat(it) => {
970 path_ctx.has_call_parens = true;
971 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
972 },
973 ast::PathPat(it) => {
974 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
975 },
976 ast::MacroCall(it) => {
977 kind_macro_call(it)?
978 },
979 ast::Meta(meta) => make_path_kind_attr(meta)?,
980 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
981 ast::UseTree(_) => PathKind::Use,
982 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
983 _ => return None,
984 }
985 }
986 },
987 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
988 _ => return None,
989 }
990 };
991
992 path_ctx.kind = kind;
993 path_ctx.has_type_args = segment.generic_arg_list().is_some();
994
995 // calculate the qualifier context
996 if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
997 path_ctx.use_tree_parent = use_tree_parent;
998 if !use_tree_parent && segment.coloncolon_token().is_some() {
999 path_ctx.qualified = Qualified::Absolute;
1000 } else {
1001 let qualifier = qualifier
1002 .segment()
1003 .and_then(|it| find_node_in_file(original_file, &it))
1004 .map(|it| it.parent_path());
1005 if let Some(qualifier) = qualifier {
1006 let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
1007 Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
1008 if qualifier.qualifier().is_none() =>
1009 {
1010 Some((type_ref, trait_ref))
1011 }
1012 _ => None,
1013 };
1014
1015 path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
1016 let ty = match ty {
1017 ast::Type::InferType(_) => None,
1018 ty => sema.resolve_type(&ty),
1019 };
1020 let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
1021 Qualified::TypeAnchor { ty, trait_ }
1022 } else {
1023 let res = sema.resolve_path(&qualifier);
1024
1025 // For understanding how and why super_chain_len is calculated the way it
1026 // is check the documentation at it's definition
1027 let mut segment_count = 0;
1028 let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
1029 .take_while(|p| {
1030 p.segment()
1031 .and_then(|s| {
1032 segment_count += 1;
1033 s.super_token()
1034 })
1035 .is_some()
1036 })
1037 .count();
1038
1039 let super_chain_len =
1040 if segment_count > super_count { None } else { Some(super_count) };
1041
1042 Qualified::With { path: qualifier, resolution: res, super_chain_len }
1043 }
1044 };
1045 }
1046 } else if let Some(segment) = path.segment() {
1047 if segment.coloncolon_token().is_some() {
1048 path_ctx.qualified = Qualified::Absolute;
1049 }
1050 }
1051
1052 let mut qualifier_ctx = QualifierCtx::default();
1053 if path_ctx.is_trivial_path() {
1054 // fetch the full expression that may have qualifiers attached to it
1055 let top_node = match path_ctx.kind {
1056 PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
1057 parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1058 let parent = p.parent()?;
1059 if ast::StmtList::can_cast(parent.kind()) {
1060 Some(p)
1061 } else if ast::ExprStmt::can_cast(parent.kind()) {
1062 Some(parent)
1063 } else {
1064 None
1065 }
1066 })
1067 }
1068 PathKind::Item { .. } => {
1069 parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
1070 }
1071 _ => None,
1072 };
1073 if let Some(top) = top_node {
1074 if let Some(NodeOrToken::Node(error_node)) =
1075 syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1076 {
1077 if error_node.kind() == SyntaxKind::ERROR {
1078 qualifier_ctx.unsafe_tok = error_node
1079 .children_with_tokens()
1080 .filter_map(NodeOrToken::into_token)
1081 .find(|it| it.kind() == T![unsafe]);
1082 qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
1083 }
1084 }
1085
1086 if let PathKind::Item { .. } = path_ctx.kind {
1087 if qualifier_ctx.none() {
1088 if let Some(t) = top.first_token() {
1089 if let Some(prev) = t
1090 .prev_token()
1091 .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1092 {
1093 if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
1094 // This was inferred to be an item position path, but it seems
1095 // to be part of some other broken node which leaked into an item
1096 // list
1097 return None;
1098 }
1099 }
1100 }
1101 }
1102 }
1103 }
1104 }
1105 Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
1106 }
1107
/// Computes the [`PatternContext`] for the pattern `pat`.
///
/// The first non-pattern ancestor of `pat` decides:
/// - refutability: `let` statements, parameters and `for` loops require
///   irrefutable patterns, while `match` arms and `let`-expressions accept
///   refutable ones (anything else defaults to irrefutable),
/// - whether the pattern carries an explicit type ascription,
/// - for parameter patterns, the surrounding [`ParamContext`].
fn pattern_context_for(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    pat: ast::Pat,
) -> PatternContext {
    let mut param_ctx = None;
    let (refutability, has_type_ascription) =
    pat
       .syntax()
       .ancestors()
       // walk out of the pattern itself (patterns can nest) to the node owning it
       .skip_while(|it| ast::Pat::can_cast(it.kind()))
       .next()
       // no owning node visible: fall back to the most restrictive assumption
       .map_or((PatternRefutability::Irrefutable, false), |node| {
           let refutability = match_ast! {
               match node {
                   ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
                   ast::Param(param) => {
                       let has_type_ascription = param.ty().is_some();
                       // Map the (speculative) parameter back into the original file
                       // to recover its param list and owner (fn or closure).
                       param_ctx = (|| {
                           let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
                           let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
                           let param_list_owner = param_list.syntax().parent()?;
                           let kind = match_ast! {
                               match param_list_owner {
                                   ast::ClosureExpr(closure) => ParamKind::Closure(closure),
                                   ast::Fn(fn_) => ParamKind::Function(fn_),
                                   _ => return None,
                               }
                           };
                           Some(ParamContext {
                               param_list, param, kind
                           })
                       })();
                       return (PatternRefutability::Irrefutable, has_type_ascription)
                   },
                   ast::MatchArm(_) => PatternRefutability::Refutable,
                   ast::LetExpr(_) => PatternRefutability::Refutable,
                   ast::ForExpr(_) => PatternRefutability::Irrefutable,
                   _ => PatternRefutability::Irrefutable,
               }
           };
           // only the LetStmt/Param arms above can observe a type ascription
           (refutability, false)
       });
    // `ref`/`mut` tokens only exist on identifier patterns
    let (ref_token, mut_token) = match &pat {
        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
        _ => (None, None),
    };

    PatternContext {
        refutability,
        param_ctx,
        has_type_ascription,
        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
        mut_token,
        ref_token,
        record_pat: None,
        impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
    }
}
1167
1168 fn fetch_immediate_impl(
1169 sema: &Semantics<'_, RootDatabase>,
1170 original_file: &SyntaxNode,
1171 node: &SyntaxNode,
1172 ) -> Option<ast::Impl> {
1173 let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1174 .filter_map(ast::Item::cast)
1175 .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1176
1177 match ancestors.next()? {
1178 ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1179 ast::Item::Impl(it) => return Some(it),
1180 _ => return None,
1181 }
1182 match ancestors.next()? {
1183 ast::Item::Impl(it) => Some(it),
1184 _ => None,
1185 }
1186 }
1187
1188 /// Attempts to find `node` inside `syntax` via `node`'s text range.
1189 /// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1190 fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1191 find_node_in_file(syntax, &node?)
1192 }
1193
1194 /// Attempts to find `node` inside `syntax` via `node`'s text range.
1195 /// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1196 fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1197 let syntax_range = syntax.text_range();
1198 let range = node.syntax().text_range();
1199 let intersection = range.intersect(syntax_range)?;
1200 syntax.covering_element(intersection).ancestors().find_map(N::cast)
1201 }
1202
1203 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1204 /// for the offset introduced by the fake ident.
1205 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1206 fn find_node_in_file_compensated<N: AstNode>(
1207 sema: &Semantics<'_, RootDatabase>,
1208 in_file: &SyntaxNode,
1209 node: &N,
1210 ) -> Option<N> {
1211 ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1212 }
1213
1214 fn ancestors_in_file_compensated<'sema>(
1215 sema: &'sema Semantics<'_, RootDatabase>,
1216 in_file: &SyntaxNode,
1217 node: &SyntaxNode,
1218 ) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1219 let syntax_range = in_file.text_range();
1220 let range = node.text_range();
1221 let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1222 if end < range.start() {
1223 return None;
1224 }
1225 let range = TextRange::new(range.start(), end);
1226 // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1227 let intersection = range.intersect(syntax_range)?;
1228 let node = match in_file.covering_element(intersection) {
1229 NodeOrToken::Node(node) => node,
1230 NodeOrToken::Token(tok) => tok.parent()?,
1231 };
1232 Some(sema.ancestors_with_macros(node))
1233 }
1234
1235 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1236 /// for the offset introduced by the fake ident..
1237 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1238 fn find_opt_node_in_file_compensated<N: AstNode>(
1239 sema: &Semantics<'_, RootDatabase>,
1240 syntax: &SyntaxNode,
1241 node: Option<N>,
1242 ) -> Option<N> {
1243 find_node_in_file_compensated(sema, syntax, &node?)
1244 }
1245
1246 fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1247 if let Some(qual) = path.qualifier() {
1248 return Some((qual, false));
1249 }
1250 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1251 let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1252 Some((use_tree.path()?, true))
1253 }
1254
/// Returns `true` when `element` is a token sitting at the position where the
/// `in` keyword of an enclosing `for` loop belongs (e.g. `for pat i$0 ...`),
/// so that `in` can be offered as a completion.
pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
    // oh my ...
    (|| {
        let syntax_token = element.into_token()?;
        let range = syntax_token.text_range();
        let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;

        // check if the current token is the `in` token of a for loop
        if let Some(token) = for_expr.in_token() {
            return Some(syntax_token == token);
        }
        let pat = for_expr.pat()?;
        if range.end() < pat.syntax().text_range().end() {
            // if we are inside or before the pattern we can't be at the `in` token position
            return None;
        }
        // no `in` token parsed: inspect whatever follows the pattern instead
        let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
        Some(match next_sibl {
            // the loop body is some node, if our token is at the start we are at the `in` position,
            // otherwise we could be in a recovered expression, we don't wanna ruin completions there
            syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
            // the loop body consists of a single token, if we are this we are certainly at the `in` token position
            syntax::NodeOrToken::Token(t) => t == syntax_token,
        })
    })()
    .unwrap_or(false)
}
1282
#[test]
fn test_for_is_prev2() {
    // Typing `i$0` right after the pattern of a `for` loop must count as the
    // `in` keyword position.
    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
}
1287
1288 pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
1289 node.ancestors()
1290 .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
1291 .find_map(|it| {
1292 let loop_body = match_ast! {
1293 match it {
1294 ast::ForExpr(it) => it.loop_body(),
1295 ast::WhileExpr(it) => it.loop_body(),
1296 ast::LoopExpr(it) => it.loop_body(),
1297 _ => None,
1298 }
1299 };
1300 loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
1301 })
1302 .is_some()
1303 }
1304
1305 fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
1306 let mut token = match e.into() {
1307 SyntaxElement::Node(n) => n.first_token()?,
1308 SyntaxElement::Token(t) => t,
1309 }
1310 .prev_token();
1311 while let Some(inner) = token {
1312 if !inner.kind().is_trivia() {
1313 return Some(inner);
1314 } else {
1315 token = inner.prev_token();
1316 }
1317 }
1318 None
1319 }
1320
1321 fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
1322 let mut e = ele.next_sibling_or_token();
1323 while let Some(inner) = e {
1324 if !inner.kind().is_trivia() {
1325 return Some(inner);
1326 } else {
1327 e = inner.next_sibling_or_token();
1328 }
1329 }
1330 None
1331 }