]> git.proxmox.com Git - rustc.git/blame - src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
New upstream version 1.65.0+dfsg1
[rustc.git] / src / tools / rust-analyzer / crates / ide-completion / src / context / analysis.rs
CommitLineData
064997fb
FG
1//! Module responsible for analyzing the code surrounding the cursor for completion.
2use std::iter;
3
4use hir::{Semantics, Type, TypeInfo};
5use ide_db::{active_parameter::ActiveParameter, RootDatabase};
6use syntax::{
7 algo::{find_node_at_offset, non_trivia_sibling},
8 ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
9 match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
10 SyntaxToken, TextRange, TextSize, T,
11};
12
13use crate::context::{
14 AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
15 ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
16 NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
17 PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
18 COMPLETION_MARKER,
19};
20
21impl<'a> CompletionContext<'a> {
22 /// Expand attributes and macro calls at the current cursor position for both the original file
23 /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
24 /// and speculative states stay in sync.
25 pub(super) fn expand_and_analyze(
26 &mut self,
27 mut original_file: SyntaxNode,
28 mut speculative_file: SyntaxNode,
29 mut offset: TextSize,
30 mut fake_ident_token: SyntaxToken,
31 ) -> Option<CompletionAnalysis> {
32 let _p = profile::span("CompletionContext::expand_and_fill");
33 let mut derive_ctx = None;
34
35 'expansion: loop {
36 let parent_item =
37 |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
38 let ancestor_items = iter::successors(
39 Option::zip(
40 find_node_at_offset::<ast::Item>(&original_file, offset),
41 find_node_at_offset::<ast::Item>(&speculative_file, offset),
42 ),
43 |(a, b)| parent_item(a).zip(parent_item(b)),
44 );
45
46 // first try to expand attributes as these are always the outermost macro calls
47 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
48 match (
49 self.sema.expand_attr_macro(&actual_item),
50 self.sema.speculative_expand_attr_macro(
51 &actual_item,
52 &item_with_fake_ident,
53 fake_ident_token.clone(),
54 ),
55 ) {
56 // maybe parent items have attributes, so continue walking the ancestors
57 (None, None) => continue 'ancestors,
58 // successful expansions
59 (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
60 let new_offset = fake_mapped_token.text_range().start();
61 if new_offset > actual_expansion.text_range().end() {
62 // offset outside of bounds from the original expansion,
63 // stop here to prevent problems from happening
64 break 'expansion;
65 }
66 original_file = actual_expansion;
67 speculative_file = fake_expansion;
68 fake_ident_token = fake_mapped_token;
69 offset = new_offset;
70 continue 'expansion;
71 }
72 // exactly one expansion failed, inconsistent state so stop expanding completely
73 _ => break 'expansion,
74 }
75 }
76
77 // No attributes have been expanded, so look for macro_call! token trees or derive token trees
78 let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
79 Some(it) => it,
80 None => break 'expansion,
81 };
82 let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
83 Some(it) => it,
84 None => break 'expansion,
85 };
86
87 // Expand pseudo-derive expansion
88 if let (Some(orig_attr), Some(spec_attr)) = (
89 orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
90 spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
91 ) {
92 if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
93 self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
94 self.sema.speculative_expand_derive_as_pseudo_attr_macro(
95 &orig_attr,
96 &spec_attr,
97 fake_ident_token.clone(),
98 ),
99 ) {
100 derive_ctx = Some((
101 actual_expansion,
102 fake_expansion,
103 fake_mapped_token.text_range().start(),
104 orig_attr,
105 ));
106 }
107 // at this point we won't have any more successful expansions, so stop
108 break 'expansion;
109 }
110
111 // Expand fn-like macro calls
112 if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
113 orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
114 spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
115 ) {
116 let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
117 let mac_call_path1 =
118 macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
119
120 // inconsistent state, stop expanding
121 if mac_call_path0 != mac_call_path1 {
122 break 'expansion;
123 }
124 let speculative_args = match macro_call_with_fake_ident.token_tree() {
125 Some(tt) => tt,
126 None => break 'expansion,
127 };
128
129 match (
130 self.sema.expand(&actual_macro_call),
131 self.sema.speculative_expand(
132 &actual_macro_call,
133 &speculative_args,
134 fake_ident_token.clone(),
135 ),
136 ) {
137 // successful expansions
138 (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
139 let new_offset = fake_mapped_token.text_range().start();
140 if new_offset > actual_expansion.text_range().end() {
141 // offset outside of bounds from the original expansion,
142 // stop here to prevent problems from happening
143 break 'expansion;
144 }
145 original_file = actual_expansion;
146 speculative_file = fake_expansion;
147 fake_ident_token = fake_mapped_token;
148 offset = new_offset;
149 continue 'expansion;
150 }
151 // at least on expansion failed, we won't have anything to expand from this point
152 // onwards so break out
153 _ => break 'expansion,
154 }
155 }
156
157 // none of our states have changed so stop the loop
158 break 'expansion;
159 }
160
161 self.analyze(&original_file, speculative_file, offset, derive_ctx)
162 }
163
    /// Calculate the expected type and name of the cursor position.
    ///
    /// Walks up from the token's parent node until it hits a syntactic construct
    /// it knows how to derive an expected type from (`let`, argument lists, record
    /// fields, match arms, return positions, ...). Returns `(None, None)` when no
    /// enclosing construct yields an expectation.
    fn expected_type_and_name(
        &self,
        name_like: &ast::NameLike,
    ) -> (Option<Type>, Option<NameOrNameRef>) {
        let mut node = match self.token.parent() {
            Some(it) => it,
            None => return (None, None),
        };

        // For a name-ref wrapped in `&`-expressions (e.g. `f(&&$0)`), strip one
        // reference layer off the expected type per enclosing `RefExpr`, so the
        // expectation matches what the user will actually write at the cursor.
        let strip_refs = |mut ty: Type| match name_like {
            ast::NameLike::NameRef(n) => {
                let p = match n.syntax().parent() {
                    Some(it) => it,
                    None => return ty,
                };
                // Find the outermost node that still represents the completed
                // expression: the last chained `FieldExpr`, or the `PathExpr`
                // wrapping a path segment.
                let top_syn = match_ast! {
                    match p {
                        ast::FieldExpr(e) => e
                            .syntax()
                            .ancestors()
                            .map_while(ast::FieldExpr::cast)
                            .last()
                            .map(|it| it.syntax().clone()),
                        ast::PathSegment(e) => e
                            .syntax()
                            .ancestors()
                            .skip(1)
                            .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                            .find_map(ast::PathExpr::cast)
                            .map(|it| it.syntax().clone()),
                        _ => None
                    }
                };
                let top_syn = match top_syn {
                    Some(it) => it,
                    None => return ty,
                };
                // One `strip_reference` per consecutive `RefExpr` ancestor.
                for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
                    cov_mark::hit!(expected_type_fn_param_ref);
                    ty = ty.strip_reference();
                }
                ty
            }
            _ => ty,
        };

        // Ascend the tree; `break` with a result once a known construct matches,
        // the catch-all arm moves to the parent and `continue`s.
        loop {
            break match_ast! {
                match node {
                    ast::LetStmt(it) => {
                        cov_mark::hit!(expected_type_let_with_leading_char);
                        cov_mark::hit!(expected_type_let_without_leading_char);
                        // Prefer the pattern's type; fall back to the initializer's.
                        let ty = it.pat()
                            .and_then(|pat| self.sema.type_of_pat(&pat))
                            .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
                            .map(TypeInfo::original);
                        let name = match it.pat() {
                            Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                            Some(_) | None => None,
                        };

                        (ty, name)
                    },
                    ast::LetExpr(it) => {
                        cov_mark::hit!(expected_type_if_let_without_leading_char);
                        let ty = it.pat()
                            .and_then(|pat| self.sema.type_of_pat(&pat))
                            .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
                            .map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::ArgList(_) => {
                        cov_mark::hit!(expected_type_fn_param);
                        // Use the active parameter of the enclosing call for both
                        // the expected type and the suggested name.
                        ActiveParameter::at_token(
                            &self.sema,
                            self.token.clone(),
                        ).map(|ap| {
                            let name = ap.ident().map(NameOrNameRef::Name);

                            let ty = strip_refs(ap.ty);
                            (Some(ty), name)
                        })
                        .unwrap_or((None, None))
                    },
                    ast::RecordExprFieldList(it) => {
                        // wouldn't try {} be nice...
                        (|| {
                            if self.token.kind() == T![..]
                                || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
                            {
                                // Functional-update syntax `Foo { ..$0 }`: expect the
                                // record's own type.
                                cov_mark::hit!(expected_type_struct_func_update);
                                let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                                let ty = self.sema.type_of_expr(&record_expr.into())?;
                                Some((
                                    Some(ty.original),
                                    None
                                ))
                            } else {
                                cov_mark::hit!(expected_type_struct_field_without_leading_char);
                                // Expect the type of the field preceding the cursor.
                                let expr_field = self.token.prev_sibling_or_token()?
                                    .into_node()
                                    .and_then(ast::RecordExprField::cast)?;
                                let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
                                Some((
                                    Some(ty),
                                    expr_field.field_name().map(NameOrNameRef::NameRef),
                                ))
                            }
                        })().unwrap_or((None, None))
                    },
                    ast::RecordExprField(it) => {
                        if let Some(expr) = it.expr() {
                            cov_mark::hit!(expected_type_struct_field_with_leading_char);
                            (
                                self.sema.type_of_expr(&expr).map(TypeInfo::original),
                                it.field_name().map(NameOrNameRef::NameRef),
                            )
                        } else {
                            cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                            let ty = self.sema.resolve_record_field(&it)
                                .map(|(_, _, ty)| ty);
                            (
                                ty,
                                it.field_name().map(NameOrNameRef::NameRef),
                            )
                        }
                    },
                    // match foo { $0 }
                    // match foo { ..., pat => $0 }
                    ast::MatchExpr(it) => {
                        let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());

                        let ty = if on_arrow {
                            // match foo { ..., pat => $0 }
                            cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                            cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                            self.sema.type_of_expr(&it.into())
                        } else {
                            // match foo { $0 }
                            cov_mark::hit!(expected_type_match_arm_without_leading_char);
                            it.expr().and_then(|e| self.sema.type_of_expr(&e))
                        }.map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::IfExpr(it) => {
                        // Cursor in an `if` condition: expect the condition's type.
                        let ty = it.condition()
                            .and_then(|e| self.sema.type_of_expr(&e))
                            .map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::IdentPat(it) => {
                        cov_mark::hit!(expected_type_if_let_with_leading_char);
                        cov_mark::hit!(expected_type_match_arm_with_leading_char);
                        let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::Fn(it) => {
                        cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                        cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                        // Tail/return position: expect the function's return type.
                        let def = self.sema.to_def(&it);
                        (def.map(|def| def.ret_type(self.db)), None)
                    },
                    ast::ClosureExpr(it) => {
                        // Closure body: expect the closure's return type, if its
                        // type is known and callable.
                        let ty = self.sema.type_of_expr(&it.into());
                        ty.and_then(|ty| ty.original.as_callable(self.db))
                            .map(|c| (Some(c.return_type()), None))
                            .unwrap_or((None, None))
                    },
                    // Hard stops: crossing these boundaries would attribute an
                    // unrelated outer expectation to the cursor.
                    ast::ParamList(_) => (None, None),
                    ast::Stmt(_) => (None, None),
                    ast::Item(_) => (None, None),
                    _ => {
                        match node.parent() {
                            Some(n) => {
                                node = n;
                                continue;
                            },
                            None => (None, None),
                        }
                    },
                }
            };
        }
    }
349
    /// Fill the completion context, this is what does semantic reasoning about the surrounding context
    /// of the completion location.
    ///
    /// Classifies the name-like node under the (fake-ident) cursor into a
    /// [`CompletionAnalysis`]; also sets `self.expected_type`, `self.expected_name`
    /// and `self.qualifier_ctx` as side effects. Returns `None` when the position
    /// cannot be classified.
    fn analyze(
        &mut self,
        original_file: &SyntaxNode,
        file_with_fake_ident: SyntaxNode,
        offset: TextSize,
        derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
    ) -> Option<CompletionAnalysis> {
        let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
        let syntax_element = NodeOrToken::Token(fake_ident_token);
        if is_in_token_of_for_loop(syntax_element.clone()) {
            // for pat $0
            // there is nothing to complete here except `in` keyword
            // don't bother populating the context
            // FIXME: the completion calculations should end up good enough
            // such that this special case becomes unnecessary
            return None;
        }

        // Overwrite the path kind for derives
        // (`derive_ctx` is produced by `expand_and_analyze` when the cursor sits
        // inside a `#[derive(...)]` token tree; the shadowed bindings below refer
        // to the derive expansion, not the outer files).
        if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
            if let Some(ast::NameLike::NameRef(name_ref)) =
                find_node_at_offset(&file_with_fake_ident, offset)
            {
                let parent = name_ref.syntax().parent()?;
                let (mut nameref_ctx, _) =
                    Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
                if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                    // Record already-applied derives so they are not suggested again.
                    path_ctx.kind = PathKind::Derive {
                        existing_derives: self
                            .sema
                            .resolve_derive_macro(&origin_attr)
                            .into_iter()
                            .flatten()
                            .flatten()
                            .collect(),
                    };
                }
                return Some(CompletionAnalysis::NameRef(nameref_ctx));
            }
            return None;
        }

        let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
            Some(it) => it,
            None => {
                // No name-like node under the cursor: handle the string-literal and
                // unexpanded-attribute fallback cases.
                let analysis =
                    if let Some(original) = ast::String::cast(self.original_token.clone()) {
                        CompletionAnalysis::String {
                            original,
                            expanded: ast::String::cast(self.token.clone()),
                        }
                    } else {
                        // Fix up trailing whitespace problem
                        // #[attr(foo = $0
                        let token =
                            syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
                        let p = token.parent()?;
                        if p.kind() == SyntaxKind::TOKEN_TREE
                            && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
                        {
                            let colon_prefix = previous_non_trivia_token(self.token.clone())
                                .map_or(false, |it| T![:] == it.kind());
                            CompletionAnalysis::UnexpandedAttrTT {
                                fake_attribute_under_caret: syntax_element
                                    .ancestors()
                                    .find_map(ast::Attr::cast),
                                colon_prefix,
                            }
                        } else {
                            return None;
                        }
                    };
                return Some(analysis);
            }
        };
        // Side effect: compute the expectation for the surrounding position.
        (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
        // Dispatch on the kind of name-like node under the cursor.
        let analysis = match name_like {
            ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
                Self::classify_lifetime(&self.sema, original_file, lifetime)?,
            ),
            ast::NameLike::NameRef(name_ref) => {
                let parent = name_ref.syntax().parent()?;
                let (nameref_ctx, qualifier_ctx) =
                    Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;

                self.qualifier_ctx = qualifier_ctx;
                CompletionAnalysis::NameRef(nameref_ctx)
            }
            ast::NameLike::Name(name) => {
                let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
                CompletionAnalysis::Name(name_ctx)
            }
        };
        Some(analysis)
    }
447
448 fn classify_lifetime(
449 _sema: &Semantics<'_, RootDatabase>,
450 original_file: &SyntaxNode,
451 lifetime: ast::Lifetime,
452 ) -> Option<LifetimeContext> {
453 let parent = lifetime.syntax().parent()?;
454 if parent.kind() == SyntaxKind::ERROR {
455 return None;
456 }
457
458 let kind = match_ast! {
459 match parent {
460 ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
461 is_decl: param.lifetime().as_ref() == Some(&lifetime),
462 param
463 },
464 ast::BreakExpr(_) => LifetimeKind::LabelRef,
465 ast::ContinueExpr(_) => LifetimeKind::LabelRef,
466 ast::Label(_) => LifetimeKind::LabelDef,
467 _ => LifetimeKind::Lifetime,
468 }
469 };
470 let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
471
472 Some(LifetimeContext { lifetime, kind })
473 }
474
    /// Classify a `Name` node (a definition site) at the completion position.
    ///
    /// Maps the name's parent node to the corresponding [`NameKind`]; for ident
    /// patterns it additionally builds a [`PatternContext`]. Returns `None` for
    /// parents that are not name-introducing items.
    fn classify_name(
        sema: &Semantics<'_, RootDatabase>,
        original_file: &SyntaxNode,
        name: ast::Name,
    ) -> Option<NameContext> {
        let parent = name.syntax().parent()?;
        let kind = match_ast! {
            match parent {
                ast::Const(_) => NameKind::Const,
                ast::ConstParam(_) => NameKind::ConstParam,
                ast::Enum(_) => NameKind::Enum,
                ast::Fn(_) => NameKind::Function,
                ast::IdentPat(bind_pat) => {
                    let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                    // Shorthand record-pattern field (`Foo { name$0 }`): attach the
                    // enclosing record pattern, resolved in the original file.
                    if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
                        pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                    }

                    NameKind::IdentPat(pat_ctx)
                },
                ast::MacroDef(_) => NameKind::MacroDef,
                ast::MacroRules(_) => NameKind::MacroRules,
                ast::Module(module) => NameKind::Module(module),
                ast::RecordField(_) => NameKind::RecordField,
                ast::Rename(_) => NameKind::Rename,
                ast::SelfParam(_) => NameKind::SelfParam,
                ast::Static(_) => NameKind::Static,
                ast::Struct(_) => NameKind::Struct,
                ast::Trait(_) => NameKind::Trait,
                ast::TypeAlias(_) => NameKind::TypeAlias,
                ast::TypeParam(_) => NameKind::TypeParam,
                ast::Union(_) => NameKind::Union,
                ast::Variant(_) => NameKind::Variant,
                _ => return None,
            }
        };
        // Map the speculative name node back to its counterpart in the original file.
        let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
        Some(NameContext { name, kind })
    }
514
515 fn classify_name_ref(
516 sema: &Semantics<'_, RootDatabase>,
517 original_file: &SyntaxNode,
518 name_ref: ast::NameRef,
519 parent: SyntaxNode,
520 ) -> Option<(NameRefContext, QualifierCtx)> {
521 let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
522
523 let make_res =
524 |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
525
526 if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
527 let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
528 .map_or(false, |it| T![.] == it.kind());
529
530 return find_node_in_file_compensated(
531 sema,
532 original_file,
533 &record_field.parent_record_lit(),
534 )
535 .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
536 .map(make_res);
537 }
538 if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
539 let kind = NameRefKind::Pattern(PatternContext {
540 param_ctx: None,
541 has_type_ascription: false,
542 ref_token: None,
543 mut_token: None,
544 record_pat: find_node_in_file_compensated(
545 sema,
546 original_file,
547 &record_field.parent_record_pat(),
548 ),
549 ..pattern_context_for(
550 sema,
551 original_file,
552 record_field.parent_record_pat().clone().into(),
553 )
554 });
555 return Some(make_res(kind));
556 }
557
558 let segment = match_ast! {
559 match parent {
560 ast::PathSegment(segment) => segment,
561 ast::FieldExpr(field) => {
562 let receiver = find_opt_node_in_file(original_file, field.expr());
563 let receiver_is_ambiguous_float_literal = match &receiver {
564 Some(ast::Expr::Literal(l)) => matches! {
565 l.kind(),
566 ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
567 },
568 _ => false,
569 };
570 let kind = NameRefKind::DotAccess(DotAccess {
571 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
572 kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
573 receiver
574 });
575 return Some(make_res(kind));
576 },
577 ast::MethodCallExpr(method) => {
578 let receiver = find_opt_node_in_file(original_file, method.receiver());
579 let kind = NameRefKind::DotAccess(DotAccess {
580 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
581 kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
582 receiver
583 });
584 return Some(make_res(kind));
585 },
586 _ => return None,
587 }
588 };
589
590 let path = segment.parent_path();
f2b60f7d
FG
591 let original_path = find_node_in_file_compensated(sema, original_file, &path);
592
064997fb
FG
593 let mut path_ctx = PathCompletionCtx {
594 has_call_parens: false,
595 has_macro_bang: false,
596 qualified: Qualified::No,
597 parent: None,
598 path: path.clone(),
f2b60f7d 599 original_path,
064997fb
FG
600 kind: PathKind::Item { kind: ItemListKind::SourceFile },
601 has_type_args: false,
602 use_tree_parent: false,
603 };
604
605 let is_in_block = |it: &SyntaxNode| {
606 it.parent()
607 .map(|node| {
608 ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
609 })
610 .unwrap_or(false)
611 };
612 let func_update_record = |syn: &SyntaxNode| {
613 if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
614 find_node_in_file_compensated(sema, original_file, &record_expr)
615 } else {
616 None
617 }
618 };
619 let after_if_expr = |node: SyntaxNode| {
620 let prev_expr = (|| {
621 let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
622 ast::ExprStmt::cast(prev_sibling)?.expr()
623 })();
624 matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
625 };
626
627 // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
628 // ex. trait Foo $0 {}
629 // in these cases parser recovery usually kicks in for our inserted identifier, causing it
630 // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
631 // expression or an item list.
632 // The following code checks if the body is missing, if it is we either cut off the body
633 // from the item or it was missing in the first place
634 let inbetween_body_and_decl_check = |node: SyntaxNode| {
635 if let Some(NodeOrToken::Node(n)) =
636 syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
637 {
638 if let Some(item) = ast::Item::cast(n) {
639 let is_inbetween = match &item {
640 ast::Item::Const(it) => it.body().is_none(),
641 ast::Item::Enum(it) => it.variant_list().is_none(),
642 ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
643 ast::Item::Fn(it) => it.body().is_none(),
644 ast::Item::Impl(it) => it.assoc_item_list().is_none(),
645 ast::Item::Module(it) => it.item_list().is_none(),
646 ast::Item::Static(it) => it.body().is_none(),
647 ast::Item::Struct(it) => it.field_list().is_none(),
648 ast::Item::Trait(it) => it.assoc_item_list().is_none(),
649 ast::Item::TypeAlias(it) => it.ty().is_none(),
650 ast::Item::Union(it) => it.record_field_list().is_none(),
651 _ => false,
652 };
653 if is_inbetween {
654 return Some(item);
655 }
656 }
657 }
658 None
659 };
660
661 let type_location = |node: &SyntaxNode| {
662 let parent = node.parent()?;
663 let res = match_ast! {
664 match parent {
665 ast::Const(it) => {
666 let name = find_opt_node_in_file(original_file, it.name())?;
667 let original = ast::Const::cast(name.syntax().parent()?)?;
668 TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
669 },
670 ast::RetType(it) => {
671 if it.thin_arrow_token().is_none() {
672 return None;
673 }
674 let parent = match ast::Fn::cast(parent.parent()?) {
675 Some(x) => x.param_list(),
676 None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
677 };
678
679 let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
680 TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
681 match parent {
682 ast::ClosureExpr(it) => {
683 it.body()
684 },
685 ast::Fn(it) => {
686 it.body().map(ast::Expr::BlockExpr)
687 },
688 _ => return None,
689 }
690 }))
691 },
692 ast::Param(it) => {
693 if it.colon_token().is_none() {
694 return None;
695 }
696 TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
697 },
698 ast::LetStmt(it) => {
699 if it.colon_token().is_none() {
700 return None;
701 }
702 TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
703 },
704 ast::Impl(it) => {
705 match it.trait_() {
706 Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
707 _ => match it.self_ty() {
708 Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
709 _ => return None,
710 },
711 }
712 },
713 ast::TypeBound(_) => TypeLocation::TypeBound,
714 // is this case needed?
715 ast::TypeBoundList(_) => TypeLocation::TypeBound,
716 ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
717 // is this case needed?
718 ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
719 ast::TupleField(_) => TypeLocation::TupleField,
720 _ => return None,
721 }
722 };
723 Some(res)
724 };
725
726 let is_in_condition = |it: &ast::Expr| {
727 (|| {
728 let parent = it.syntax().parent()?;
729 if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
730 Some(expr.condition()? == *it)
731 } else if let Some(expr) = ast::IfExpr::cast(parent) {
732 Some(expr.condition()? == *it)
733 } else {
734 None
735 }
736 })()
737 .unwrap_or(false)
738 };
739
740 let make_path_kind_expr = |expr: ast::Expr| {
741 let it = expr.syntax();
742 let in_block_expr = is_in_block(it);
743 let in_loop_body = is_in_loop_body(it);
744 let after_if_expr = after_if_expr(it.clone());
745 let ref_expr_parent =
746 path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
747 let (innermost_ret_ty, self_param) = {
748 let find_ret_ty = |it: SyntaxNode| {
749 if let Some(item) = ast::Item::cast(it.clone()) {
750 match item {
751 ast::Item::Fn(f) => {
752 Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
753 }
754 ast::Item::MacroCall(_) => None,
755 _ => Some(None),
756 }
757 } else {
758 let expr = ast::Expr::cast(it)?;
759 let callable = match expr {
760 // FIXME
761 // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
762 ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
763 _ => return None,
764 };
765 Some(
766 callable
767 .and_then(|c| c.adjusted().as_callable(sema.db))
768 .map(|it| it.return_type()),
769 )
770 }
771 };
772 let find_fn_self_param = |it| match it {
773 ast::Item::Fn(fn_) => {
774 Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
775 }
776 ast::Item::MacroCall(_) => None,
777 _ => Some(None),
778 };
779
780 match find_node_in_file_compensated(sema, original_file, &expr) {
781 Some(it) => {
782 let innermost_ret_ty = sema
783 .ancestors_with_macros(it.syntax().clone())
784 .find_map(find_ret_ty)
785 .flatten();
786
787 let self_param = sema
788 .ancestors_with_macros(it.syntax().clone())
789 .filter_map(ast::Item::cast)
790 .find_map(find_fn_self_param)
791 .flatten();
792 (innermost_ret_ty, self_param)
793 }
794 None => (None, None),
795 }
796 };
797 let is_func_update = func_update_record(it);
798 let in_condition = is_in_condition(&expr);
799 let incomplete_let = it
800 .parent()
801 .and_then(ast::LetStmt::cast)
802 .map_or(false, |it| it.semicolon_token().is_none());
803 let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
804
805 let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
806 Some(arm) => arm
807 .fat_arrow_token()
808 .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
809 None => false,
810 };
811
812 PathKind::Expr {
813 expr_ctx: ExprCtx {
814 in_block_expr,
815 in_loop_body,
816 after_if_expr,
817 in_condition,
818 ref_expr_parent,
819 is_func_update,
820 innermost_ret_ty,
821 self_param,
822 incomplete_let,
823 impl_,
824 in_match_guard,
825 },
826 }
827 };
828 let make_path_kind_type = |ty: ast::Type| {
829 let location = type_location(ty.syntax());
830 PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
831 };
832
833 let mut kind_macro_call = |it: ast::MacroCall| {
834 path_ctx.has_macro_bang = it.excl_token().is_some();
835 let parent = it.syntax().parent()?;
836 // Any path in an item list will be treated as a macro call by the parser
837 let kind = match_ast! {
838 match parent {
839 ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
840 ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
841 ast::MacroType(ty) => make_path_kind_type(ty.into()),
842 ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
843 ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
844 Some(it) => match_ast! {
845 match it {
846 ast::Trait(_) => ItemListKind::Trait,
847 ast::Impl(it) => if it.trait_().is_some() {
848 ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
849 } else {
850 ItemListKind::Impl
851 },
852 _ => return None
853 }
854 },
855 None => return None,
856 } },
857 ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
858 ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
859 _ => return None,
860 }
861 };
862 Some(kind)
863 };
864 let make_path_kind_attr = |meta: ast::Meta| {
865 let attr = meta.parent_attr()?;
866 let kind = attr.kind();
867 let attached = attr.syntax().parent()?;
868 let is_trailing_outer_attr = kind != AttrKind::Inner
869 && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
870 .is_none();
871 let annotated_item_kind =
872 if is_trailing_outer_attr { None } else { Some(attached.kind()) };
873 Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
874 };
875
876 // Infer the path kind
877 let parent = path.syntax().parent()?;
878 let kind = match_ast! {
879 match parent {
880 ast::PathType(it) => make_path_kind_type(it.into()),
881 ast::PathExpr(it) => {
882 if let Some(p) = it.syntax().parent() {
883 if ast::ExprStmt::can_cast(p.kind()) {
884 if let Some(kind) = inbetween_body_and_decl_check(p) {
885 return Some(make_res(NameRefKind::Keyword(kind)));
886 }
887 }
888 }
889
890 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
891
892 make_path_kind_expr(it.into())
893 },
894 ast::TupleStructPat(it) => {
895 path_ctx.has_call_parens = true;
896 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
897 },
898 ast::RecordPat(it) => {
899 path_ctx.has_call_parens = true;
900 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
901 },
902 ast::PathPat(it) => {
903 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
904 },
905 ast::MacroCall(it) => {
906 // A macro call in this position is usually a result of parsing recovery, so check that
907 if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
908 return Some(make_res(NameRefKind::Keyword(kind)));
909 }
910
911 kind_macro_call(it)?
912 },
913 ast::Meta(meta) => make_path_kind_attr(meta)?,
914 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
915 ast::UseTree(_) => PathKind::Use,
916 // completing inside a qualifier
917 ast::Path(parent) => {
918 path_ctx.parent = Some(parent.clone());
919 let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
920 match_ast! {
921 match parent {
922 ast::PathType(it) => make_path_kind_type(it.into()),
923 ast::PathExpr(it) => {
924 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
925
926 make_path_kind_expr(it.into())
927 },
928 ast::TupleStructPat(it) => {
929 path_ctx.has_call_parens = true;
930 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
931 },
932 ast::RecordPat(it) => {
933 path_ctx.has_call_parens = true;
934 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
935 },
936 ast::PathPat(it) => {
937 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
938 },
939 ast::MacroCall(it) => {
940 kind_macro_call(it)?
941 },
942 ast::Meta(meta) => make_path_kind_attr(meta)?,
943 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
944 ast::UseTree(_) => PathKind::Use,
945 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
946 _ => return None,
947 }
948 }
949 },
950 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
951 _ => return None,
952 }
953 };
954
955 path_ctx.kind = kind;
956 path_ctx.has_type_args = segment.generic_arg_list().is_some();
957
958 // calculate the qualifier context
959 if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
960 path_ctx.use_tree_parent = use_tree_parent;
961 if !use_tree_parent && segment.coloncolon_token().is_some() {
962 path_ctx.qualified = Qualified::Absolute;
963 } else {
964 let qualifier = qualifier
965 .segment()
966 .and_then(|it| find_node_in_file(original_file, &it))
967 .map(|it| it.parent_path());
968 if let Some(qualifier) = qualifier {
969 let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
970 Some(ast::PathSegmentKind::Type {
971 type_ref: Some(type_ref),
972 trait_ref,
973 }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
974 _ => None,
975 };
976
977 path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
978 let ty = match ty {
979 ast::Type::InferType(_) => None,
980 ty => sema.resolve_type(&ty),
981 };
982 let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
983 Qualified::TypeAnchor { ty, trait_ }
984 } else {
985 let res = sema.resolve_path(&qualifier);
986
987 // For understanding how and why super_chain_len is calculated the way it
988 // is check the documentation at it's definition
989 let mut segment_count = 0;
990 let super_count =
991 iter::successors(Some(qualifier.clone()), |p| p.qualifier())
992 .take_while(|p| {
993 p.segment()
994 .and_then(|s| {
995 segment_count += 1;
996 s.super_token()
997 })
998 .is_some()
999 })
1000 .count();
1001
1002 let super_chain_len =
1003 if segment_count > super_count { None } else { Some(super_count) };
1004
1005 Qualified::With { path: qualifier, resolution: res, super_chain_len }
1006 }
1007 };
1008 }
1009 } else if let Some(segment) = path.segment() {
1010 if segment.coloncolon_token().is_some() {
1011 path_ctx.qualified = Qualified::Absolute;
1012 }
1013 }
1014
1015 let mut qualifier_ctx = QualifierCtx::default();
1016 if path_ctx.is_trivial_path() {
1017 // fetch the full expression that may have qualifiers attached to it
1018 let top_node = match path_ctx.kind {
1019 PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
1020 parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1021 let parent = p.parent()?;
1022 if ast::StmtList::can_cast(parent.kind()) {
1023 Some(p)
1024 } else if ast::ExprStmt::can_cast(parent.kind()) {
1025 Some(parent)
1026 } else {
1027 None
1028 }
1029 })
1030 }
1031 PathKind::Item { .. } => {
1032 parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
1033 }
1034 _ => None,
1035 };
1036 if let Some(top) = top_node {
1037 if let Some(NodeOrToken::Node(error_node)) =
1038 syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1039 {
1040 if error_node.kind() == SyntaxKind::ERROR {
1041 qualifier_ctx.unsafe_tok = error_node
1042 .children_with_tokens()
1043 .filter_map(NodeOrToken::into_token)
1044 .find(|it| it.kind() == T![unsafe]);
1045 qualifier_ctx.vis_node =
1046 error_node.children().find_map(ast::Visibility::cast);
1047 }
1048 }
1049
1050 if let PathKind::Item { .. } = path_ctx.kind {
1051 if qualifier_ctx.none() {
1052 if let Some(t) = top.first_token() {
1053 if let Some(prev) = t
1054 .prev_token()
1055 .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1056 {
1057 if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
1058 // This was inferred to be an item position path, but it seems
1059 // to be part of some other broken node which leaked into an item
1060 // list
1061 return None;
1062 }
1063 }
1064 }
1065 }
1066 }
1067 }
1068 }
1069 Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
1070 }
1071}
1072
/// Computes the [`PatternContext`] for a pattern at the completion position:
/// its refutability, whether it has a type ascription, an enclosing function or
/// closure parameter (if any), `ref`/`mut` tokens, the parent pattern and the
/// immediately enclosing `impl`.
fn pattern_context_for(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    pat: ast::Pat,
) -> PatternContext {
    let mut param_ctx = None;
    // Walk up past all nested patterns to the first non-pattern ancestor; that
    // node decides refutability and whether a type ascription is present.
    let (refutability, has_type_ascription) =
    pat
       .syntax()
       .ancestors()
       .skip_while(|it| ast::Pat::can_cast(it.kind()))
       .next()
       .map_or((PatternRefutability::Irrefutable, false), |node| {
           let refutability = match_ast! {
               match node {
                   // `let` bindings are irrefutable; ascription comes from the optional `: Ty`.
                   ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
                   ast::Param(param) => {
                       let has_type_ascription = param.ty().is_some();
                       // Try to resolve the surrounding parameter list back in the
                       // original (non-speculative) file to learn whether this is a
                       // function or closure parameter.
                       param_ctx = (|| {
                           let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
                           let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
                           let param_list_owner = param_list.syntax().parent()?;
                           let kind = match_ast! {
                               match param_list_owner {
                                   ast::ClosureExpr(closure) => ParamKind::Closure(closure),
                                   ast::Fn(fn_) => ParamKind::Function(fn_),
                                   _ => return None,
                               }
                           };
                           Some(ParamContext {
                               param_list, param, kind
                           })
                       })();
                       return (PatternRefutability::Irrefutable, has_type_ascription)
                   },
                   // Only match arms and `let`-expressions (if/while-let chains) admit refutable patterns.
                   ast::MatchArm(_) => PatternRefutability::Refutable,
                   ast::LetExpr(_) => PatternRefutability::Refutable,
                   ast::ForExpr(_) => PatternRefutability::Irrefutable,
                   _ => PatternRefutability::Irrefutable,
               }
           };
           (refutability, false)
       });
    // `ref`/`mut` are only observable on identifier patterns.
    let (ref_token, mut_token) = match &pat {
        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
        _ => (None, None),
    };

    PatternContext {
        refutability,
        param_ctx,
        has_type_ascription,
        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
        mut_token,
        ref_token,
        // NOTE(review): record_pat appears to be filled in by callers when relevant — confirm.
        record_pat: None,
        impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
    }
}
1132
1133fn fetch_immediate_impl(
1134 sema: &Semantics<'_, RootDatabase>,
1135 original_file: &SyntaxNode,
1136 node: &SyntaxNode,
1137) -> Option<ast::Impl> {
1138 let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1139 .filter_map(ast::Item::cast)
1140 .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1141
1142 match ancestors.next()? {
1143 ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1144 ast::Item::Impl(it) => return Some(it),
1145 _ => return None,
1146 }
1147 match ancestors.next()? {
1148 ast::Item::Impl(it) => Some(it),
1149 _ => None,
1150 }
1151}
1152
1153/// Attempts to find `node` inside `syntax` via `node`'s text range.
1154/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1155fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1156 find_node_in_file(syntax, &node?)
1157}
1158
1159/// Attempts to find `node` inside `syntax` via `node`'s text range.
1160/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1161fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1162 let syntax_range = syntax.text_range();
1163 let range = node.syntax().text_range();
1164 let intersection = range.intersect(syntax_range)?;
1165 syntax.covering_element(intersection).ancestors().find_map(N::cast)
1166}
1167
1168/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1169/// for the offset introduced by the fake ident.
1170/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1171fn find_node_in_file_compensated<N: AstNode>(
1172 sema: &Semantics<'_, RootDatabase>,
1173 in_file: &SyntaxNode,
1174 node: &N,
1175) -> Option<N> {
1176 ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1177}
1178
1179fn ancestors_in_file_compensated<'sema>(
1180 sema: &'sema Semantics<'_, RootDatabase>,
1181 in_file: &SyntaxNode,
1182 node: &SyntaxNode,
1183) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1184 let syntax_range = in_file.text_range();
1185 let range = node.text_range();
1186 let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1187 if end < range.start() {
1188 return None;
1189 }
1190 let range = TextRange::new(range.start(), end);
1191 // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1192 let intersection = range.intersect(syntax_range)?;
1193 let node = match in_file.covering_element(intersection) {
1194 NodeOrToken::Node(node) => node,
1195 NodeOrToken::Token(tok) => tok.parent()?,
1196 };
1197 Some(sema.ancestors_with_macros(node))
1198}
1199
1200/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1201/// for the offset introduced by the fake ident..
1202/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1203fn find_opt_node_in_file_compensated<N: AstNode>(
1204 sema: &Semantics<'_, RootDatabase>,
1205 syntax: &SyntaxNode,
1206 node: Option<N>,
1207) -> Option<N> {
1208 find_node_in_file_compensated(sema, syntax, &node?)
1209}
1210
1211fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1212 if let Some(qual) = path.qualifier() {
1213 return Some((qual, false));
1214 }
1215 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1216 let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1217 Some((use_tree.path()?, true))
1218}
1219
/// Returns `true` when `element` is a token sitting at the `in` position of a
/// `for` loop, including recovered/incomplete parses where no `in` keyword
/// exists yet.
pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
    // oh my ... (parser recovery around `for` makes this awkward to check directly)
    (|| {
        let syntax_token = element.into_token()?;
        let range = syntax_token.text_range();
        let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;

        // check if the current token is the `in` token of a for loop
        if let Some(token) = for_expr.in_token() {
            return Some(syntax_token == token);
        }
        // No `in` token was parsed; decide whether the token occupies the slot
        // right after the loop pattern where `in` would go.
        let pat = for_expr.pat()?;
        if range.end() < pat.syntax().text_range().end() {
            // if we are inside or before the pattern we can't be at the `in` token position
            return None;
        }
        let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
        Some(match next_sibl {
            // the loop body is some node, if our token is at the start we are at the `in` position,
            // otherwise we could be in a recovered expression, we don't wanna ruin completions there
            syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
            // the loop body consists of a single token, if we are this we are certainly at the `in` token position
            syntax::NodeOrToken::Token(t) => t == syntax_token,
        })
    })()
    .unwrap_or(false)
}
1247
// Regression test: with a recovered parse of `for i i$0`, the cursor token
// must be recognized as occupying the `in`-keyword position.
#[test]
fn test_for_is_prev2() {
    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
}
1252
1253pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
1254 node.ancestors()
1255 .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
1256 .find_map(|it| {
1257 let loop_body = match_ast! {
1258 match it {
1259 ast::ForExpr(it) => it.loop_body(),
1260 ast::WhileExpr(it) => it.loop_body(),
1261 ast::LoopExpr(it) => it.loop_body(),
1262 _ => None,
1263 }
1264 };
1265 loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
1266 })
1267 .is_some()
1268}
1269
1270fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
1271 let mut token = match e.into() {
1272 SyntaxElement::Node(n) => n.first_token()?,
1273 SyntaxElement::Token(t) => t,
1274 }
1275 .prev_token();
1276 while let Some(inner) = token {
1277 if !inner.kind().is_trivia() {
1278 return Some(inner);
1279 } else {
1280 token = inner.prev_token();
1281 }
1282 }
1283 None
1284}
1285
1286fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
1287 let mut e = ele.next_sibling_or_token();
1288 while let Some(inner) = e {
1289 if !inner.kind().is_trivia() {
1290 return Some(inner);
1291 } else {
1292 e = inner.next_sibling_or_token();
1293 }
1294 }
1295 None
1296}