//! Type inference for expressions.

use std::{
    collections::hash_map::Entry,
    iter::{repeat, repeat_with},
    mem,
};

use chalk_ir::{
    cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
};
use hir_def::{
    expr::{
        ArithOp, Array, BinaryOp, ClosureKind, CmpOp, Expr, ExprId, LabelId, Literal, Statement,
        UnaryOp,
    },
    generics::TypeOrConstParamData,
    path::{GenericArg, GenericArgs},
    resolver::resolver_for_expr,
    ConstParamId, FieldId, ItemContainerId, Lookup,
};
use hir_expand::name::Name;
use stdx::always;
use syntax::ast::RangeOp;

use crate::{
    autoderef::{self, Autoderef},
    consteval,
    infer::{coerce::CoerceMany, find_continuable, BreakableKind},
    lower::{
        const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
    },
    mapping::{from_chalk, ToChalk},
    method_resolution::{self, lang_names_for_bin_op, VisibleFromModule},
    primitive::{self, UintTy},
    static_lifetime, to_chalk_trait_id,
    utils::{generics, Generics},
    AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
    Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
};

use super::{
    coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
    Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
};

impl<'a> InferenceContext<'a> {
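    /// Infers the type of `tgt_expr` and, if it does not unify with the
    /// expected type, records a `TypeMismatch` (no coercion is attempted here;
    /// see `infer_expr_coerce` for the coercing variant).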
    pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
        let ty = self.infer_expr_inner(tgt_expr, expected);
        if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
            let could_unify = self.unify(&ty, &expected_ty);
            if !could_unify {
                self.result.type_mismatches.insert(
                    tgt_expr.into(),
                    TypeMismatch { expected: expected_ty, actual: ty.clone() },
                );
            }
        }
        ty
    }

    /// Infer type of expression with possibly implicit coerce to the expected type.
    /// Return the type after possible coercion.
    pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
        let ty = self.infer_expr_inner(expr, expected);
        if let Some(target) = expected.only_has_type(&mut self.table) {
            match self.coerce(Some(expr), &ty, &target) {
                Ok(res) => res,
                Err(_) => {
                    self.result.type_mismatches.insert(
                        expr.into(),
                        TypeMismatch { expected: target.clone(), actual: ty.clone() },
                    );
                    target
                }
            }
        } else {
            ty
        }
    }

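    /// The expression inference worker: dispatches on the expression kind,
    /// computes a type, writes it into the inference result, and updates
    /// divergence tracking (`self.diverges`).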
    fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
        self.db.unwind_if_cancelled();

        let ty = match &self.body[tgt_expr] {
            Expr::Missing => self.err_ty(),
            &Expr::If { condition, then_branch, else_branch } => {
                self.infer_expr(
                    condition,
                    &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
                );

                let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                let mut both_arms_diverge = Diverges::Always;

                let result_ty = self.table.new_type_var();
                let then_ty = self.infer_expr_inner(then_branch, expected);
                both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
                let mut coerce = CoerceMany::new(result_ty);
                coerce.coerce(self, Some(then_branch), &then_ty);
                let else_ty = match else_branch {
                    Some(else_branch) => self.infer_expr_inner(else_branch, expected),
                    None => TyBuilder::unit(),
                };
                both_arms_diverge &= self.diverges;
                // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
                coerce.coerce(self, else_branch, &else_ty);

                self.diverges = condition_diverges | both_arms_diverge;

                coerce.complete()
            }
            &Expr::Let { pat, expr } => {
                let input_ty = self.infer_expr(expr, &Expectation::none());
                self.infer_pat(pat, &input_ty, BindingMode::default());
                TyKind::Scalar(Scalar::Bool).intern(Interner)
            }
            Expr::Block { statements, tail, label, id: _ } => {
                let old_resolver = mem::replace(
                    &mut self.resolver,
                    resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
                );
                let ty = match label {
                    Some(_) => {
                        let break_ty = self.table.new_type_var();
                        let (breaks, ty) = self.with_breakable_ctx(
                            BreakableKind::Block,
                            break_ty.clone(),
                            *label,
                            |this| {
                                this.infer_block(
                                    tgt_expr,
                                    statements,
                                    *tail,
                                    &Expectation::has_type(break_ty),
                                )
                            },
                        );
                        breaks.unwrap_or(ty)
                    }
                    None => self.infer_block(tgt_expr, statements, *tail, expected),
                };
                self.resolver = old_resolver;
                ty
            }
            Expr::Unsafe { body } => self.infer_expr(*body, expected),
            Expr::Const { body } => {
                self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
                    this.infer_expr(*body, expected)
                })
                .1
            }
            Expr::TryBlock { body } => {
                self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
                    let _inner = this.infer_expr(*body, expected);
                });
                // FIXME should be std::result::Result<{inner}, _>
                self.err_ty()
            }
            Expr::Async { body } => {
                let ret_ty = self.table.new_type_var();
                let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());

                let (_, inner_ty) =
                    self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
                        this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty))
                    });

                self.diverges = prev_diverges;
                self.return_ty = prev_ret_ty;

                // Use the first type parameter as the output type of future.
                // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
                let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
                let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
                TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
                    .intern(Interner)
            }
            &Expr::Loop { body, label } => {
                let ty = self.table.new_type_var();
                let (breaks, ()) =
                    self.with_breakable_ctx(BreakableKind::Loop, ty, label, |this| {
                        this.infer_expr(body, &Expectation::has_type(TyBuilder::unit()));
                    });

                match breaks {
                    Some(breaks) => {
                        self.diverges = Diverges::Maybe;
                        breaks
                    }
                    None => TyKind::Never.intern(Interner),
                }
            }
            &Expr::While { condition, body, label } => {
                self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
                    this.infer_expr(
                        condition,
                        &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
                    );
                    this.infer_expr(body, &Expectation::has_type(TyBuilder::unit()));
                });

                // the body may not run, so it diverging doesn't mean we diverge
                self.diverges = Diverges::Maybe;
                TyBuilder::unit()
            }
            &Expr::For { iterable, body, pat, label } => {
                let iterable_ty = self.infer_expr(iterable, &Expectation::none());
                let into_iter_ty =
                    self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
                let pat_ty =
                    self.resolve_associated_type(into_iter_ty, self.resolve_iterator_item());

                self.infer_pat(pat, &pat_ty, BindingMode::default());
                self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
                    this.infer_expr(body, &Expectation::has_type(TyBuilder::unit()));
                });

                // the body may not run, so it diverging doesn't mean we diverge
                self.diverges = Diverges::Maybe;
                TyBuilder::unit()
            }
            Expr::Closure { body, args, ret_type, arg_types, closure_kind } => {
                assert_eq!(args.len(), arg_types.len());

                let mut sig_tys = Vec::new();

                // collect explicitly written argument types
                for arg_type in arg_types.iter() {
                    let arg_ty = match arg_type {
                        Some(type_ref) => self.make_ty(type_ref),
                        None => self.table.new_type_var(),
                    };
                    sig_tys.push(arg_ty);
                }

                // add return type
                let ret_ty = match ret_type {
                    Some(type_ref) => self.make_ty(type_ref),
                    None => self.table.new_type_var(),
                };
                sig_tys.push(ret_ty.clone());
                let sig_ty = TyKind::Function(FnPointer {
                    num_binders: 0,
                    sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
                    substitution: FnSubst(
                        Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
                    ),
                })
                .intern(Interner);

                let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) {
                    // FIXME: report error when there are more than 1 parameter.
                    let resume_ty = match sig_tys.first() {
                        // When `sig_tys.len() == 1` the first type is the return type, not the
                        // first parameter type.
                        Some(ty) if sig_tys.len() > 1 => ty.clone(),
                        _ => self.result.standard_types.unit.clone(),
                    };
                    let yield_ty = self.table.new_type_var();

                    let subst = TyBuilder::subst_for_generator(self.db, self.owner)
                        .push(resume_ty.clone())
                        .push(yield_ty.clone())
                        .push(ret_ty.clone())
                        .build();

                    let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
                    let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);

                    (generator_ty, Some((resume_ty, yield_ty)))
                } else {
                    let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
                    let closure_ty =
                        TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
                            .intern(Interner);

                    (closure_ty, None)
                };

                // Eagerly try to relate the closure type with the expected
                // type, otherwise we often won't have enough information to
                // infer the body.
                self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected);

                // Now go through the argument patterns
                for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
                    self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
                }

                let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
                let prev_resume_yield_tys =
                    mem::replace(&mut self.resume_yield_tys, resume_yield_tys);

                self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
                    this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
                });

                self.diverges = prev_diverges;
                self.return_ty = prev_ret_ty;
                self.resume_yield_tys = prev_resume_yield_tys;

                ty
            }
            Expr::Call { callee, args, .. } => {
                let callee_ty = self.infer_expr(*callee, &Expectation::none());
                let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
                let mut res = None;
                let mut derefed_callee = callee_ty.clone();
                // manual loop to be able to access `derefs.table`
                while let Some((callee_deref_ty, _)) = derefs.next() {
                    res = derefs.table.callable_sig(&callee_deref_ty, args.len());
                    if res.is_some() {
                        derefed_callee = callee_deref_ty;
                        break;
                    }
                }
                // if the function is unresolved, we use is_varargs=true to
                // suppress the arg count diagnostic here
                let is_varargs =
                    derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
                        || res.is_none();
                let (param_tys, ret_ty) = match res {
                    Some(res) => {
                        let adjustments = auto_deref_adjust_steps(&derefs);
                        self.write_expr_adj(*callee, adjustments);
                        res
                    }
                    None => (Vec::new(), self.err_ty()), // FIXME diagnostic
                };
                let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
                self.register_obligations_for_call(&callee_ty);

                let expected_inputs = self.expected_inputs_for_expected_output(
                    expected,
                    ret_ty.clone(),
                    param_tys.clone(),
                );

                self.check_call_arguments(
                    tgt_expr,
                    args,
                    &expected_inputs,
                    &param_tys,
                    &indices_to_skip,
                    is_varargs,
                );
                self.normalize_associated_types_in(ret_ty)
            }
            Expr::MethodCall { receiver, args, method_name, generic_args } => self
                .infer_method_call(
                    tgt_expr,
                    *receiver,
                    args,
                    method_name,
                    generic_args.as_deref(),
                    expected,
                ),
            Expr::Match { expr, arms } => {
                let input_ty = self.infer_expr(*expr, &Expectation::none());

                let expected = expected.adjust_for_branches(&mut self.table);

                let result_ty = if arms.is_empty() {
                    TyKind::Never.intern(Interner)
                } else {
                    match &expected {
                        Expectation::HasType(ty) => ty.clone(),
                        _ => self.table.new_type_var(),
                    }
                };
                let mut coerce = CoerceMany::new(result_ty);

                let matchee_diverges = self.diverges;
                let mut all_arms_diverge = Diverges::Always;

                for arm in arms.iter() {
                    self.diverges = Diverges::Maybe;
                    let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
                    if let Some(guard_expr) = arm.guard {
                        self.infer_expr(
                            guard_expr,
                            &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
                        );
                    }

                    let arm_ty = self.infer_expr_inner(arm.expr, &expected);
                    all_arms_diverge &= self.diverges;
                    coerce.coerce(self, Some(arm.expr), &arm_ty);
                }

                self.diverges = matchee_diverges | all_arms_diverge;

                coerce.complete()
            }
            Expr::Path(p) => {
                // FIXME this could be more efficient...
                let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
                self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
            }
            Expr::Continue { label } => {
                if let None = find_continuable(&mut self.breakables, label.as_ref()) {
                    self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
                        expr: tgt_expr,
                        is_break: false,
                    });
                };
                TyKind::Never.intern(Interner)
            }
            Expr::Break { expr, label } => {
                let val_ty = if let Some(expr) = *expr {
                    self.infer_expr(expr, &Expectation::none())
                } else {
                    TyBuilder::unit()
                };

                match find_breakable(&mut self.breakables, label.as_ref()) {
                    Some(ctxt) => {
                        // avoiding the borrowck
                        let mut coerce = mem::replace(
                            &mut ctxt.coerce,
                            CoerceMany::new(self.result.standard_types.unknown.clone()),
                        );

                        // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
                        coerce.coerce(self, *expr, &val_ty);

                        let ctxt = find_breakable(&mut self.breakables, label.as_ref())
                            .expect("breakable stack changed during coercion");
                        ctxt.coerce = coerce;
                        ctxt.may_break = true;
                    }
                    None => {
                        self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
                            expr: tgt_expr,
                            is_break: true,
                        });
                    }
                }
                TyKind::Never.intern(Interner)
            }
            Expr::Return { expr } => {
                if let Some(expr) = expr {
                    self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
                } else {
                    let unit = TyBuilder::unit();
                    let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
                }
                TyKind::Never.intern(Interner)
            }
            Expr::Yield { expr } => {
                if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
                    if let Some(expr) = expr {
                        self.infer_expr_coerce(*expr, &Expectation::has_type(yield_ty));
                    } else {
                        let unit = self.result.standard_types.unit.clone();
                        let _ = self.coerce(Some(tgt_expr), &unit, &yield_ty);
                    }
                    resume_ty
                } else {
                    // FIXME: report error (yield expr in non-generator)
                    TyKind::Error.intern(Interner)
                }
            }
            Expr::RecordLit { path, fields, spread, .. } => {
                let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
                if let Some(variant) = def_id {
                    self.write_variant_resolution(tgt_expr.into(), variant);
                }

                if let Some(t) = expected.only_has_type(&mut self.table) {
                    self.unify(&ty, &t);
                }

                let substs = ty
                    .as_adt()
                    .map(|(_, s)| s.clone())
                    .unwrap_or_else(|| Substitution::empty(Interner));
                let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
                let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
                for field in fields.iter() {
                    let field_def =
                        variant_data.as_ref().and_then(|it| match it.field(&field.name) {
                            Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
                            None => {
                                self.push_diagnostic(InferenceDiagnostic::NoSuchField {
                                    expr: field.expr,
                                });
                                None
                            }
                        });
                    let field_ty = field_def.map_or(self.err_ty(), |it| {
                        field_types[it.local_id].clone().substitute(Interner, &substs)
                    });
                    self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
                }
                if let Some(expr) = spread {
                    self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
                }
                ty
            }
            Expr::Field { expr, name } => {
                let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());

                let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
                let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
                    let (field_id, parameters) = match derefed_ty.kind(Interner) {
                        TyKind::Tuple(_, substs) => {
                            return name.as_tuple_index().and_then(|idx| {
                                substs
                                    .as_slice(Interner)
                                    .get(idx)
                                    .map(|a| a.assert_ty_ref(Interner))
                                    .cloned()
                            });
                        }
                        TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
                            let local_id = self.db.struct_data(*s).variant_data.field(name)?;
                            let field = FieldId { parent: (*s).into(), local_id };
                            (field, parameters.clone())
                        }
                        TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
                            let local_id = self.db.union_data(*u).variant_data.field(name)?;
                            let field = FieldId { parent: (*u).into(), local_id };
                            (field, parameters.clone())
                        }
                        _ => return None,
                    };
                    let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
                        .is_visible_from(self.db.upcast(), self.resolver.module());
                    if !is_visible {
                        // Write down the first field resolution even if it is not visible
                        // This aids IDE features for private fields like goto def and in
                        // case of autoderef finding an applicable field, this will be
                        // overwritten in a following cycle
                        if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr)
                        {
                            entry.insert(field_id);
                        }
                        return None;
                    }
                    // can't have `write_field_resolution` here because `self.table` is borrowed :(
                    self.result.field_resolutions.insert(tgt_expr, field_id);
                    let ty = self.db.field_types(field_id.parent)[field_id.local_id]
                        .clone()
                        .substitute(Interner, &parameters);
                    Some(ty)
                });
                let ty = match ty {
                    Some(ty) => {
                        let adjustments = auto_deref_adjust_steps(&autoderef);
                        self.write_expr_adj(*expr, adjustments);
                        let ty = self.insert_type_vars(ty);
                        let ty = self.normalize_associated_types_in(ty);
                        ty
                    }
                    _ => self.err_ty(),
                };
                ty
            }
            Expr::Await { expr } => {
                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
            }
            Expr::Try { expr } => {
                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
            }
            Expr::Cast { expr, type_ref } => {
                // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
                let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                let cast_ty = self.make_ty(type_ref);
                // FIXME check the cast...
                cast_ty
            }
            Expr::Ref { expr, rawness, mutability } => {
                let mutability = lower_to_chalk_mutability(*mutability);
                let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = expected
                    .only_has_type(&mut self.table)
                    .as_ref()
                    .and_then(|t| t.as_reference_or_ptr())
                {
                    if exp_mutability == Mutability::Mut && mutability == Mutability::Not {
                        // FIXME: record type error - expected mut reference but found shared ref,
                        // which cannot be coerced
                    }
                    if exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
                        // FIXME: record type error - expected reference but found ptr,
                        // which cannot be coerced
                    }
                    Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
                } else {
                    Expectation::none()
                };
                let inner_ty = self.infer_expr_inner(*expr, &expectation);
                match rawness {
                    Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
                    Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
                }
                .intern(Interner)
            }
            &Expr::Box { expr } => self.infer_expr_box(expr, expected),
            Expr::UnaryOp { expr, op } => {
                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                let inner_ty = self.resolve_ty_shallow(&inner_ty);
                match op {
                    UnaryOp::Deref => {
                        autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
                    }
                    UnaryOp::Neg => {
                        match inner_ty.kind(Interner) {
                            // Fast path for builtins
                            TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
                            | TyKind::InferenceVar(
                                _,
                                TyVariableKind::Integer | TyVariableKind::Float,
                            ) => inner_ty,
                            // Otherwise we resolve via the std::ops::Neg trait
                            _ => self
                                .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
                        }
                    }
                    UnaryOp::Not => {
                        match inner_ty.kind(Interner) {
                            // Fast path for builtins
                            TyKind::Scalar(Scalar::Bool | Scalar::Int(_) | Scalar::Uint(_))
                            | TyKind::InferenceVar(_, TyVariableKind::Integer) => inner_ty,
                            // Otherwise we resolve via the std::ops::Not trait
                            _ => self
                                .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
                        }
                    }
                }
            }
            Expr::BinaryOp { lhs, rhs, op } => match op {
                Some(BinaryOp::Assignment { op: None }) => {
                    let lhs = *lhs;
                    let is_ordinary = match &self.body[lhs] {
                        Expr::Array(_)
                        | Expr::RecordLit { .. }
                        | Expr::Tuple { .. }
                        | Expr::Underscore => false,
                        Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)),
                        _ => true,
                    };

                    // In ordinary (non-destructuring) assignments, the type of
                    // `lhs` must be inferred first so that the ADT fields
                    // instantiations in RHS can be coerced to it. Note that this
                    // cannot happen in destructuring assignments because of how
                    // they are desugared.
                    if is_ordinary {
                        let lhs_ty = self.infer_expr(lhs, &Expectation::none());
                        self.infer_expr_coerce(*rhs, &Expectation::has_type(lhs_ty));
                    } else {
                        let rhs_ty = self.infer_expr(*rhs, &Expectation::none());
                        self.infer_assignee_expr(lhs, &rhs_ty);
                    }
                    self.result.standard_types.unit.clone()
                }
                Some(BinaryOp::LogicOp(_)) => {
                    let bool_ty = self.result.standard_types.bool_.clone();
                    self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty.clone()));
                    let lhs_diverges = self.diverges;
                    self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty.clone()));
                    // Depending on the LHS' value, the RHS can never execute.
                    self.diverges = lhs_diverges;
                    bool_ty
                }
                Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
                _ => self.err_ty(),
            },
            Expr::Range { lhs, rhs, range_type } => {
                let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
                let rhs_expect = lhs_ty
                    .as_ref()
                    .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
                let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
                match (range_type, lhs_ty, rhs_ty) {
                    (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
                        Some(adt) => TyBuilder::adt(self.db, adt).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
                        Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Inclusive, None, Some(ty)) => {
                        match self.resolve_range_to_inclusive() {
                            Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                            None => self.err_ty(),
                        }
                    }
                    (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
                        Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Inclusive, Some(_), Some(ty)) => {
                        match self.resolve_range_inclusive() {
                            Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                            None => self.err_ty(),
                        }
                    }
                    (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
                        Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
                        None => self.err_ty(),
                    },
                    (RangeOp::Inclusive, _, None) => self.err_ty(),
                }
            }
            Expr::Index { base, index } => {
                let base_ty = self.infer_expr_inner(*base, &Expectation::none());
                let index_ty = self.infer_expr(*index, &Expectation::none());

                if let Some(index_trait) = self.resolve_ops_index() {
                    let canonicalized = self.canonicalize(base_ty.clone());
                    let receiver_adjustments = method_resolution::resolve_indexing_op(
                        self.db,
                        self.trait_env.clone(),
                        canonicalized.value,
                        index_trait,
                    );
                    let (self_ty, adj) = receiver_adjustments
                        .map_or((self.err_ty(), Vec::new()), |adj| {
                            adj.apply(&mut self.table, base_ty)
                        });
                    self.write_expr_adj(*base, adj);
                    self.resolve_associated_type_with_params(
                        self_ty,
                        self.resolve_ops_index_output(),
                        &[GenericArgData::Ty(index_ty).intern(Interner)],
                    )
                } else {
                    self.err_ty()
                }
            }
            Expr::Tuple { exprs, .. } => {
                let mut tys = match expected
                    .only_has_type(&mut self.table)
                    .as_ref()
                    .map(|t| t.kind(Interner))
                {
                    Some(TyKind::Tuple(_, substs)) => substs
                        .iter(Interner)
                        .map(|a| a.assert_ty_ref(Interner).clone())
                        .chain(repeat_with(|| self.table.new_type_var()))
                        .take(exprs.len())
                        .collect::<Vec<_>>(),
                    _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
                };

                for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
                    self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
                }

                TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
            }
            Expr::Array(array) => {
                let elem_ty =
                    match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
                        Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
                        _ => self.table.new_type_var(),
                    };
                let mut coerce = CoerceMany::new(elem_ty.clone());

                let expected = Expectation::has_type(elem_ty.clone());
                let len = match array {
                    Array::ElementList { elements, .. } => {
                        for &expr in elements.iter() {
                            let cur_elem_ty = self.infer_expr_inner(expr, &expected);
                            coerce.coerce(self, Some(expr), &cur_elem_ty);
                        }
                        consteval::usize_const(Some(elements.len() as u128))
                    }
                    &Array::Repeat { initializer, repeat } => {
                        self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
                        self.infer_expr(
                            repeat,
                            &Expectation::has_type(
                                TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
                            ),
                        );

                        if let Some(g_def) = self.owner.as_generic_def_id() {
                            let generics = generics(self.db.upcast(), g_def);
                            consteval::eval_to_const(
                                repeat,
                                ParamLoweringMode::Placeholder,
                                self,
                                || generics,
                                DebruijnIndex::INNERMOST,
                            )
                        } else {
                            consteval::usize_const(None)
                        }
                    }
                };

                TyKind::Array(coerce.complete(), len).intern(Interner)
            }
            Expr::Literal(lit) => match lit {
                Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
                Literal::String(..) => {
                    TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
                        .intern(Interner)
                }
                Literal::ByteString(bs) => {
                    let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);

                    let len = consteval::usize_const(Some(bs.len() as u128));

                    let array_type = TyKind::Array(byte_type, len).intern(Interner);
                    TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
                }
                Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
                Literal::Int(_v, ty) => match ty {
                    Some(int_ty) => {
                        TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
                            .intern(Interner)
                    }
                    None => self.table.new_integer_var(),
                },
                Literal::Uint(_v, ty) => match ty {
                    Some(int_ty) => {
                        TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
                            .intern(Interner)
                    }
                    None => self.table.new_integer_var(),
                },
                Literal::Float(_v, ty) => match ty {
                    Some(float_ty) => {
                        TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
                            .intern(Interner)
                    }
                    None => self.table.new_float_var(),
                },
            },
            Expr::Underscore => {
                // Underscore expressions may only appear in assignee expressions,
                // which are handled by `infer_assignee_expr()`, so any underscore
                // expression reaching this branch is an error.
                self.err_ty()
            }
        };
        // use a new type variable if we got unknown here
        let ty = self.insert_type_vars_shallow(ty);
        self.write_expr_ty(tgt_expr, ty.clone());
        if self.resolve_ty_shallow(&ty).is_never() {
            // Any expression that produces a value of type `!` must have diverged
            self.diverges = Diverges::Always;
        }
        ty
    }

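    /// Infers a `box expr` expression. If the expectation is already a
    /// `Box<T>` for the resolved `Box` lang item, the inner expression is
    /// inferred against `T`.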
    fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
        if let Some(box_id) = self.resolve_boxed_box() {
            let table = &mut self.table;
            let inner_exp = expected
                .to_option(table)
                .as_ref()
                .map(|e| e.as_adt())
                .flatten()
                .filter(|(e_adt, _)| e_adt == &box_id)
                .map(|(_, subts)| {
                    let g = subts.at(Interner, 0);
                    Expectation::rvalue_hint(table, Ty::clone(g.assert_ty_ref(Interner)))
                })
                .unwrap_or_else(Expectation::none);

            let inner_ty = self.infer_expr_inner(inner_expr, &inner_exp);
            TyBuilder::adt(self.db, box_id)
                .push(inner_ty)
                .fill_with_defaults(self.db, || self.table.new_type_var())
                .build()
        } else {
            self.err_ty()
        }
    }

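    /// Infers the left-hand side of a (possibly destructuring) assignment,
    /// e.g. the `(a, b)` in `(a, b) = rhs;`, treating it like a pattern and
    /// recording a type mismatch if it does not unify with `rhs_ty`.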
    pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
        let is_rest_expr = |expr| {
            matches!(
                &self.body[expr],
                Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive },
            )
        };

        let rhs_ty = self.resolve_ty_shallow(rhs_ty);

        let ty = match &self.body[lhs] {
            Expr::Tuple { exprs, .. } => {
                // We don't consider multiple ellipses. This is analogous to
                // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
                let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
                let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect();

                self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
            }
            Expr::Call { callee, args, .. } => {
                // Tuple structs
                let path = match &self.body[*callee] {
                    Expr::Path(path) => Some(path),
                    _ => None,
                };

                // We don't consider multiple ellipses. This is analogous to
                // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
                let ellipsis = args.iter().position(|e| is_rest_expr(*e));
                let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect();

                self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
            }
            Expr::Array(Array::ElementList { elements, .. }) => {
                let elem_ty = match rhs_ty.kind(Interner) {
                    TyKind::Array(st, _) => st.clone(),
                    _ => self.err_ty(),
                };

                // There's no need to handle `..` as it cannot be bound.
                let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e));

                for e in sub_exprs {
                    self.infer_assignee_expr(*e, &elem_ty);
                }

                match rhs_ty.kind(Interner) {
                    TyKind::Array(_, _) => rhs_ty.clone(),
                    // Even when `rhs_ty` is not an array type, this assignee
                    // expression is inferred to be an array (of unknown element
                    // type and length). This should not be just an error type,
                    // because we are to compute the unifiability of this type and
                    // `rhs_ty` in the end of this function to issue type mismatches.
                    _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
                        .intern(Interner),
                }
            }
            Expr::RecordLit { path, fields, .. } => {
                let subs = fields.iter().map(|f| (f.name.clone(), f.expr));

                self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
            }
            Expr::Underscore => rhs_ty.clone(),
            _ => {
                // `lhs` is a place expression, a unit struct, or an enum variant.
                let lhs_ty = self.infer_expr(lhs, &Expectation::none());

                // This is the only branch where this function may coerce any type.
                // We are returning early to avoid the unifiability check below.
                let lhs_ty = self.insert_type_vars_shallow(lhs_ty);
                let ty = match self.coerce(None, &rhs_ty, &lhs_ty) {
                    Ok(ty) => ty,
                    Err(_) => {
                        self.result.type_mismatches.insert(
                            lhs.into(),
                            TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() },
                        );
                        // `rhs_ty` is returned so no further type mismatches are
                        // reported because of this mismatch.
                        rhs_ty
                    }
                };
                self.write_expr_ty(lhs, ty.clone());
                return ty;
            }
        };

        let ty = self.insert_type_vars_shallow(ty);
        if !self.unify(&ty, &rhs_ty) {
            self.result
                .type_mismatches
                .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() });
        }
        self.write_expr_ty(lhs, ty.clone());
        ty
    }

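    /// Infers a binary operation that may be overloaded through an operator
    /// trait (resolved via its lang item); falls back to the built-in operator
    /// rules when no trait method is found.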
    fn infer_overloadable_binop(
        &mut self,
        lhs: ExprId,
        op: BinaryOp,
        rhs: ExprId,
        tgt_expr: ExprId,
    ) -> Ty {
        let lhs_expectation = Expectation::none();
        let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
        let rhs_ty = self.table.new_type_var();

        let trait_func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| {
            let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
            let func = self.db.trait_data(trait_id).method_by_name(&name)?;
            Some((trait_id, func))
        });
        let (trait_, func) = match trait_func {
            Some(it) => it,
            None => {
                let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
                let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
                return self
                    .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
                    .unwrap_or_else(|| self.err_ty());
            }
        };

        // HACK: We can use this substitution for the function because the function itself doesn't
        // have its own generic parameters.
        let subst = TyBuilder::subst_for_def(self.db, trait_, None)
            .push(lhs_ty.clone())
            .push(rhs_ty.clone())
            .build();
        self.write_method_resolution(tgt_expr, func, subst.clone());

        let method_ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
        self.register_obligations_for_call(&method_ty);

        self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()));

        let ret_ty = match method_ty.callable_sig(self.db) {
            Some(sig) => sig.ret().clone(),
            None => self.err_ty(),
        };

        let ret_ty = self.normalize_associated_types_in(ret_ty);

        // FIXME: record autoref adjustments

        // use knowledge of built-in binary ops, which can sometimes help inference
        if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
            self.unify(&builtin_rhs, &rhs_ty);
        }
        if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
            self.unify(&builtin_ret, &ret_ty);
        }

        ret_ty
    }

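    /// Infers a block body: handles each statement in order, then the tail
    /// expression (if any); a missing tail yields `()`, or a maybe-never
    /// variable when the block has already diverged.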
    fn infer_block(
        &mut self,
        expr: ExprId,
        statements: &[Statement],
        tail: Option<ExprId>,
        expected: &Expectation,
    ) -> Ty {
        for stmt in statements {
            match stmt {
                Statement::Let { pat, type_ref, initializer, else_branch } => {
                    let decl_ty = type_ref
                        .as_ref()
                        .map(|tr| self.make_ty(tr))
                        .unwrap_or_else(|| self.err_ty());

                    // Always use the declared type when specified
                    let mut ty = decl_ty.clone();

                    if let Some(expr) = initializer {
                        let actual_ty =
                            self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
                        if decl_ty.is_unknown() {
                            ty = actual_ty;
                        }
                    }

                    if let Some(expr) = else_branch {
                        self.infer_expr_coerce(
                            *expr,
                            &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
                        );
                    }

                    self.infer_pat(*pat, &ty, BindingMode::default());
                }
                Statement::Expr { expr, .. } => {
                    self.infer_expr(*expr, &Expectation::none());
                }
            }
        }

        if let Some(expr) = tail {
            self.infer_expr_coerce(expr, expected)
        } else {
            // Citing rustc: if there is no explicit tail expression,
            // that is typically equivalent to a tail expression
            // of `()` -- except if the block diverges. In that
            // case, there is no value supplied from the tail
            // expression (assuming there are no other breaks,
            // this implies that the type of the block will be
            // `!`).
            if self.diverges.is_always() {
                // we don't even make an attempt at coercion
                self.table.new_maybe_never_var()
            } else {
                if let Some(t) = expected.only_has_type(&mut self.table) {
                    if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
                        self.result.type_mismatches.insert(
                            expr.into(),
                            TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
                        );
                    }
                    t
                } else {
                    TyBuilder::unit()
                }
            }
        }
    }

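    /// Infers a method call: resolves the method on the adjusted receiver,
    /// records the resolution and receiver adjustments, then checks the
    /// remaining arguments against the method signature.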
    fn infer_method_call(
        &mut self,
        tgt_expr: ExprId,
        receiver: ExprId,
        args: &[ExprId],
        method_name: &Name,
        generic_args: Option<&GenericArgs>,
        expected: &Expectation,
    ) -> Ty {
        let receiver_ty = self.infer_expr(receiver, &Expectation::none());
        let canonicalized_receiver = self.canonicalize(receiver_ty.clone());

        let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());

        let resolved = method_resolution::lookup_method(
            &canonicalized_receiver.value,
            self.db,
            self.trait_env.clone(),
            &traits_in_scope,
            VisibleFromModule::Filter(self.resolver.module()),
            method_name,
        );
        let (receiver_ty, method_ty, substs) = match resolved {
            Some((adjust, func)) => {
                let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
                let generics = generics(self.db.upcast(), func.into());
                let substs = self.substs_for_method_call(generics, generic_args);
                self.write_expr_adj(receiver, adjustments);
                self.write_method_resolution(tgt_expr, func, substs.clone());
                (ty, self.db.value_ty(func.into()), substs)
            }
            None => (
                receiver_ty,
                Binders::empty(Interner, self.err_ty()),
                Substitution::empty(Interner),
            ),
        };
        let method_ty = method_ty.substitute(Interner, &substs);
        self.register_obligations_for_call(&method_ty);
        let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
            match method_ty.callable_sig(self.db) {
                Some(sig) => {
                    if !sig.params().is_empty() {
                        (
                            sig.params()[0].clone(),
                            sig.params()[1..].to_vec(),
                            sig.ret().clone(),
                            sig.is_varargs,
                        )
                    } else {
                        (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
                    }
                }
                None => (self.err_ty(), Vec::new(), self.err_ty(), true),
            };
        self.unify(&formal_receiver_ty, &receiver_ty);

        let expected_inputs =
            self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());

        self.check_call_arguments(tgt_expr, args, &expected_inputs, &param_tys, &[], is_varargs);
        self.normalize_associated_types_in(ret_ty)
    }

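    /// Inside a fudged inference probe, tries to unify the expectation with
    /// the callable's output type; on success, returns the input types with
    /// that extra information applied, otherwise an empty `Vec`.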
    fn expected_inputs_for_expected_output(
        &mut self,
        expected_output: &Expectation,
        output: Ty,
        inputs: Vec<Ty>,
    ) -> Vec<Ty> {
        if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
            self.table.fudge_inference(|table| {
                if table.try_unify(&expected_ty, &output).is_ok() {
                    table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
                        chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
                        chalk_ir::VariableKind::Lifetime => {
                            var.to_lifetime(Interner).cast(Interner)
                        }
                        chalk_ir::VariableKind::Const(ty) => {
                            var.to_const(Interner, ty).cast(Interner)
                        }
                    })
                } else {
                    Vec::new()
                }
            })
        } else {
            Vec::new()
        }
    }

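    /// Checks call arguments against the parameter types (and the
    /// expectation-derived input types), reporting an argument-count mismatch
    /// and per-argument type mismatches; non-closure arguments are checked
    /// before closures.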
    fn check_call_arguments(
        &mut self,
        expr: ExprId,
        args: &[ExprId],
        expected_inputs: &[Ty],
        param_tys: &[Ty],
        skip_indices: &[u32],
        is_varargs: bool,
    ) {
        if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
            self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
                call_expr: expr,
                expected: param_tys.len() + skip_indices.len(),
                found: args.len(),
            });
        }

        // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
        // We do this in a pretty awful way: first we type-check any arguments
        // that are not closures, then we type-check the closures. This is so
        // that we have more information about the types of arguments when we
        // type-check the functions. This isn't really the right way to do this.
        for &check_closures in &[false, true] {
            let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
            let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
            let expected_iter = expected_inputs
                .iter()
                .cloned()
                .chain(param_iter.clone().skip(expected_inputs.len()));
            for (idx, ((&arg, param_ty), expected_ty)) in
                args.iter().zip(param_iter).zip(expected_iter).enumerate()
            {
                let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
                if is_closure != check_closures {
                    continue;
                }

                while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
                    skip_indices.next();
                }
                if skip_indices.peek().copied() == Some(idx as u32) {
                    continue;
                }

                // the difference between param_ty and expected here is that
                // expected is the parameter when the expected *return* type is
                // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
                // the expected type is already `&[i32]`, whereas param_ty is
                // still an unbound type variable. We don't always want to force
                // the parameter to coerce to the expected type (for example in
                // `coerce_unsize_expected_type_4`).
                let param_ty = self.normalize_associated_types_in(param_ty);
                let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
                // infer with the expected type we have...
                let ty = self.infer_expr_inner(arg, &expected);

                // then coerce to either the expected type or just the formal parameter type
                let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
                    // if we are coercing to the expectation, unify with the
                    // formal parameter type to connect everything
                    self.unify(&ty, &param_ty);
                    ty
                } else {
                    param_ty
                };
                if !coercion_target.is_unknown() {
                    if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
                        self.result.type_mismatches.insert(
                            arg.into(),
                            TypeMismatch { expected: coercion_target, actual: ty.clone() },
                        );
                    }
                }
            }
        }
    }

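    /// Builds the substitution for a method call from the explicitly written
    /// generic arguments, filling everything else (including the parent
    /// impl/trait parameters) with fresh inference variables.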
    fn substs_for_method_call(
        &mut self,
        def_generics: Generics,
        generic_args: Option<&GenericArgs>,
    ) -> Substitution {
        let (parent_params, self_params, type_params, const_params, impl_trait_params) =
            def_generics.provenance_split();
        assert_eq!(self_params, 0); // method shouldn't have another Self param
        let total_len = parent_params + type_params + const_params + impl_trait_params;
        let mut substs = Vec::with_capacity(total_len);

        // handle provided arguments
        if let Some(generic_args) = generic_args {
            // if args are provided, it should be all of them, but we can't rely on that
            for (arg, kind_id) in generic_args
                .args
                .iter()
                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
                .take(type_params + const_params)
                .zip(def_generics.iter_id())
            {
                if let Some(g) = generic_arg_to_chalk(
                    self.db,
                    kind_id,
                    arg,
                    self,
                    |this, type_ref| this.make_ty(type_ref),
                    |this, c, ty| {
                        const_or_path_to_chalk(
                            this.db,
                            &this.resolver,
                            ty,
                            c,
                            ParamLoweringMode::Placeholder,
                            || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
                            DebruijnIndex::INNERMOST,
                        )
                    },
                ) {
                    substs.push(g);
                }
            }
        };

        // Handle everything else as unknown. This also handles generic arguments for the method's
        // parent (impl or trait), which should come after those for the method.
        for (id, data) in def_generics.iter().skip(substs.len()) {
            match data {
                TypeOrConstParamData::TypeParamData(_) => {
                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner))
                }
                TypeOrConstParamData::ConstParamData(_) => {
                    substs.push(
                        GenericArgData::Const(self.table.new_const_var(
                            self.db.const_param_ty(ConstParamId::from_unchecked(id)),
                        ))
                        .intern(Interner),
                    )
                }
            }
        }
        assert_eq!(substs.len(), total_len);
        Substitution::from_iter(Interner, substs)
    }

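    /// Registers the where-clause obligations of a called `FnDef`, plus the
    /// trait-implementation obligation when the callee is a trait method.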
    fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
        let callable_ty = self.resolve_ty_shallow(callable_ty);
        if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
            let def: CallableDefId = from_chalk(self.db, *fn_def);
            let generic_predicates = self.db.generic_predicates(def.into());
            for predicate in generic_predicates.iter() {
                let (predicate, binders) = predicate
                    .clone()
                    .substitute(Interner, parameters)
                    .into_value_and_skipped_binders();
                always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
                self.push_obligation(predicate.cast(Interner));
            }
            // add obligation for trait implementation, if this is a trait method
            match def {
                CallableDefId::FunctionId(f) => {
                    if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
                        // construct a TraitRef
                        let params_len = parameters.len(Interner);
                        let trait_params_len = generics(self.db.upcast(), trait_.into()).len();
                        let substs = Substitution::from_iter(
                            Interner,
                            // The generic parameters for the trait come after those for the
                            // function.
                            &parameters.as_slice(Interner)[params_len - trait_params_len..],
                        );
                        self.push_obligation(
                            TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
                                .cast(Interner),
                        );
                    }
                }
                CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
            }
        }
    }

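    /// Checks arguments that stand in for the callee's legacy const generics
    /// (the `#[rustc_legacy_const_generics]` attribute).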
    /// Returns the argument indices to skip.
    fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Box<[u32]> {
        let (func, subst) = match callee.kind(Interner) {
            TyKind::FnDef(fn_id, subst) => {
                let callable = CallableDefId::from_chalk(self.db, *fn_id);
                let func = match callable {
                    CallableDefId::FunctionId(f) => f,
                    _ => return Default::default(),
                };
                (func, subst)
            }
            _ => return Default::default(),
        };

        let data = self.db.function_data(func);
        if data.legacy_const_generics_indices.is_empty() {
            return Default::default();
        }

        // only use legacy const generics if the param count matches with them
        if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
            if args.len() <= data.params.len() {
                return Default::default();
            } else {
                // there are more parameters than there should be without legacy
                // const params; use them
                let mut indices = data.legacy_const_generics_indices.clone();
                indices.sort();
                return indices;
            }
        }

        // check legacy const parameters
        for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
            let arg = match subst.at(Interner, subst_idx).constant(Interner) {
                Some(c) => c,
                None => continue, // not a const parameter?
            };
            if arg_idx >= args.len() as u32 {
                continue;
            }
            let _ty = arg.data(Interner).ty.clone();
            let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
            self.infer_expr(args[arg_idx as usize], &expected);
            // FIXME: evaluate and unify with the const
        }
        let mut indices = data.legacy_const_generics_indices.clone();
        indices.sort();
        indices
    }

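    /// Result type of a built-in binary operation: `bool` for comparisons and
    /// logic, `()` for assignments, the operand type for arithmetic on
    /// scalars; `None` when the built-in rules don't apply.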
    fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
        let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
        let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
        match op {
            BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
                Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
            }
            BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
                // all integer combinations are valid here
                if matches!(
                    lhs_ty.kind(Interner),
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
                        | TyKind::InferenceVar(_, TyVariableKind::Integer)
                ) && matches!(
                    rhs_ty.kind(Interner),
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
                        | TyKind::InferenceVar(_, TyVariableKind::Integer)
                ) {
                    Some(lhs_ty)
                } else {
                    None
                }
            }
            BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
                // (int, int) | (uint, uint) | (float, float)
                (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
                | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
                | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_))) => {
                    Some(rhs_ty)
                }
                // ({int}, int) | ({int}, uint)
                (
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
                ) => Some(rhs_ty),
                // (int, {int}) | (uint, {int})
                (
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                ) => Some(lhs_ty),
                // ({float}, float)
                (
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                    TyKind::Scalar(Scalar::Float(_)),
                ) => Some(rhs_ty),
                // (float, {float})
                (
                    TyKind::Scalar(Scalar::Float(_)),
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                ) => Some(lhs_ty),
                // ({int}, {int}) | ({float}, {float})
                (
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                )
                | (
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                ) => Some(rhs_ty),
                _ => None,
            },
        }
    }

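    /// The type the right-hand side of a built-in binary operation is expected
    /// to have, given the operator and the LHS type; `None` when built-in
    /// rules don't apply (e.g. shifts, or a non-scalar LHS).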
    fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
        Some(match op {
            BinaryOp::LogicOp(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
            BinaryOp::Assignment { op: None } => lhs_ty,
            BinaryOp::CmpOp(CmpOp::Eq { .. }) => match self
                .resolve_ty_shallow(&lhs_ty)
                .kind(Interner)
            {
                TyKind::Scalar(_) | TyKind::Str => lhs_ty,
                TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
                _ => return None,
            },
            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => return None,
            BinaryOp::CmpOp(CmpOp::Ord { .. })
            | BinaryOp::Assignment { op: Some(_) }
            | BinaryOp::ArithOp(_) => match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
                TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) => lhs_ty,
                TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
                _ => return None,
            },
        })
    }

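    /// Runs `cb` with a fresh breakable context (loop, labelled block, or a
    /// border that `break`/`continue` cannot cross) on the stack; returns the
    /// coerced break type if any `break` targeted this context.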
    fn with_breakable_ctx<T>(
        &mut self,
        kind: BreakableKind,
        ty: Ty,
        label: Option<LabelId>,
        cb: impl FnOnce(&mut Self) -> T,
    ) -> (Option<Ty>, T) {
        self.breakables.push({
            let label = label.map(|label| self.body[label].name.clone());
            BreakableContext { kind, may_break: false, coerce: CoerceMany::new(ty), label }
        });
        let res = cb(self);
        let ctx = self.breakables.pop().expect("breakable stack broken");
        (ctx.may_break.then(|| ctx.coerce.complete()), res)
    }
}