Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/hir-ty/src/infer.rs')
| -rw-r--r-- | crates/hir-ty/src/infer.rs | 787 |
1 file changed, 426 insertions(+), 361 deletions(-)
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 016edb2310..d527a4ae29 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -21,12 +21,14 @@ pub(crate) mod diagnostics; mod expr; mod fallback; mod mutability; +mod op; mod opaques; mod pat; mod path; +mod place_op; pub(crate) mod unify; -use std::{cell::OnceCell, convert::identity, iter, ops::Index}; +use std::{cell::OnceCell, convert::identity, iter}; use base_db::Crate; use either::Either; @@ -35,11 +37,11 @@ use hir_def::{ ItemContainerId, LocalFieldId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, expr_store::{Body, ExpressionStore, HygieneId, path::Path}, hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId}, - lang_item::{LangItem, LangItemTarget, lang_item}, + lang_item::LangItems, layout::Integer, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, - signatures::{ConstSignature, StaticSignature}, - type_ref::{ConstRef, LifetimeRefId, TypeRefId}, + signatures::{ConstSignature, EnumSignature, StaticSignature}, + type_ref::{ConstRef, LifetimeRefId, TypeRef, TypeRefId}, }; use hir_expand::{mod_path::ModPath, name::Name}; use indexmap::IndexSet; @@ -49,13 +51,15 @@ use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ AliasTyKind, TypeFoldable, - inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _}, + inherent::{AdtDef, IntoKind, Ty as _}, }; +use span::Edition; use stdx::never; -use triomphe::Arc; +use thin_vec::ThinVec; use crate::{ ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures, + collect_type_inference_vars, db::{HirDatabase, InternedClosureId, InternedOpaqueTyId}, infer::{ coerce::{CoerceMany, DynamicCoerceMany}, @@ -65,10 +69,13 @@ use crate::{ lower::{ ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic, }, + method_resolution::{CandidateId, MethodResolutionUnstableFeatures}, 
mir::MirSpan, next_solver::{ - AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind, - Tys, abi::Safety, infer::traits::ObligationCause, + AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, + StoredGenericArgs, StoredTy, StoredTys, Ty, TyKind, Tys, + abi::Safety, + infer::{InferCtxt, ObligationInspector, traits::ObligationCause}, }, traits::FnTrait, utils::TargetFeatureIsSafeInTarget, @@ -86,12 +93,24 @@ use cast::{CastCheck, CastError}; pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; /// The entry point of type inference. -pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult<'_>> { +fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult { + infer_query_with_inspect(db, def, None) +} + +pub fn infer_query_with_inspect<'db>( + db: &'db dyn HirDatabase, + def: DefWithBodyId, + inspect: Option<ObligationInspector<'db>>, +) -> InferenceResult { let _p = tracing::info_span!("infer_query").entered(); let resolver = def.resolver(db); let body = db.body(def); let mut ctx = InferenceContext::new(db, def, &body, resolver); + if let Some(inspect) = inspect { + ctx.table.infer_ctxt.attach_obligation_inspector(inspect); + } + match def { DefWithBodyId::FunctionId(f) => { ctx.collect_fn(f); @@ -99,25 +118,25 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)), DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)), DefWithBodyId::VariantId(v) => { - ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() { + ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) { hir_def::layout::IntegerType::Pointer(signed) => match signed { - true => ctx.types.isize, - false => ctx.types.usize, + true => ctx.types.types.isize, + false => ctx.types.types.usize, }, 
hir_def::layout::IntegerType::Fixed(size, signed) => match signed { true => match size { - Integer::I8 => ctx.types.i8, - Integer::I16 => ctx.types.i16, - Integer::I32 => ctx.types.i32, - Integer::I64 => ctx.types.i64, - Integer::I128 => ctx.types.i128, + Integer::I8 => ctx.types.types.i8, + Integer::I16 => ctx.types.types.i16, + Integer::I32 => ctx.types.types.i32, + Integer::I64 => ctx.types.types.i64, + Integer::I128 => ctx.types.types.i128, }, false => match size { - Integer::I8 => ctx.types.u8, - Integer::I16 => ctx.types.u16, - Integer::I32 => ctx.types.u32, - Integer::I64 => ctx.types.u64, - Integer::I128 => ctx.types.u128, + Integer::I8 => ctx.types.types.u8, + Integer::I16 => ctx.types.types.u16, + Integer::I32 => ctx.types.types.u32, + Integer::I64 => ctx.types.types.u64, + Integer::I128 => ctx.types.types.u128, }, }, }; @@ -150,17 +169,14 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer ctx.handle_opaque_type_uses(); - Arc::new(ctx.resolve_all()) + ctx.resolve_all() } -pub(crate) fn infer_cycle_result( - db: &dyn HirDatabase, - _: DefWithBodyId, -) -> Arc<InferenceResult<'_>> { - Arc::new(InferenceResult { +fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult { + InferenceResult { has_errors: true, - ..InferenceResult::new(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)) - }) + ..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)) + } } /// Binding modes inferred for patterns. 
@@ -191,7 +207,7 @@ pub enum InferenceTyDiagnosticSource { } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum InferenceDiagnostic<'db> { +pub enum InferenceDiagnostic { NoSuchField { field: ExprOrPatId, private: Option<LocalFieldId>, @@ -207,16 +223,16 @@ pub enum InferenceDiagnostic<'db> { }, UnresolvedField { expr: ExprId, - receiver: Ty<'db>, + receiver: StoredTy, name: Name, method_with_same_name_exists: bool, }, UnresolvedMethodCall { expr: ExprId, - receiver: Ty<'db>, + receiver: StoredTy, name: Name, /// Contains the type the field resolves to - field_with_same_name: Option<Ty<'db>>, + field_with_same_name: Option<StoredTy>, assoc_func_with_same_name: Option<FunctionId>, }, UnresolvedAssocItem { @@ -243,21 +259,21 @@ pub enum InferenceDiagnostic<'db> { }, ExpectedFunction { call_expr: ExprId, - found: Ty<'db>, + found: StoredTy, }, TypedHole { expr: ExprId, - expected: Ty<'db>, + expected: StoredTy, }, CastToUnsized { expr: ExprId, - cast_ty: Ty<'db>, + cast_ty: StoredTy, }, InvalidCast { expr: ExprId, error: CastError, - expr_ty: Ty<'db>, - cast_ty: Ty<'db>, + expr_ty: StoredTy, + cast_ty: StoredTy, }, TyDiagnostic { source: InferenceTyDiagnosticSource, @@ -285,9 +301,9 @@ pub enum InferenceDiagnostic<'db> { /// A mismatch between an expected and an inferred type. #[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct TypeMismatch<'db> { - pub expected: Ty<'db>, - pub actual: Ty<'db>, +pub struct TypeMismatch { + pub expected: StoredTy, + pub actual: StoredTy, } /// Represents coercing a value to a different type of value. @@ -331,15 +347,23 @@ pub struct TypeMismatch<'db> { /// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> -> /// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`. 
#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct Adjustment<'db> { - pub kind: Adjust<'db>, - pub target: Ty<'db>, +pub struct Adjustment { + pub kind: Adjust, + pub target: StoredTy, } -impl<'db> Adjustment<'db> { - pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self { +impl Adjustment { + pub fn borrow<'db>( + interner: DbInterner<'db>, + m: Mutability, + ty: Ty<'db>, + lt: Region<'db>, + ) -> Self { let ty = Ty::new_ref(interner, lt, ty, m); - Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty } + Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))), + target: ty.store(), + } } } @@ -357,20 +381,20 @@ impl<'db> Adjustment<'db> { /// capable mutable borrows. /// See #49434 for tracking. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub(crate) enum AllowTwoPhase { +pub enum AllowTwoPhase { // FIXME: We should use this when appropriate. Yes, No, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum Adjust<'db> { +pub enum Adjust { /// Go from ! to any type. NeverToAny, /// Dereference once, producing a place. Deref(Option<OverloadedDeref>), /// Take the address and produce either a `&` or `*` pointer. - Borrow(AutoBorrow<'db>), + Borrow(AutoBorrow), Pointer(PointerCast), } @@ -381,18 +405,47 @@ pub enum Adjust<'db> { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct OverloadedDeref(pub Option<Mutability>); -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum AutoBorrow<'db> { +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub enum AutoBorrowMutability { + Mut { allow_two_phase_borrow: AllowTwoPhase }, + Not, +} + +impl AutoBorrowMutability { + /// Creates an `AutoBorrowMutability` from a mutability and allowance of two phase borrows. 
+ /// + /// Note that when `mutbl.is_not()`, `allow_two_phase_borrow` is ignored + pub fn new(mutbl: Mutability, allow_two_phase_borrow: AllowTwoPhase) -> Self { + match mutbl { + Mutability::Not => Self::Not, + Mutability::Mut => Self::Mut { allow_two_phase_borrow }, + } + } +} + +impl From<AutoBorrowMutability> for Mutability { + fn from(m: AutoBorrowMutability) -> Self { + match m { + AutoBorrowMutability::Mut { .. } => Mutability::Mut, + AutoBorrowMutability::Not => Mutability::Not, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum AutoBorrow { /// Converts from T to &T. - Ref(Region<'db>, Mutability), + Ref(AutoBorrowMutability), /// Converts from T to *T. RawPtr(Mutability), } -impl<'db> AutoBorrow<'db> { - fn mutability(&self) -> Mutability { - let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self; - *m +impl AutoBorrow { + fn mutability(self) -> Mutability { + match self { + AutoBorrow::Ref(mutbl) => mutbl.into(), + AutoBorrow::RawPtr(mutbl) => mutbl, + } } } @@ -434,40 +487,46 @@ pub enum PointerCast { /// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must /// not appear in the final inference result. #[derive(Clone, PartialEq, Eq, Debug)] -pub struct InferenceResult<'db> { +pub struct InferenceResult { /// For each method call expr, records the function it resolves to. - method_resolutions: FxHashMap<ExprId, (FunctionId, GenericArgs<'db>)>, + method_resolutions: FxHashMap<ExprId, (FunctionId, StoredGenericArgs)>, /// For each field access expr, records the field it resolves to. field_resolutions: FxHashMap<ExprId, Either<FieldId, TupleFieldId>>, /// For each struct literal or pattern, records the variant it resolves to. 
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap<ExprOrPatId, (AssocItemId, GenericArgs<'db>)>, + assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, StoredGenericArgs)>, /// Whenever a tuple field expression access a tuple field, we allocate a tuple id in /// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of /// that which allows us to resolve a [`TupleFieldId`]s type. - tuple_field_access_types: FxHashMap<TupleId, Tys<'db>>, - /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead. - diagnostics: Vec<InferenceDiagnostic<'db>>, - pub(crate) type_of_expr: ArenaMap<ExprId, Ty<'db>>, + tuple_field_access_types: ThinVec<StoredTys>, + + pub(crate) type_of_expr: ArenaMap<ExprId, StoredTy>, /// For each pattern record the type it resolves to. /// /// **Note**: When a pattern type is resolved it may still contain /// unresolved or missing subpatterns or subpatterns of mismatched types. - pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>, - pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>, - pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>, - type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>, + pub(crate) type_of_pat: ArenaMap<PatId, StoredTy>, + pub(crate) type_of_binding: ArenaMap<BindingId, StoredTy>, + pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, StoredTy>, + pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, StoredTy>, + + pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch>>>, /// Whether there are any type-mismatching errors in the result. // FIXME: This isn't as useful as initially thought due to us falling back placeholders to // `TyKind::Error`. // Which will then mark this field. pub(crate) has_errors: bool, + /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead. 
+ diagnostics: ThinVec<InferenceDiagnostic>, + /// Interned `Error` type to return references to. // FIXME: Remove this. - error_ty: Ty<'db>, + error_ty: StoredTy, + + pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>, /// Stores the types which were implicitly dereferenced in pattern binding modes. - pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty<'db>>>, + pub(crate) pat_adjustments: FxHashMap<PatId, Vec<StoredTy>>, /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings. /// /// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an @@ -482,15 +541,24 @@ pub struct InferenceResult<'db> { /// ``` /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`. pub(crate) binding_modes: ArenaMap<PatId, BindingMode>, - pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>, - pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem<'db>>, FnTrait)>, + + pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem>, FnTrait)>, // FIXME: remove this field pub mutated_bindings_in_closure: FxHashSet<BindingId>, + pub(crate) coercion_casts: FxHashSet<ExprId>, } -impl<'db> InferenceResult<'db> { - fn new(error_ty: Ty<'db>) -> Self { +#[salsa::tracked] +impl InferenceResult { + #[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)] + pub fn for_body(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult { + infer_query(db, def) + } +} + +impl InferenceResult { + fn new(error_ty: Ty<'_>) -> Self { Self { method_resolutions: Default::default(), field_resolutions: Default::default(), @@ -501,10 +569,11 @@ impl<'db> InferenceResult<'db> { type_of_expr: Default::default(), type_of_pat: Default::default(), type_of_binding: Default::default(), + type_of_type_placeholder: Default::default(), type_of_opaque: Default::default(), type_mismatches: Default::default(), has_errors: Default::default(), - error_ty, + 
error_ty: error_ty.store(), pat_adjustments: Default::default(), binding_modes: Default::default(), expr_adjustments: Default::default(), @@ -514,8 +583,8 @@ impl<'db> InferenceResult<'db> { } } - pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> { - self.method_resolutions.get(&expr).copied() + pub fn method_resolution<'db>(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> { + self.method_resolutions.get(&expr).map(|(func, args)| (*func, args.as_ref())) } pub fn field_resolution(&self, expr: ExprId) -> Option<Either<FieldId, TupleFieldId>> { self.field_resolutions.get(&expr).copied() @@ -532,91 +601,103 @@ impl<'db> InferenceResult<'db> { ExprOrPatId::PatId(id) => self.variant_resolution_for_pat(id), } } - pub fn assoc_resolutions_for_expr( + pub fn assoc_resolutions_for_expr<'db>( &self, id: ExprId, - ) -> Option<(AssocItemId, GenericArgs<'db>)> { - self.assoc_resolutions.get(&id.into()).copied() + ) -> Option<(CandidateId, GenericArgs<'db>)> { + self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref())) } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, GenericArgs<'db>)> { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_pat<'db>( + &self, + id: PatId, + ) -> Option<(CandidateId, GenericArgs<'db>)> { + self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref())) } - pub fn assoc_resolutions_for_expr_or_pat( + pub fn assoc_resolutions_for_expr_or_pat<'db>( &self, id: ExprOrPatId, - ) -> Option<(AssocItemId, GenericArgs<'db>)> { + ) -> Option<(CandidateId, GenericArgs<'db>)> { match id { ExprOrPatId::ExprId(id) => self.assoc_resolutions_for_expr(id), ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id), } } - pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> { - self.type_mismatches.get(&expr.into()) + pub fn type_mismatch_for_expr(&self, expr: ExprId) -> 
Option<&TypeMismatch> { + self.type_mismatches.as_deref()?.get(&expr.into()) } - pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> { - self.type_mismatches.get(&pat.into()) + pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> { + self.type_mismatches.as_deref()?.get(&pat.into()) } - pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch<'db>)> { - self.type_mismatches.iter().map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch)) + pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch)> { + self.type_mismatches + .as_deref() + .into_iter() + .flatten() + .map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch)) + } + pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> { + self.type_mismatches.as_deref().into_iter().flatten().filter_map( + |(expr_or_pat, mismatch)| match *expr_or_pat { + ExprOrPatId::ExprId(expr) => Some((expr, mismatch)), + _ => None, + }, + ) } - pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch<'db>)> { - self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat { - ExprOrPatId::ExprId(expr) => Some((expr, mismatch)), - _ => None, - }) + pub fn placeholder_types<'db>(&self) -> impl Iterator<Item = (TypeRefId, Ty<'db>)> { + self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty.as_ref())) } - pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem<'db>>, FnTrait) { + pub fn type_of_type_placeholder<'db>(&self, type_ref: TypeRefId) -> Option<Ty<'db>> { + self.type_of_type_placeholder.get(&type_ref).map(|ty| ty.as_ref()) + } + pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem>, FnTrait) { self.closure_info.get(&closure).unwrap() } - pub fn type_of_expr_or_pat(&self, id: ExprOrPatId) -> Option<Ty<'db>> { + pub fn type_of_expr_or_pat<'db>(&self, id: ExprOrPatId) -> Option<Ty<'db>> { match id { - 
ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).copied(), - ExprOrPatId::PatId(id) => self.type_of_pat.get(id).copied(), + ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).map(|it| it.as_ref()), + ExprOrPatId::PatId(id) => self.type_of_pat.get(id).map(|it| it.as_ref()), } } - pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<Ty<'db>> { + pub fn type_of_expr_with_adjust<'db>(&self, id: ExprId) -> Option<Ty<'db>> { match self.expr_adjustments.get(&id).and_then(|adjustments| { - adjustments - .iter() - .filter(|adj| { - // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140 - !matches!( - adj, - Adjustment { - kind: Adjust::NeverToAny, - target, - } if target.is_never() - ) - }) - .next_back() + adjustments.iter().rfind(|adj| { + // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140 + !matches!( + adj, + Adjustment { + kind: Adjust::NeverToAny, + target, + } if target.as_ref().is_never() + ) + }) }) { - Some(adjustment) => Some(adjustment.target), - None => self.type_of_expr.get(id).copied(), + Some(adjustment) => Some(adjustment.target.as_ref()), + None => self.type_of_expr.get(id).map(|it| it.as_ref()), } } - pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<Ty<'db>> { + pub fn type_of_pat_with_adjust<'db>(&self, id: PatId) -> Option<Ty<'db>> { match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) { - Some(adjusted) => Some(*adjusted), - None => self.type_of_pat.get(id).copied(), + Some(adjusted) => Some(adjusted.as_ref()), + None => self.type_of_pat.get(id).map(|it| it.as_ref()), } } pub fn is_erroneous(&self) -> bool { self.has_errors && self.type_of_expr.iter().count() == 0 } - pub fn diagnostics(&self) -> &[InferenceDiagnostic<'db>] { + pub fn diagnostics(&self) -> &[InferenceDiagnostic] { &self.diagnostics } - pub fn tuple_field_access_type(&self, id: 
TupleId) -> Tys<'db> { - self.tuple_field_access_types[&id] + pub fn tuple_field_access_type<'db>(&self, id: TupleId) -> Tys<'db> { + self.tuple_field_access_types[id.0 as usize].as_ref() } - pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> { + pub fn pat_adjustment(&self, id: PatId) -> Option<&[StoredTy]> { self.pat_adjustments.get(&id).map(|it| &**it) } - pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment<'db>]> { + pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> { self.expr_adjustments.get(&id).map(|it| &**it) } @@ -625,137 +706,47 @@ impl<'db> InferenceResult<'db> { } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn expression_types(&self) -> impl Iterator<Item = (ExprId, Ty<'db>)> { - self.type_of_expr.iter().map(|(k, v)| (k, *v)) + pub fn expression_types<'db>(&self) -> impl Iterator<Item = (ExprId, Ty<'db>)> { + self.type_of_expr.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn pattern_types(&self) -> impl Iterator<Item = (PatId, Ty<'db>)> { - self.type_of_pat.iter().map(|(k, v)| (k, *v)) + pub fn pattern_types<'db>(&self) -> impl Iterator<Item = (PatId, Ty<'db>)> { + self.type_of_pat.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn binding_types(&self) -> impl Iterator<Item = (BindingId, Ty<'db>)> { - self.type_of_binding.iter().map(|(k, v)| (k, *v)) + pub fn binding_types<'db>(&self) -> impl Iterator<Item = (BindingId, Ty<'db>)> { + self.type_of_binding.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. 
- pub fn return_position_impl_trait_types( - &self, + pub fn return_position_impl_trait_types<'db>( + &'db self, db: &'db dyn HirDatabase, - ) -> impl Iterator<Item = (ImplTraitIdx<'db>, Ty<'db>)> { - self.type_of_opaque.iter().filter_map(move |(&id, &ty)| { + ) -> impl Iterator<Item = (ImplTraitIdx, Ty<'db>)> { + self.type_of_opaque.iter().filter_map(move |(&id, ty)| { let ImplTraitId::ReturnTypeImplTrait(_, rpit_idx) = id.loc(db) else { return None; }; - Some((rpit_idx, ty)) + Some((rpit_idx, ty.as_ref())) }) } -} - -impl<'db> Index<ExprId> for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, expr: ExprId) -> &Ty<'db> { - self.type_of_expr.get(expr).unwrap_or(&self.error_ty) - } -} - -impl<'db> Index<PatId> for InferenceResult<'db> { - type Output = Ty<'db>; - fn index(&self, pat: PatId) -> &Ty<'db> { - self.type_of_pat.get(pat).unwrap_or(&self.error_ty) + pub fn expr_ty<'db>(&self, id: ExprId) -> Ty<'db> { + self.type_of_expr.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } -} - -impl<'db> Index<ExprOrPatId> for InferenceResult<'db> { - type Output = Ty<'db>; - fn index(&self, id: ExprOrPatId) -> &Ty<'db> { - match id { - ExprOrPatId::ExprId(id) => &self[id], - ExprOrPatId::PatId(id) => &self[id], - } + pub fn pat_ty<'db>(&self, id: PatId) -> Ty<'db> { + self.type_of_pat.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } -} - -impl<'db> Index<BindingId> for InferenceResult<'db> { - type Output = Ty<'db>; - fn index(&self, b: BindingId) -> &Ty<'db> { - self.type_of_binding.get(b).unwrap_or(&self.error_ty) + pub fn expr_or_pat_ty<'db>(&self, id: ExprOrPatId) -> Ty<'db> { + self.type_of_expr_or_pat(id).unwrap_or(self.error_ty.as_ref()) } -} - -#[derive(Debug, Clone)] -struct InternedStandardTypes<'db> { - unit: Ty<'db>, - never: Ty<'db>, - char: Ty<'db>, - bool: Ty<'db>, - i8: Ty<'db>, - i16: Ty<'db>, - i32: Ty<'db>, - i64: Ty<'db>, - i128: Ty<'db>, - isize: Ty<'db>, - u8: Ty<'db>, - u16: Ty<'db>, - u32: Ty<'db>, - u64: Ty<'db>, 
- u128: Ty<'db>, - usize: Ty<'db>, - f16: Ty<'db>, - f32: Ty<'db>, - f64: Ty<'db>, - f128: Ty<'db>, - static_str_ref: Ty<'db>, - error: Ty<'db>, - - re_static: Region<'db>, - re_error: Region<'db>, - re_erased: Region<'db>, - - empty_args: GenericArgs<'db>, - empty_tys: Tys<'db>, -} -impl<'db> InternedStandardTypes<'db> { - fn new(interner: DbInterner<'db>) -> Self { - let str = Ty::new(interner, rustc_type_ir::TyKind::Str); - let re_static = Region::new_static(interner); - Self { - unit: Ty::new_unit(interner), - never: Ty::new(interner, TyKind::Never), - char: Ty::new(interner, TyKind::Char), - bool: Ty::new(interner, TyKind::Bool), - i8: Ty::new_int(interner, rustc_type_ir::IntTy::I8), - i16: Ty::new_int(interner, rustc_type_ir::IntTy::I16), - i32: Ty::new_int(interner, rustc_type_ir::IntTy::I32), - i64: Ty::new_int(interner, rustc_type_ir::IntTy::I64), - i128: Ty::new_int(interner, rustc_type_ir::IntTy::I128), - isize: Ty::new_int(interner, rustc_type_ir::IntTy::Isize), - u8: Ty::new_uint(interner, rustc_type_ir::UintTy::U8), - u16: Ty::new_uint(interner, rustc_type_ir::UintTy::U16), - u32: Ty::new_uint(interner, rustc_type_ir::UintTy::U32), - u64: Ty::new_uint(interner, rustc_type_ir::UintTy::U64), - u128: Ty::new_uint(interner, rustc_type_ir::UintTy::U128), - usize: Ty::new_uint(interner, rustc_type_ir::UintTy::Usize), - f16: Ty::new_float(interner, rustc_type_ir::FloatTy::F16), - f32: Ty::new_float(interner, rustc_type_ir::FloatTy::F32), - f64: Ty::new_float(interner, rustc_type_ir::FloatTy::F64), - f128: Ty::new_float(interner, rustc_type_ir::FloatTy::F128), - static_str_ref: Ty::new_ref(interner, re_static, str, Mutability::Not), - error: Ty::new_error(interner, ErrorGuaranteed), - - re_static, - re_error: Region::error(interner), - re_erased: Region::new_erased(interner), - - empty_args: GenericArgs::new_from_iter(interner, []), - empty_tys: Tys::new_from_iter(interner, []), - } + pub fn binding_ty<'db>(&self, id: BindingId) -> Ty<'db> { + 
self.type_of_binding.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } } @@ -768,12 +759,15 @@ pub(crate) struct InferenceContext<'body, 'db> { /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver<'db>, - target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, + target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>, + pub(crate) unstable_features: MethodResolutionUnstableFeatures, + pub(crate) edition: Edition, pub(crate) generic_def: GenericDefId, - table: unify::InferenceTable<'db>, + pub(crate) table: unify::InferenceTable<'db>, + pub(crate) lang_items: &'db LangItems, /// The traits in scope, disregarding block modules. This is used for caching purposes. traits_in_scope: FxHashSet<TraitId>, - pub(crate) result: InferenceResult<'db>, + pub(crate) result: InferenceResult, tuple_field_accesses_rev: IndexSet<Tys<'db>, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>, /// The return type of the function being inferred, the closure or async block if we're @@ -790,7 +784,7 @@ pub(crate) struct InferenceContext<'body, 'db> { resume_yield_tys: Option<(Ty<'db>, Ty<'db>)>, diverges: Diverges, breakables: Vec<BreakableContext<'db>>, - types: InternedStandardTypes<'db>, + types: &'db crate::next_solver::DefaultAny<'db>, /// Whether we are inside the pattern of a destructuring assignment. inside_assignment: bool, @@ -798,7 +792,7 @@ pub(crate) struct InferenceContext<'body, 'db> { deferred_cast_checks: Vec<CastCheck<'db>>, // fields related to closure capture - current_captures: Vec<CapturedItemWithoutTy<'db>>, + current_captures: Vec<CapturedItemWithoutTy>, /// A stack that has an entry for each projection in the current capture. /// /// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`. 
@@ -811,7 +805,7 @@ pub(crate) struct InferenceContext<'body, 'db> { closure_dependencies: FxHashMap<InternedClosureId, Vec<InternedClosureId>>, deferred_closures: FxHashMap<InternedClosureId, Vec<(Ty<'db>, Ty<'db>, Vec<Ty<'db>>, ExprId)>>, - diagnostics: Diagnostics<'db>, + diagnostics: Diagnostics, } #[derive(Clone, Debug)] @@ -866,13 +860,18 @@ impl<'body, 'db> InferenceContext<'body, 'db> { resolver: Resolver<'db>, ) -> Self { let trait_env = db.trait_environment_for_body(owner); - let table = unify::InferenceTable::new(db, trait_env, Some(owner)); - let types = InternedStandardTypes::new(table.interner()); + let table = unify::InferenceTable::new(db, trait_env, resolver.krate(), Some(owner)); + let types = crate::next_solver::default_types(db); InferenceContext { - result: InferenceResult::new(types.error), - return_ty: types.error, // set in collect_* calls + result: InferenceResult::new(types.types.error), + return_ty: types.types.error, // set in collect_* calls types, target_features: OnceCell::new(), + unstable_features: MethodResolutionUnstableFeatures::from_def_map( + resolver.top_level_def_map(), + ), + lang_items: table.interner().lang_items(), + edition: resolver.krate().data(db).edition, table, tuple_field_accesses_rev: Default::default(), resume_yield_tys: None, @@ -906,18 +905,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { self.resolver.krate() } - fn target_features<'a>( - db: &dyn HirDatabase, - target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, - owner: DefWithBodyId, - krate: Crate, - ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) { - let (target_features, target_feature_is_safe) = target_features.get_or_init(|| { - let target_features = match owner { - DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())), + fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) { + let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| 
{ + let target_features = match self.owner { + DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(self.db, id), _ => TargetFeatures::default(), }; - let target_feature_is_safe = match &krate.workspace_data(db).target { + let target_feature_is_safe = match &self.krate().workspace_data(self.db).target { Ok(target) => crate::utils::target_feature_is_safe_in_target(target), Err(_) => TargetFeatureIsSafeInTarget::No, }; @@ -927,13 +921,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } #[inline] - pub(crate) fn set_tainted_by_errors(&mut self) { + fn set_tainted_by_errors(&mut self) { self.result.has_errors = true; } /// Clones `self` and calls `resolve_all()` on it. // FIXME: Remove this. - pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult<'db> { + pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult { let mut ctx = self.clone(); ctx.type_inference_fallback(); @@ -957,7 +951,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { // `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you // used this function for another workaround, mention it here. If you really need this function and believe that // there is no problem in it being `pub(crate)`, remove this comment. - fn resolve_all(self) -> InferenceResult<'db> { + fn resolve_all(self) -> InferenceResult { let InferenceContext { mut table, mut result, tuple_field_accesses_rev, diagnostics, .. 
} = self; @@ -972,6 +966,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { type_of_expr, type_of_pat, type_of_binding, + type_of_type_placeholder, type_of_opaque, type_mismatches, has_errors, @@ -990,52 +985,58 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } = &mut result; for ty in type_of_expr.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_expr.shrink_to_fit(); for ty in type_of_pat.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_pat.shrink_to_fit(); for ty in type_of_binding.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_binding.shrink_to_fit(); + for ty in type_of_type_placeholder.values_mut() { + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); + } + type_of_type_placeholder.shrink_to_fit(); type_of_opaque.shrink_to_fit(); - *has_errors |= !type_mismatches.is_empty(); - - for mismatch in (*type_mismatches).values_mut() { - mismatch.expected = table.resolve_completely(mismatch.expected); - mismatch.actual = table.resolve_completely(mismatch.actual); + if let Some(type_mismatches) = type_mismatches { + *has_errors = true; + for mismatch in type_mismatches.values_mut() { + mismatch.expected = table.resolve_completely(mismatch.expected.as_ref()).store(); + mismatch.actual = table.resolve_completely(mismatch.actual.as_ref()).store(); + } + type_mismatches.shrink_to_fit(); } - 
type_mismatches.shrink_to_fit(); diagnostics.retain_mut(|diagnostic| { use InferenceDiagnostic::*; match diagnostic { ExpectedFunction { found: ty, .. } | UnresolvedField { receiver: ty, .. } | UnresolvedMethodCall { receiver: ty, .. } => { - *ty = table.resolve_completely(*ty); + *ty = table.resolve_completely(ty.as_ref()).store(); // FIXME: Remove this when we are on par with rustc in terms of inference - if ty.references_non_lt_error() { + if ty.as_ref().references_non_lt_error() { return false; } if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic && let Some(ty) = field_with_same_name { - *ty = table.resolve_completely(*ty); - if ty.references_non_lt_error() { + *ty = table.resolve_completely(ty.as_ref()).store(); + if ty.as_ref().references_non_lt_error() { *field_with_same_name = None; } } } TypedHole { expected: ty, .. } => { - *ty = table.resolve_completely(*ty); + *ty = table.resolve_completely(ty.as_ref()).store(); } _ => (), } @@ -1043,31 +1044,33 @@ impl<'body, 'db> InferenceContext<'body, 'db> { }); diagnostics.shrink_to_fit(); for (_, subst) in method_resolutions.values_mut() { - *subst = table.resolve_completely(*subst); - *has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error()); + *subst = table.resolve_completely(subst.as_ref()).store(); + *has_errors = + *has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error()); } method_resolutions.shrink_to_fit(); for (_, subst) in assoc_resolutions.values_mut() { - *subst = table.resolve_completely(*subst); - *has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error()); + *subst = table.resolve_completely(subst.as_ref()).store(); + *has_errors = + *has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error()); } assoc_resolutions.shrink_to_fit(); for adjustment in expr_adjustments.values_mut().flatten() { - adjustment.target = table.resolve_completely(adjustment.target); - *has_errors = *has_errors || 
adjustment.target.references_non_lt_error(); + adjustment.target = table.resolve_completely(adjustment.target.as_ref()).store(); + *has_errors = *has_errors || adjustment.target.as_ref().references_non_lt_error(); } expr_adjustments.shrink_to_fit(); for adjustment in pat_adjustments.values_mut().flatten() { - *adjustment = table.resolve_completely(*adjustment); - *has_errors = *has_errors || adjustment.references_non_lt_error(); + *adjustment = table.resolve_completely(adjustment.as_ref()).store(); + *has_errors = *has_errors || adjustment.as_ref().references_non_lt_error(); } pat_adjustments.shrink_to_fit(); result.tuple_field_access_types = tuple_field_accesses_rev .into_iter() - .enumerate() - .map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst))) - .inspect(|(_, subst)| { - *has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error()); + .map(|subst| table.resolve_completely(subst).store()) + .inspect(|subst| { + *has_errors = + *has_errors || subst.as_ref().iter().any(|ty| ty.references_non_lt_error()); }) .collect(); result.tuple_field_access_types.shrink_to_fit(); @@ -1093,7 +1096,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { data.type_ref, &data.store, InferenceTyDiagnosticSource::Signature, - LifetimeElisionKind::Elided(self.types.re_static), + LifetimeElisionKind::Elided(self.types.regions.statik), ); self.return_ty = return_ty; @@ -1151,7 +1154,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ); self.process_user_written_ty(return_ty) } - None => self.types.unit, + None => self.types.types.unit, }; self.return_coercion = Some(CoerceMany::new(self.return_ty)); @@ -1162,6 +1165,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> { self.table.interner() } + #[inline] + pub(crate) fn infcx(&self) -> &InferCtxt<'db> { + &self.table.infer_ctxt + } + fn infer_body(&mut self) { match self.return_coercion { Some(_) => self.infer_return(self.body.body_expr), @@ -1176,10 +1184,10 @@ impl<'body, 'db> 
InferenceContext<'body, 'db> { } fn write_expr_ty(&mut self, expr: ExprId, ty: Ty<'db>) { - self.result.type_of_expr.insert(expr, ty); + self.result.type_of_expr.insert(expr, ty.store()); } - fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) { + pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) { if adjustments.is_empty() { return; } @@ -1192,7 +1200,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ) => { // NeverToAny coercion can target any type, so instead of adding a new // adjustment on top we can change the target. - *target = *new_target; + *target = new_target.clone(); } _ => { *entry.get_mut() = adjustments; @@ -1205,15 +1213,20 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } } - fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty<'db>]>) { + fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[StoredTy]>) { if adjustments.is_empty() { return; } self.result.pat_adjustments.entry(pat).or_default().extend(adjustments); } - fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: GenericArgs<'db>) { - self.result.method_resolutions.insert(expr, (func, subst)); + pub(crate) fn write_method_resolution( + &mut self, + expr: ExprId, + func: FunctionId, + subst: GenericArgs<'db>, + ) { + self.result.method_resolutions.insert(expr, (func, subst.store())); } fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { @@ -1223,21 +1236,25 @@ impl<'body, 'db> InferenceContext<'body, 'db> { fn write_assoc_resolution( &mut self, id: ExprOrPatId, - item: AssocItemId, + item: CandidateId, subs: GenericArgs<'db>, ) { - self.result.assoc_resolutions.insert(id, (item, subs)); + self.result.assoc_resolutions.insert(id, (item, subs.store())); } fn write_pat_ty(&mut self, pat: PatId, ty: Ty<'db>) { - self.result.type_of_pat.insert(pat, ty); + self.result.type_of_pat.insert(pat, ty.store()); + } + + fn write_type_placeholder_ty(&mut self, type_ref: 
TypeRefId, ty: Ty<'db>) { + self.result.type_of_type_placeholder.insert(type_ref, ty.store()); } fn write_binding_ty(&mut self, id: BindingId, ty: Ty<'db>) { - self.result.type_of_binding.insert(id, ty); + self.result.type_of_binding.insert(id, ty.store()); } - fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) { + pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic) { self.diagnostics.push(diagnostic); } @@ -1281,10 +1298,30 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ) -> Ty<'db> { let ty = self .with_ty_lowering(store, type_source, lifetime_elision, |ctx| ctx.lower_ty(type_ref)); - self.process_user_written_ty(ty) + let ty = self.process_user_written_ty(ty); + + // Record the association from placeholders' TypeRefId to type variables. + // We only record them if their number matches. This assumes TypeRef::walk and TypeVisitable process the items in the same order. + let type_variables = collect_type_inference_vars(&ty); + let mut placeholder_ids = vec![]; + TypeRef::walk(type_ref, store, &mut |type_ref_id, type_ref| { + if matches!(type_ref, TypeRef::Placeholder) { + placeholder_ids.push(type_ref_id); + } + }); + + if placeholder_ids.len() == type_variables.len() { + for (placeholder_id, type_variable) in + placeholder_ids.into_iter().zip(type_variables.into_iter()) + { + self.write_type_placeholder_ty(placeholder_id, type_variable); + } + } + + ty } - fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { + pub(crate) fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { self.make_ty( type_ref, self.body, @@ -1293,7 +1330,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ) } - fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> { + pub(crate) fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> { let const_ = self.with_ty_lowering( self.body, InferenceTyDiagnosticSource::Body, @@ -1303,7 +1340,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { 
self.insert_type_vars(const_) } - fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> { + pub(crate) fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> { let const_ = self.with_ty_lowering( self.body, InferenceTyDiagnosticSource::Body, @@ -1314,10 +1351,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn err_ty(&self) -> Ty<'db> { - self.types.error + self.types.types.error } - fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> { + pub(crate) fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> { let lt = self.with_ty_lowering( self.body, InferenceTyDiagnosticSource::Body, @@ -1371,7 +1408,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { match ty.kind() { TyKind::Adt(adt_def, substs) => match adt_def.def_id().0 { AdtId::StructId(struct_id) => { - match self.db.field_types(struct_id.into()).values().next_back().copied() { + match self + .db + .field_types(struct_id.into()) + .values() + .next_back() + .map(|it| it.get()) + { Some(field) => { ty = field.instantiate(self.interner(), substs); } @@ -1399,19 +1442,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } /// Whenever you lower a user-written type, you should call this. - fn process_user_written_ty<T>(&mut self, ty: T) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { + fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { self.table.process_user_written_ty(ty) } /// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation, /// while `process_user_written_ty()` should (but doesn't currently). 
- fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { + fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { self.table.process_remote_user_written_ty(ty) } @@ -1427,16 +1464,73 @@ impl<'body, 'db> InferenceContext<'body, 'db> { self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[]) } - fn demand_eqtype(&mut self, expected: Ty<'db>, actual: Ty<'db>) { + fn demand_eqtype( + &mut self, + id: ExprOrPatId, + expected: Ty<'db>, + actual: Ty<'db>, + ) -> Result<(), ()> { + let result = self.demand_eqtype_fixme_no_diag(expected, actual); + if result.is_err() { + self.result + .type_mismatches + .get_or_insert_default() + .insert(id, TypeMismatch { expected: expected.store(), actual: actual.store() }); + } + result + } + + fn demand_eqtype_fixme_no_diag( + &mut self, + expected: Ty<'db>, + actual: Ty<'db>, + ) -> Result<(), ()> { let result = self .table - .infer_ctxt - .at(&ObligationCause::new(), self.table.trait_env.env) + .at(&ObligationCause::new()) .eq(expected, actual) .map(|infer_ok| self.table.register_infer_ok(infer_ok)); + result.map_err(drop) + } + + fn demand_suptype(&mut self, expected: Ty<'db>, actual: Ty<'db>) { + let result = self + .table + .at(&ObligationCause::new()) + .sup(expected, actual) + .map(|infer_ok| self.table.register_infer_ok(infer_ok)); + if let Err(_err) = result { + // FIXME: Emit diagnostic. + } + } + + fn demand_coerce( + &mut self, + expr: ExprId, + checked_ty: Ty<'db>, + expected: Ty<'db>, + allow_two_phase: AllowTwoPhase, + expr_is_read: ExprIsRead, + ) -> Ty<'db> { + let result = self.coerce(expr.into(), checked_ty, expected, allow_two_phase, expr_is_read); if let Err(_err) = result { // FIXME: Emit diagnostic. 
} + result.unwrap_or(self.types.types.error) + } + + fn expr_ty(&self, expr: ExprId) -> Ty<'db> { + self.result.expr_ty(expr) + } + + fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { + let mut ty = None; + if let Some(it) = self.result.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + ty = Some(it.target.as_ref()); + } + ty.unwrap_or_else(|| self.expr_ty(e)) } fn resolve_associated_type_with_params( @@ -1596,9 +1690,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { (ty, _) = path_ctx.lower_partly_resolved_path(resolution, true); tried_resolving_once = true; - ty = self.table.insert_type_vars(ty); - ty = self.table.normalize_associated_types_in(ty); - ty = self.table.structurally_resolve_type(ty); + ty = self.table.process_user_written_ty(ty); if ty.is_ty_error() { return (self.err_ty(), None); } @@ -1656,7 +1748,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { result } else { // FIXME diagnostic - (ctx.types.error, None) + (ctx.types.types.error, None) } } } @@ -1700,33 +1792,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } } - fn resolve_lang_item(&self, item: LangItem) -> Option<LangItemTarget> { - let krate = self.resolver.krate(); - lang_item(self.db, krate, item) - } - fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> { trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output)) } - fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> { - self.resolve_lang_item(lang)?.as_trait() - } - - fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?) - } - - fn resolve_ops_not_output(&self) -> Option<TypeAliasId> { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Not)?) - } - fn resolve_future_future_output(&self) -> Option<TypeAliasId> { - let ItemContainerId::TraitId(trait_) = self - .resolve_lang_item(LangItem::IntoFutureIntoFuture)? - .as_function()? 
- .lookup(self.db) - .container + let ItemContainerId::TraitId(trait_) = + self.lang_items.IntoFutureIntoFuture?.lookup(self.db).container else { return None; }; @@ -1734,58 +1806,51 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn resolve_boxed_box(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::OwnedBox)?.as_struct()?; + let struct_ = self.lang_items.OwnedBox?; Some(struct_.into()) } fn resolve_range_full(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::RangeFull)?.as_struct()?; + let struct_ = self.lang_items.RangeFull?; Some(struct_.into()) } fn resolve_range(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::Range)?.as_struct()?; + let struct_ = self.lang_items.Range?; Some(struct_.into()) } fn resolve_range_inclusive(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::RangeInclusiveStruct)?.as_struct()?; + let struct_ = self.lang_items.RangeInclusiveStruct?; Some(struct_.into()) } fn resolve_range_from(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::RangeFrom)?.as_struct()?; + let struct_ = self.lang_items.RangeFrom?; Some(struct_.into()) } fn resolve_range_to(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::RangeTo)?.as_struct()?; + let struct_ = self.lang_items.RangeTo?; Some(struct_.into()) } fn resolve_range_to_inclusive(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::RangeToInclusive)?.as_struct()?; + let struct_ = self.lang_items.RangeToInclusive?; Some(struct_.into()) } - fn resolve_ops_index_output(&self) -> Option<TypeAliasId> { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Index)?) 
- } - fn resolve_va_list(&self) -> Option<AdtId> { - let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?; + let struct_ = self.lang_items.VaList?; Some(struct_.into()) } - fn get_traits_in_scope<'a>( - resolver: &Resolver<'db>, - traits_in_scope: &'a FxHashSet<TraitId>, - ) -> Either<FxHashSet<TraitId>, &'a FxHashSet<TraitId>> { - let mut b_traits = resolver.traits_in_scope_from_block_scopes().peekable(); + pub(crate) fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> { + let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable(); if b_traits.peek().is_some() { - Either::Left(traits_in_scope.iter().copied().chain(b_traits).collect()) + Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect()) } else { - Either::Right(traits_in_scope) + Either::Right(&self.traits_in_scope) } } } |