Merge pull request #22101 from ChayimFriedman2/port-infer-call

fix: Port call expr type checking and closure upvar inference from rustc
This commit is contained in:
Chayim Refael Friedman
2026-04-20 12:01:40 +00:00
committed by GitHub
67 changed files with 5254 additions and 2234 deletions
+1
View File
@@ -786,6 +786,7 @@ dependencies = [
"hir-ty",
"intern",
"itertools 0.14.0",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra-ap-rustc_type_ir",
"rustc-hash 2.1.1",
"serde_json",
@@ -675,6 +675,9 @@ pub fn walk_child_exprs(&self, expr_id: ExprId, mut f: impl FnMut(ExprId)) {
f(*expr);
arms.iter().for_each(|arm| {
f(arm.expr);
if let Some(guard) = arm.guard {
f(guard);
}
self.walk_exprs_in_pat(arm.pat, &mut f);
});
}
@@ -926,6 +929,13 @@ pub fn coroutine_for_closure(coroutine_closure: ExprId) -> ExprId {
// We keep the async closure exactly one expr before.
ExprId::from_raw(la_arena::RawIdx::from_u32(coroutine_closure.into_raw().into_u32() - 1))
}
/// The opposite of [`Self::coroutine_for_closure()`].
#[inline]
pub fn closure_for_coroutine(coroutine: ExprId) -> ExprId {
    // We keep the async closure exactly one expr before, so the closure's
    // raw index is the coroutine's raw index plus one.
    let closure_idx = coroutine.into_raw().into_u32() + 1;
    ExprId::from_raw(la_arena::RawIdx::from_u32(closure_idx))
}
}
impl Index<ExprId> for ExpressionStore {
@@ -133,7 +133,7 @@ pub fn pretty_print_expr(
expr: ExprId,
edition: Edition,
) -> String {
pretty::print_expr_hir(db, self, owner, expr, edition)
pretty::print_expr_hir(db, self, owner.into(), expr, edition)
}
pub fn pretty_print_pat(
@@ -144,7 +144,7 @@ pub fn pretty_print_pat(
oneline: bool,
edition: Edition,
) -> String {
pretty::print_pat_hir(db, self, owner, pat, oneline, edition)
pretty::print_pat_hir(db, self, owner.into(), pat, oneline, edition)
}
}
@@ -945,12 +945,19 @@ pub(super) fn lower_generic_args(
})
}
/// An `async fn` needs to capture all parameters in the generated `async` block, even if they have
/// non-captured patterns such as wildcards (to ensure consistent drop order).
fn lower_async_fn(&mut self, params: &mut Vec<PatId>, body: ExprId) -> ExprId {
/// Lowers a desugared coroutine body after moving all of the arguments
/// into the body. This is to make sure that the future actually owns the
/// arguments that are passed to the function, and to ensure things like
/// drop order are stable.
fn lower_async_block_with_moved_arguments(
&mut self,
params: &mut [PatId],
body: ExprId,
coroutine_source: CoroutineSource,
) -> ExprId {
let mut statements = Vec::new();
for param in params {
let name = match self.store.pats[*param] {
let (name, hygiene) = match self.store.pats[*param] {
Pat::Bind { id, .. }
if matches!(
self.store.bindings[id].mode,
@@ -962,14 +969,16 @@ fn lower_async_fn(&mut self, params: &mut Vec<PatId>, body: ExprId) -> ExprId {
}
Pat::Bind { id, .. } => {
// If this is a `ref` binding, we can't leave it as is but we can at least reuse the name, for better display.
self.store.bindings[id].name.clone()
(self.store.bindings[id].name.clone(), self.store.bindings[id].hygiene)
}
_ => self.generate_new_name(),
_ => (self.generate_new_name(), HygieneId::ROOT),
};
let binding_id =
self.alloc_binding(name.clone(), BindingAnnotation::Mutable, HygieneId::ROOT);
let binding_id = self.alloc_binding(name.clone(), BindingAnnotation::Mutable, hygiene);
let pat_id = self.alloc_pat_desugared(Pat::Bind { id: binding_id, subpat: None });
let expr = self.alloc_expr_desugared(Expr::Path(name.into()));
if !hygiene.is_root() {
self.store.ident_hygiene.insert(expr.into(), hygiene);
}
statements.push(Statement::Let {
pat: *param,
type_ref: None,
@@ -980,12 +989,17 @@ fn lower_async_fn(&mut self, params: &mut Vec<PatId>, body: ExprId) -> ExprId {
}
let async_ = self.async_block(
CoroutineSource::Fn,
CaptureBy::Value,
coroutine_source,
// The default capture mode here is by-ref. Later on during upvar analysis,
// we will force the captured arguments to by-move, but for async closures,
// we want to make sure that we avoid unnecessarily moving captures, or else
// all async closures would default to `FnOnce` as their calling mode.
CaptureBy::Ref,
None,
statements.into_boxed_slice(),
Some(body),
);
// It's important that this comes last, see the lowering of async closures for why.
self.alloc_expr_desugared(async_)
}
@@ -1010,14 +1024,18 @@ fn async_block(
fn collect(
&mut self,
params: &mut Vec<PatId>,
params: &mut [PatId],
expr: Option<ast::Expr>,
awaitable: Awaitable,
) -> ExprId {
self.awaitable_context.replace(awaitable);
self.with_label_rib(RibKind::Closure, |this| {
let body = this.collect_expr_opt(expr);
if awaitable == Awaitable::Yes { this.lower_async_fn(params, body) } else { body }
if awaitable == Awaitable::Yes {
this.lower_async_block_with_moved_arguments(params, body, CoroutineSource::Fn)
} else {
body
}
})
}
@@ -1407,9 +1425,11 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
}
}
ast::Expr::ClosureExpr(e) => self.with_label_rib(RibKind::Closure, |this| {
this.with_binding_owner(|this| {
this.with_binding_owner_and_return(|this| {
let mut args = Vec::new();
let mut arg_types = Vec::new();
// For coroutine closures, the body, aka. the coroutine is the bindings owner, and not the closure.
let mut body_is_bindings_owner = false;
if let Some(pl) = e.param_list() {
let num_params = pl.params().count();
args.reserve_exact(num_params);
@@ -1448,18 +1468,12 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
} else if e.async_token().is_some() {
// It's important that this expr is allocated immediately before the closure.
// We rely on it for `coroutine_for_closure()`.
body = this.alloc_expr_desugared(Expr::Closure {
args: Box::default(),
arg_types: Box::default(),
ret_type: None,
body = this.lower_async_block_with_moved_arguments(
&mut args,
body,
closure_kind: ClosureKind::AsyncBlock {
source: CoroutineSource::Closure,
},
// The block may need to capture by move, but we cannot know it now.
// It will be fixed in capture analysis.
capture_by: CaptureBy::Ref,
});
CoroutineSource::Closure,
);
body_is_bindings_owner = true;
ClosureKind::AsyncClosure
} else {
@@ -1469,7 +1483,7 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
if e.move_token().is_some() { CaptureBy::Value } else { CaptureBy::Ref };
this.is_lowering_coroutine = prev_is_lowering_coroutine;
this.current_try_block = prev_try_block;
this.alloc_expr(
let closure = this.alloc_expr(
Expr::Closure {
args: args.into(),
arg_types: arg_types.into(),
@@ -1479,7 +1493,9 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
capture_by,
},
syntax_ptr,
)
);
(if body_is_bindings_owner { body } else { closure }, closure)
})
}),
ast::Expr::BinExpr(e) => {
@@ -1781,13 +1797,24 @@ fn collect_tuple(
}
}
fn with_binding_owner(&mut self, create_expr: impl FnOnce(&mut Self) -> ExprId) -> ExprId {
/// The callback should return two exprs: the first is the bindings owner, the second is the expr to return.
fn with_binding_owner_and_return(
&mut self,
create_expr: impl FnOnce(&mut Self) -> (ExprId, ExprId),
) -> ExprId {
let prev_unowned_bindings_len = self.unowned_bindings.len();
let expr_id = create_expr(self);
let (bindings_owner, expr_to_return) = create_expr(self);
for binding in self.unowned_bindings.drain(prev_unowned_bindings_len..) {
self.store.binding_owners.insert(binding, expr_id);
self.store.binding_owners.insert(binding, bindings_owner);
}
expr_id
expr_to_return
}
/// Convenience wrapper around `with_binding_owner_and_return` for the common
/// case where the bindings owner and the returned expression are the same.
fn with_binding_owner(&mut self, create_expr: impl FnOnce(&mut Self) -> ExprId) -> ExprId {
    self.with_binding_owner_and_return(move |this| {
        let owner_and_result = create_expr(this);
        (owner_and_result, owner_and_result)
    })
}
/// Desugar `try { <stmts>; <expr> }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(<expr>) }`,
@@ -401,7 +401,7 @@ fn print_generic_params(db: &dyn DefDatabase, generic_params: &GenericParams, p:
pub fn print_expr_hir(
db: &dyn DefDatabase,
store: &ExpressionStore,
_owner: DefWithBodyId,
_owner: ExpressionStoreOwnerId,
expr: ExprId,
edition: Edition,
) -> String {
@@ -420,7 +420,7 @@ pub fn print_expr_hir(
pub fn print_pat_hir(
db: &dyn DefDatabase,
store: &ExpressionStore,
_owner: DefWithBodyId,
_owner: ExpressionStoreOwnerId,
pat: PatId,
oneline: bool,
edition: Edition,
@@ -652,7 +652,7 @@ fn async_fn_weird_param_patterns() {
async fn main(&self, param1: i32, ref mut param2: i32, _: i32, param4 @ _: i32, 123: i32) {}
"#,
expect![[r#"
fn main(self, param1, mut param2, mut <ra@gennew>0, param4 @ _, mut <ra@gennew>1) async move {
fn main(self, param1, mut param2, mut <ra@gennew>0, param4 @ _, mut <ra@gennew>1) async {
let ref mut param2 = param2;
let _ = <ra@gennew>0;
let 123 = <ra@gennew>1;
@@ -306,6 +306,7 @@ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
/// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize).
StructuralTeq, sym::structural_teq, TraitId;
Copy, sym::copy, TraitId;
UseCloned, sym::use_cloned, TraitId;
Clone, sym::clone, TraitId;
TrivialClone, sym::trivial_clone, TraitId;
Sync, sym::sync, TraitId;
@@ -324,6 +325,7 @@ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
Drop, sym::drop, TraitId;
Destruct, sym::destruct, TraitId;
BikeshedGuaranteedNoDrop,sym::bikeshed_guaranteed_no_drop, TraitId;
CoerceUnsized, sym::coerce_unsized, TraitId;
DispatchFromDyn, sym::dispatch_from_dyn, TraitId;
@@ -373,6 +375,8 @@ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
AsyncFn, sym::async_fn, TraitId;
AsyncFnMut, sym::async_fn_mut, TraitId;
AsyncFnOnce, sym::async_fn_once, TraitId;
AsyncFnKindHelper, sym::async_fn_kind_helper,TraitId;
AsyncFnKindUpvars, sym::async_fn_kind_upvars,TypeAliasId;
CallRefFuture, sym::call_ref_future, TypeAliasId;
CallOnceFuture, sym::call_once_future, TypeAliasId;
@@ -489,6 +493,8 @@ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
IntoIterIntoIter, sym::into_iter, FunctionId;
IteratorNext, sym::next, FunctionId;
Iterator, sym::iterator, TraitId;
FusedIterator, sym::fused_iterator, TraitId;
AsyncIterator, sym::async_iterator, TraitId;
PinNewUnchecked, sym::new_unchecked, FunctionId;
@@ -509,6 +515,10 @@ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
CStr, sym::CStr, StructId;
Ordering, sym::Ordering, EnumId;
Field, sym::field, TraitId;
FieldBase, sym::field_base, TypeAliasId;
FieldType, sym::field_type, TypeAliasId;
@non_lang_core_traits:
core::default, Default;
core::fmt, Debug;
@@ -1792,14 +1792,14 @@ extern "rust-call" fn call_once(self, arg: (i32, i32)) -> i32 {
fn closure_capture_unsized_type() {
check_number(
r#"
//- minicore: fn, copy, slice, index, coerce_unsized
//- minicore: fn, copy, slice, index, coerce_unsized, sized
fn f<T: A>(x: &<T as A>::Ty) -> &<T as A>::Ty {
let c = || &*x;
c()
}
trait A {
type Ty;
type Ty: ?Sized;
}
impl A for i32 {
@@ -1810,7 +1810,7 @@ impl A for i32 {
let k: &[u8] = &[1, 2, 3];
let k = f::<i32>(k);
k[0] + k[1] + k[2]
}
};
"#,
6,
);
@@ -238,8 +238,7 @@ fn validate_match(&mut self, match_expr: ExprId, scrutinee_expr: ExprId, arms: &
if (pat_ty == scrut_ty
|| scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
.is_none_or(|(match_expr_ty, ..)| match_expr_ty == pat_ty))
&& types_of_subpatterns_do_match(arm.pat, self.body, self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
@@ -38,6 +38,12 @@ fn enumerate_and_adjust(
}
impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
/// When there is a list of items with a gap of an unknown length inside, and another list
/// of item it should be zipped against, this operates on the list with the gap and returns,
/// for each item, the index it should match in the other list.
///
/// When compiling Rust, such situation often occurs for tuple structs/tuples with a rest pattern
/// that should be matched against the fields.
fn enumerate_and_adjust(
self,
expected_len: usize,
@@ -47,7 +47,7 @@
use stdx::never;
use crate::{
CallableDefId, FnAbi, ImplTraitId, InferenceResult, MemoryMap, ParamEnvAndCrate, consteval,
CallableDefId, FnAbi, ImplTraitId, MemoryMap, ParamEnvAndCrate, consteval,
db::{HirDatabase, InternedClosure},
generics::generics,
layout::Layout,
@@ -1495,9 +1495,7 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
}
let sig = interner.signature_unclosure(substs.as_closure().sig(), Safety::Safe);
let sig = sig.skip_binder();
let InternedClosure(owner, _) = id.loc(db);
let infer = InferenceResult::of(db, owner);
let (_, kind) = infer.closure_info(id);
let kind = substs.as_closure().kind();
match f.closure_style {
ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
ClosureStyle::RANotation => write!(f, "|")?,
@@ -1,15 +1,16 @@
//! Utilities for computing drop info about types.
use hir_def::{
AdtId,
AdtId, ImplId,
signatures::{StructFlags, StructSignature},
};
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{AdtDef, IntoKind};
use rustc_type_ir::inherent::{AdtDef, GenericArgs as _, IntoKind};
use stdx::never;
use crate::{
InferenceResult, consteval,
consteval,
db::HirDatabase,
method_resolution::TraitImpls,
next_solver::{
DbInterner, ParamEnv, SimplifiedType, Ty, TyKind,
@@ -18,24 +19,23 @@
},
};
fn has_destructor(interner: DbInterner<'_>, adt: AdtId) -> bool {
let db = interner.db;
#[salsa::tracked]
pub fn destructor(db: &dyn HirDatabase, adt: AdtId) -> Option<ImplId> {
let module = match adt {
AdtId::EnumId(id) => db.lookup_intern_enum(id).container,
AdtId::StructId(id) => db.lookup_intern_struct(id).container,
AdtId::UnionId(id) => db.lookup_intern_union(id).container,
};
let Some(drop_trait) = interner.lang_items().Drop else {
return false;
};
let interner = DbInterner::new_with(db, module.krate(db));
let drop_trait = interner.lang_items().Drop?;
let impls = match module.block(db) {
Some(block) => match TraitImpls::for_block(db, block) {
Some(it) => &**it,
None => return false,
None => return None,
},
None => TraitImpls::for_crate(db, module.krate(db)),
};
!impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).0.is_empty()
impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).0.first().copied()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@@ -71,7 +71,7 @@ fn has_drop_glue_impl<'db>(
match ty.kind() {
TyKind::Adt(adt_def, subst) => {
let adt_id = adt_def.def_id().0;
if has_destructor(infcx.interner, adt_id) {
if adt_def.destructor(infcx.interner).is_some() {
return DropGlue::HasDropGlue;
}
match adt_id {
@@ -132,21 +132,17 @@ fn has_drop_glue_impl<'db>(
has_drop_glue_impl(infcx, ty, env, visited)
}
TyKind::Slice(ty) => has_drop_glue_impl(infcx, ty, env, visited),
TyKind::Closure(closure_id, subst) => {
let owner = closure_id.0.loc(db).0;
let infer = InferenceResult::of(db, owner);
let (captures, _) = infer.closure_info(closure_id.0);
let env = db.trait_environment(owner);
captures
.iter()
.map(|capture| has_drop_glue_impl(infcx, capture.ty(db, subst), env, visited))
.max()
.unwrap_or(DropGlue::None)
TyKind::Closure(_, args) => {
has_drop_glue_impl(infcx, args.as_closure().tupled_upvars_ty(), env, visited)
}
// FIXME: Handle coroutines.
TyKind::Coroutine(..) | TyKind::CoroutineWitness(..) | TyKind::CoroutineClosure(..) => {
DropGlue::None
TyKind::Coroutine(_, args) => {
has_drop_glue_impl(infcx, args.as_coroutine().tupled_upvars_ty(), env, visited)
}
TyKind::CoroutineClosure(_, args) => {
has_drop_glue_impl(infcx, args.as_coroutine_closure().tupled_upvars_ty(), env, visited)
}
// FIXME: Coroutine witness.
TyKind::CoroutineWitness(..) => DropGlue::None,
TyKind::Ref(..)
| TyKind::RawPtr(..)
| TyKind::FnDef(..)
@@ -14,6 +14,7 @@
//! the `ena` crate, which is extracted from rustc.
mod autoderef;
mod callee;
pub(crate) mod cast;
pub(crate) mod closure;
mod coerce;
@@ -28,9 +29,9 @@
mod place_op;
pub(crate) mod unify;
use std::{cell::OnceCell, convert::identity, iter};
use std::{cell::OnceCell, convert::identity, fmt, iter, ops::Deref};
use base_db::Crate;
use base_db::{Crate, FxIndexMap};
use either::Either;
use hir_def::{
AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, ExpressionStoreOwnerId, FieldId,
@@ -54,15 +55,22 @@
AliasTyKind, TypeFoldable,
inherent::{AdtDef, IntoKind, Ty as _},
};
use smallvec::SmallVec;
use span::Edition;
use stdx::never;
use thin_vec::ThinVec;
use crate::{
ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures,
closure_analysis::PlaceBase,
collect_type_inference_vars,
db::{HirDatabase, InternedClosureId, InternedOpaqueTyId},
db::{HirDatabase, InternedOpaqueTyId},
infer::{
callee::DeferredCallResolution,
closure::analysis::{
BorrowKind,
expr_use_visitor::{FakeReadCause, Place},
},
coerce::{CoerceMany, DynamicCoerceMany},
diagnostics::{Diagnostics, InferenceTyLoweringContext as TyLoweringContext},
expr::ExprIsRead,
@@ -71,14 +79,12 @@
ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic,
},
method_resolution::{CandidateId, MethodResolutionUnstableFeatures},
mir::MirSpan,
next_solver::{
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region,
StoredGenericArgs, StoredTy, StoredTys, Ty, TyKind, Tys,
abi::Safety,
infer::{InferCtxt, ObligationInspector, traits::ObligationCause},
},
traits::FnTrait,
utils::TargetFeatureIsSafeInTarget,
};
@@ -91,7 +97,6 @@
pub use unify::{could_unify, could_unify_deeply};
use cast::{CastCheck, CastError};
pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult {
@@ -266,7 +271,10 @@ fn infer_finalize(mut ctx: InferenceContext<'_, '_>) -> InferenceResult {
ctx.table.select_obligations_where_possible();
ctx.infer_closures();
// Closure and coroutine analysis may run after fallback
// because they don't constrain other type variables.
ctx.closure_analyze();
assert!(ctx.deferred_call_resolutions.is_empty());
ctx.table.select_obligations_where_possible();
@@ -498,7 +506,7 @@ pub enum Adjust {
/// The target type is `U` in both cases, with the region and mutability
/// being those shared by both the receiver and the returned reference.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct OverloadedDeref(pub Option<Mutability>);
pub struct OverloadedDeref(pub Mutability);
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum AutoBorrowMutability {
@@ -535,15 +543,6 @@ pub enum AutoBorrow {
RawPtr(Mutability),
}
impl AutoBorrow {
fn mutability(self) -> Mutability {
match self {
AutoBorrow::Ref(mutbl) => mutbl.into(),
AutoBorrow::RawPtr(mutbl) => mutbl,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum PointerCast {
/// Go from a fn-item type to a fn-pointer type.
@@ -637,11 +636,226 @@ pub struct InferenceResult {
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
pub(crate) coercion_casts: FxHashSet<ExprId>,
pub closures_data: FxHashMap<ExprId, ClosureData>,
}
/// Per-closure results of capture analysis, stored in
/// `InferenceResult::closures_data` keyed by the closure expression.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct ClosureData {
/// Tracks the minimum captures required for a closure;
/// see `MinCaptureInformationMap` for more details.
pub min_captures: RootVariableMinCaptureList,
/// Tracks the fake reads required for a closure and the reason for the fake read.
/// When performing pattern matching for closures, there are times we don't end up
/// reading places that are mentioned in a closure (because of _ patterns). However,
/// to ensure the places are initialized, we introduce fake reads.
/// Consider these two examples:
/// ```ignore (discriminant matching with only wildcard arm)
/// let x: u8;
/// let c = || match x { _ => () };
/// ```
/// In this example, we don't need to actually read/borrow `x` in `c`, and so we don't
/// want to capture it. However, we do still want an error here, because `x` should have
/// to be initialized at the point where c is created. Therefore, we add a "fake read"
/// instead.
/// ```ignore (destructured assignments)
/// let c = || {
/// let (t1, t2) = t;
/// }
/// ```
/// In the second example, we capture the disjoint fields of `t` (`t.0` & `t.1`), but
/// we never capture `t`. This becomes an issue when we build MIR as we require
/// information on `t` in order to create place `t.0` and `t.1`. We can solve this
/// issue by fake reading `t`.
pub fake_reads: Box<[(Place, FakeReadCause, SmallVec<[CaptureSourceStack; 2]>)]>,
}
/// Part of `MinCaptureInformationMap`; Maps a root variable to the list of `CapturedPlace`.
/// Used to track the minimum set of `Place`s that need to be captured to support all
/// Places captured by the closure starting at a given root variable.
///
/// This provides a convenient and quick way of checking if a variable being used within
/// a closure is a capture of a local variable.
pub(crate) type RootVariableMinCaptureList = FxIndexMap<BindingId, MinCaptureList>;
/// Part of `MinCaptureInformationMap`; List of `CapturedPlace`s.
pub(crate) type MinCaptureList = Vec<CapturedPlace>;
/// A composite describing a `Place` that is captured by a closure.
#[derive(Eq, PartialEq, Clone, Debug, Hash)]
pub struct CapturedPlace {
/// The `Place` that is captured.
pub place: Place,
/// `CaptureKind` and expression(s) that resulted in such capture of `place`.
pub info: CaptureInfo,
/// Represents if `place` can be mutated or not.
pub mutability: Mutability,
}
impl CapturedPlace {
    /// Whether this place is captured by reference (as opposed to by value or by use).
    pub fn is_by_ref(&self) -> bool {
        matches!(self.info.capture_kind, UpvarCapture::ByRef(..))
    }

    /// The root local binding this capture originates from.
    pub fn captured_local(&self) -> BindingId {
        match self.place.base {
            PlaceBase::Upvar { var_id, .. } => var_id,
            PlaceBase::Local(local) => local,
            PlaceBase::Rvalue | PlaceBase::StaticItem => {
                unreachable!("only locals can be captured")
            }
        }
    }

    /// The type of the capture stored in the closure, which is different from the type of the captured place
    /// if we capture by reference.
    pub fn captured_ty<'db>(&self, db: &'db dyn HirDatabase) -> Ty<'db> {
        let place_ty = self.place.ty();
        match self.info.capture_kind {
            UpvarCapture::ByUse | UpvarCapture::ByValue => place_ty,
            UpvarCapture::ByRef(kind) => {
                // By-ref captures are stored as a reference (with an erased region)
                // to the captured place's type.
                let interner = DbInterner::new_no_crate(db);
                let region = Region::new_erased(interner);
                Ty::new_ref(interner, region, place_ty, kind.to_mutbl_lossy())
            }
        }
    }
}
/// A stack of expression/pattern ids associated with one capture source;
/// see the `Deref` impl and `push`/`truncate` below for how it is manipulated.
#[derive(Clone)]
pub struct CaptureSourceStack(CaptureSourceStackRepr);
/// Compact storage for [`CaptureSourceStack`]: one or two ids are stored
/// inline, longer stacks spill into a `ThinVec`.
#[derive(Clone)]
enum CaptureSourceStackRepr {
One(ExprOrPatId),
Two([ExprOrPatId; 2]),
Many(ThinVec<ExprOrPatId>),
}
// Equality and hashing go through the `Deref`-provided slice view, so the
// three representations compare equal whenever they hold the same ids.
impl PartialEq for CaptureSourceStack {
    fn eq(&self, other: &Self) -> bool {
        self[..] == other[..]
    }
}

impl Eq for CaptureSourceStack {}

impl std::hash::Hash for CaptureSourceStack {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        std::hash::Hash::hash(&self[..], state)
    }
}
// Compile-time guard: keep `CaptureSourceStack` at 16 bytes (two words on 64-bit).
const _: () = assert!(size_of::<CaptureSourceStack>() == 16);
impl Deref for CaptureSourceStack {
    type Target = [ExprOrPatId];

    /// Views the stack as a slice of ids regardless of representation.
    #[inline]
    fn deref(&self) -> &Self::Target {
        match &self.0 {
            CaptureSourceStackRepr::Many(ids) => ids,
            CaptureSourceStackRepr::Two(ids) => ids,
            CaptureSourceStackRepr::One(id) => std::slice::from_ref(id),
        }
    }
}
impl fmt::Debug for CaptureSourceStack {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("CaptureSourceStack").field(&&**self).finish()
}
}
impl CaptureSourceStack {
    /// The number of ids currently in the stack.
    #[inline]
    pub fn len(&self) -> usize {
        match &self.0 {
            CaptureSourceStackRepr::One(_) => 1,
            CaptureSourceStackRepr::Two(_) => 2,
            CaptureSourceStackRepr::Many(it) => it.len(),
        }
    }

    /// Whether the stack holds no ids. Stacks are created with one element
    /// ([`Self::from_single()`]) and `truncate()` debug-asserts a non-zero
    /// length, so this is expected to be `false` in practice; provided for
    /// API completeness alongside `len()` (clippy: `len_without_is_empty`).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Creates a stack containing a single id.
    #[inline]
    pub(crate) fn from_single(id: ExprOrPatId) -> Self {
        Self(CaptureSourceStackRepr::One(id))
    }

    /// The most recently pushed id; panics if the stack is somehow empty.
    #[inline]
    pub fn final_source(&self) -> ExprOrPatId {
        *self.last().expect("should always have a final source")
    }

    /// Appends `new_id`, promoting the representation as needed
    /// (`One` -> `Two` -> `Many`).
    pub fn push(&mut self, new_id: ExprOrPatId) {
        match &mut self.0 {
            CaptureSourceStackRepr::One(old_id) => {
                self.0 = CaptureSourceStackRepr::Two([*old_id, new_id])
            }
            CaptureSourceStackRepr::Two([old_id1, old_id2]) => {
                self.0 = CaptureSourceStackRepr::Many(ThinVec::from([*old_id1, *old_id2, new_id]));
            }
            CaptureSourceStackRepr::Many(old_ids) => old_ids.push(new_id),
        }
    }

    /// Shortens the stack to at most `new_len` ids. `new_len` must be non-zero.
    /// A `Many` representation is not demoted here; see [`Self::shrink_to_fit()`].
    pub fn truncate(&mut self, new_len: usize) {
        debug_assert!(new_len > 0);
        match &mut self.0 {
            CaptureSourceStackRepr::One(_) => {}
            CaptureSourceStackRepr::Two([first, _]) => {
                if new_len == 1 {
                    self.0 = CaptureSourceStackRepr::One(*first)
                }
            }
            CaptureSourceStackRepr::Many(ids) => ids.truncate(new_len),
        }
    }

    /// Demotes a short `Many` representation to the inline forms, or shrinks
    /// the heap allocation otherwise.
    pub fn shrink_to_fit(&mut self) {
        match &mut self.0 {
            CaptureSourceStackRepr::One(_) | CaptureSourceStackRepr::Two(_) => {}
            CaptureSourceStackRepr::Many(ids) => match **ids {
                [one] => self.0 = CaptureSourceStackRepr::One(one),
                [first, second] => self.0 = CaptureSourceStackRepr::Two([first, second]),
                _ => ids.shrink_to_fit(),
            },
        }
    }
}
/// Part of `MinCaptureInformationMap`; describes the capture kind (&, &mut, move)
/// for a particular capture as well as identifying the part of the source code
/// that triggered this capture to occur.
#[derive(Eq, PartialEq, Clone, Debug, Hash)]
pub struct CaptureInfo {
/// The stacks of expression/pattern ids identifying the source code that
/// triggered this capture; see [`CaptureSourceStack`].
pub sources: SmallVec<[CaptureSourceStack; 2]>,
/// Capture mode that was selected
pub capture_kind: UpvarCapture,
}
/// Information describing the capture of an upvar. This is computed
/// during `typeck`, specifically by `regionck`.
#[derive(Eq, PartialEq, Clone, Debug, Copy, Hash)]
pub enum UpvarCapture {
/// Upvar is captured by value. This is always true when the
/// closure is labeled `move`, but can also be true in other cases
/// depending on inference.
ByValue,
/// Upvar is captured by use. This is true when the closure is labeled `use`.
ByUse,
/// Upvar is captured by reference.
ByRef(BorrowKind),
}
#[salsa::tracked]
@@ -699,9 +913,8 @@ fn new(error_ty: Ty<'_>) -> Self {
pat_adjustments: Default::default(),
binding_modes: Default::default(),
expr_adjustments: Default::default(),
closure_info: Default::default(),
mutated_bindings_in_closure: Default::default(),
coercion_casts: Default::default(),
closures_data: Default::default(),
}
}
@@ -771,9 +984,6 @@ pub fn placeholder_types<'db>(&self) -> impl Iterator<Item = (TypeRefId, Ty<'db>
pub fn type_of_type_placeholder<'db>(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(&type_ref).map(|ty| ty.as_ref())
}
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem>, FnTrait) {
self.closure_info.get(&closure).unwrap()
}
pub fn type_of_expr_or_pat<'db>(&self, id: ExprOrPatId) -> Option<Ty<'db>> {
match id {
ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).map(|it| it.as_ref()),
@@ -870,6 +1080,26 @@ pub fn expr_or_pat_ty<'db>(&self, id: ExprOrPatId) -> Ty<'db> {
pub fn binding_ty<'db>(&self, id: BindingId) -> Ty<'db> {
self.type_of_binding.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
/// This does not deduplicate, which means you'll get the types once per capture.
pub fn closure_captures_tys<'db>(&self, closure: ExprId) -> impl Iterator<Item = Ty<'db>> {
self.closures_data[&closure]
.min_captures
.values()
.flat_map(|captures| captures.iter().map(|capture| capture.place.ty()))
}
/// Like [`Self::closure_captures_tys()`], but using [`CapturedPlace::captured_ty()`].
pub fn closure_captures_captured_tys<'db>(
&self,
db: &'db dyn HirDatabase,
closure: ExprId,
) -> impl Iterator<Item = Ty<'db>> {
self.closures_data[&closure]
.min_captures
.values()
.flat_map(|captures| captures.iter().map(|capture| capture.captured_ty(db)))
}
}
/// The inference context contains all information needed during type inference.
@@ -913,19 +1143,8 @@ pub(crate) struct InferenceContext<'body, 'db> {
deferred_cast_checks: Vec<CastCheck<'db>>,
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy>,
/// A stack that has an entry for each projection in the current capture.
///
/// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`.
/// We do that because sometimes we truncate projections (when a closure captures
/// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case.
current_capture_span_stack: Vec<MirSpan>,
current_closure: Option<InternedClosureId>,
/// Stores the list of closure ids that need to be analyzed before this closure. See the
/// comment on `InferenceContext::sort_closures`
closure_dependencies: FxHashMap<InternedClosureId, Vec<InternedClosureId>>,
deferred_closures: FxHashMap<InternedClosureId, Vec<(Ty<'db>, Ty<'db>, Vec<Ty<'db>>, ExprId)>>,
/// The key is an expression defining a closure or a coroutine closure.
deferred_call_resolutions: FxHashMap<ExprId, Vec<DeferredCallResolution<'db>>>,
diagnostics: Diagnostics,
}
@@ -1017,13 +1236,9 @@ fn new(
diverges: Diverges::Maybe,
breakables: Vec::new(),
deferred_cast_checks: Vec::new(),
current_captures: Vec::new(),
current_capture_span_stack: Vec::new(),
current_closure: None,
deferred_closures: FxHashMap::default(),
closure_dependencies: FxHashMap::default(),
inside_assignment: false,
diagnostics: Diagnostics::default(),
deferred_call_resolutions: FxHashMap::default(),
}
}
@@ -1082,7 +1297,12 @@ pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult {
// there is no problem in it being `pub(crate)`, remove this comment.
fn resolve_all(self) -> InferenceResult {
let InferenceContext {
mut table, mut result, tuple_field_accesses_rev, diagnostics, ..
mut table,
mut result,
tuple_field_accesses_rev,
diagnostics,
types,
..
} = self;
let mut diagnostics = diagnostics.finish();
// Destructure every single field so whenever new fields are added to `InferenceResult` we
@@ -1098,16 +1318,12 @@ fn resolve_all(self) -> InferenceResult {
type_of_type_placeholder,
type_of_opaque,
type_mismatches,
closures_data,
has_errors,
error_ty: _,
pat_adjustments,
binding_modes: _,
expr_adjustments,
// Types in `closure_info` have already been `resolve_completely()`'d during
// `InferenceContext::infer_closures()` (in `HirPlace::ty()` specifically), so no need
// to resolve them here.
closure_info: _,
mutated_bindings_in_closure: _,
tuple_field_access_types: _,
coercion_casts: _,
diagnostics: _,
@@ -1194,6 +1410,38 @@ fn resolve_all(self) -> InferenceResult {
*has_errors = *has_errors || adjustment.as_ref().references_non_lt_error();
}
pat_adjustments.shrink_to_fit();
for closure_data in closures_data.values_mut() {
let ClosureData { min_captures, fake_reads } = closure_data;
let dummy_place = || Place {
base_ty: types.types.error.store(),
base: closure::analysis::expr_use_visitor::PlaceBase::Rvalue,
projections: Vec::new(),
};
for (place, _, sources) in fake_reads {
*place = table.resolve_completely(std::mem::replace(place, dummy_place()));
place.projections.shrink_to_fit();
for source in &mut *sources {
source.shrink_to_fit();
}
sources.shrink_to_fit();
}
for min_capture in min_captures.values_mut() {
for captured in &mut *min_capture {
let CapturedPlace { place, info, mutability: _ } = captured;
*place = table.resolve_completely(std::mem::replace(place, dummy_place()));
let CaptureInfo { sources, capture_kind: _ } = info;
for source in &mut *sources {
source.shrink_to_fit();
}
sources.shrink_to_fit();
}
min_capture.shrink_to_fit();
}
min_captures.shrink_to_fit();
}
closures_data.shrink_to_fit();
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.map(|subst| table.resolve_completely(subst).store())
@@ -1387,6 +1635,21 @@ pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic) {
self.diagnostics.push(diagnostic);
}
/// Queues a call resolution to be retried once the closure's signature is known.
fn record_deferred_call_resolution(
    &mut self,
    closure_def_id: ExprId,
    r: DeferredCallResolution<'db>,
) {
    let pending = self.deferred_call_resolutions.entry(closure_def_id).or_default();
    pending.push(r);
}

/// Takes (and clears) all call resolutions deferred for the given closure.
fn remove_deferred_call_resolutions(
    &mut self,
    closure_def_id: ExprId,
) -> Vec<DeferredCallResolution<'db>> {
    match self.deferred_call_resolutions.remove(&closure_def_id) {
        Some(resolutions) => resolutions,
        None => Vec::new(),
    }
}
fn with_ty_lowering<R>(
&mut self,
store: &ExpressionStore,
@@ -1646,6 +1909,23 @@ fn demand_coerce(
result.unwrap_or(self.types.types.error)
}
/// Called when inference reaches a point where `_ty` must already be fully known
/// (e.g. the callee of a call expression) but is still an unresolved variable.
/// Currently just returns the error type without reporting anything.
pub(crate) fn type_must_be_known_at_this_point(
    &self,
    _id: ExprOrPatId,
    _ty: Ty<'db>,
) -> Ty<'db> {
    // FIXME: Emit a diagnostic.
    self.types.types.error
}
/// Registers a `ty: Sized` obligation, provided `ty` contains no (non-lifetime)
/// errors and the `Sized` lang item is available.
pub(crate) fn require_type_is_sized(&mut self, ty: Ty<'db>) {
    // Don't pile obligations onto already-erroneous types.
    if ty.references_non_lt_error() {
        return;
    }
    // Without the lang item there is nothing we can require.
    let Some(sized_trait) = self.lang_items.Sized else {
        return;
    };
    self.table.register_bound(ty, sized_trait, ObligationCause::new());
}
/// Convenience accessor: the (possibly still unresolved) type previously recorded
/// for `expr` in the inference result.
fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
    self.result.expr_ty(expr)
}
@@ -36,7 +36,7 @@ pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment
.iter()
.map(|&(_source, kind)| {
if let AutoderefKind::Overloaded = kind {
Some(OverloadedDeref(Some(Mutability::Not)))
Some(OverloadedDeref(Mutability::Not))
} else {
None
}
@@ -0,0 +1,543 @@
//! Inference of calls.
use std::iter;
use intern::sym;
use tracing::debug;
use hir_def::{CallableDefId, hir::ExprId, signatures::FunctionSignature};
use rustc_type_ir::{
InferTy, Interner,
inherent::{GenericArgs as _, IntoKind, Ty as _},
};
use crate::{
Adjust, Adjustment, AutoBorrow, FnAbi,
autoderef::{GeneralAutoderef, InferenceContextAutoderef},
infer::{
AllowTwoPhase, AutoBorrowMutability, Expectation, InferenceContext, InferenceDiagnostic,
expr::{ExprIsRead, TupleArgumentsFlag},
},
method_resolution::{MethodCallee, TreatNotYetDefinedOpaques},
next_solver::{
FnSig, Ty, TyKind,
infer::{BoundRegionConversionTime, traits::ObligationCause},
},
};
/// The outcome of a single autoderef step while resolving a call expression:
/// how the call will be dispatched once a callable type is found.
#[derive(Debug)]
enum CallStep<'db> {
    /// The callee is an `FnDef`/`FnPtr`; no trait dispatch is needed.
    Builtin(Ty<'db>),
    /// The callee is a closure whose kind (`Fn`/`FnMut`/`FnOnce`) is not yet
    /// decided; final resolution is deferred until upvar inference fixes it.
    DeferredClosure(ExprId, FnSig<'db>),
    /// Call overloading when callee implements one of the Fn* traits.
    Overloaded(MethodCallee<'db>),
}
impl<'db> InferenceContext<'_, 'db> {
    /// Type-checks a call expression `callee_expr(arg_exprs...)` and returns the
    /// type of the whole call.
    ///
    /// The callee type is autoderef'd step by step until
    /// [`Self::try_overloaded_call_step`] finds a callable form; if none is found,
    /// the arguments are still inferred (without expectations) and an
    /// `ExpectedFunction` diagnostic is pushed.
    pub(crate) fn infer_call(
        &mut self,
        call_expr: ExprId,
        callee_expr: ExprId,
        arg_exprs: &[ExprId],
        expected: &Expectation<'db>,
    ) -> Ty<'db> {
        let original_callee_ty = self.infer_expr_no_expect(callee_expr, ExprIsRead::Yes);
        let expr_ty = self.table.try_structurally_resolve_type(original_callee_ty);

        // Peel derefs off the callee until some step yields a dispatch decision.
        let mut autoderef = GeneralAutoderef::new_from_inference_context(self, expr_ty);
        let mut result = None;
        while result.is_none() && autoderef.next().is_some() {
            result =
                Self::try_overloaded_call_step(call_expr, callee_expr, arg_exprs, &mut autoderef);
        }
        // FIXME: rustc does some ABI checks here, but the ABI mapping is in rustc_target and we don't have access to that crate.
        let obligations = autoderef.take_obligations();
        self.table.register_predicates(obligations);

        let output = match result {
            None => {
                // Check all of the arg expressions, but with no expectations
                // since we don't have a signature to compare them to.
                for &arg in arg_exprs {
                    self.infer_expr_no_expect(arg, ExprIsRead::Yes);
                }

                self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
                    call_expr,
                    found: original_callee_ty.store(),
                });
                self.types.types.error
            }
            Some(CallStep::Builtin(callee_ty)) => {
                self.confirm_builtin_call(call_expr, callee_ty, arg_exprs, expected)
            }
            Some(CallStep::DeferredClosure(_def_id, fn_sig)) => {
                self.confirm_deferred_closure_call(call_expr, arg_exprs, expected, fn_sig)
            }
            Some(CallStep::Overloaded(method_callee)) => {
                self.confirm_overloaded_call(call_expr, arg_exprs, expected, method_callee)
            }
        };

        // we must check that return type of called functions is WF:
        self.table.register_wf_obligation(output.into(), ObligationCause::new());

        output
    }

    /// Inspects the callee type at the current autoderef step and decides how the
    /// call will be dispatched.
    ///
    /// Returns `None` when this step's type is not callable yet (so the caller's
    /// autoderef loop should continue), otherwise the [`CallStep`] to confirm.
    /// An associated fn (no `self`) because the `InferenceContext` is borrowed
    /// through `autoderef`.
    fn try_overloaded_call_step(
        call_expr: ExprId,
        callee_expr: ExprId,
        arg_exprs: &[ExprId],
        autoderef: &mut InferenceContextAutoderef<'_, '_, 'db>,
    ) -> Option<CallStep<'db>> {
        let final_ty = autoderef.final_ty();
        let adjusted_ty = autoderef.ctx().table.try_structurally_resolve_type(final_ty);

        // If the callee is a function pointer or a closure, then we're all set.
        match adjusted_ty.kind() {
            TyKind::FnDef(..) | TyKind::FnPtr(..) => {
                let adjust_steps = autoderef.adjust_steps_as_infer_ok();
                let adjustments =
                    autoderef.ctx().table.register_infer_ok(adjust_steps).into_boxed_slice();
                autoderef.ctx().write_expr_adj(callee_expr, adjustments);
                return Some(CallStep::Builtin(adjusted_ty));
            }

            // Check whether this is a call to a closure where we
            // haven't yet decided on whether the closure is fn vs
            // fnmut vs fnonce. If so, we have to defer further processing.
            TyKind::Closure(def_id, args)
                if autoderef.ctx().infcx().closure_kind(adjusted_ty).is_none() =>
            {
                let closure_sig = args.as_closure().sig();
                let closure_sig = autoderef.ctx().infcx().instantiate_binder_with_fresh_vars(
                    BoundRegionConversionTime::FnCall,
                    closure_sig,
                );
                let adjust_steps = autoderef.adjust_steps_as_infer_ok();
                let adjustments = autoderef.ctx().table.register_infer_ok(adjust_steps);
                // Map the interned closure id back to its defining expression.
                let def_id = def_id.0.loc(autoderef.ctx().db).1;
                autoderef.ctx().record_deferred_call_resolution(
                    def_id,
                    DeferredCallResolution {
                        call_expr,
                        callee_expr,
                        closure_ty: adjusted_ty,
                        adjustments,
                        fn_sig: closure_sig,
                    },
                );
                return Some(CallStep::DeferredClosure(def_id, closure_sig));
            }

            // When calling a `CoroutineClosure` that is local to the body, we will
            // not know what its `closure_kind` is yet. Instead, just fill in the
            // signature with an infer var for the `tupled_upvars_ty` of the coroutine,
            // and record a deferred call resolution which will constrain that var
            // as part of `AsyncFn*` trait confirmation.
            TyKind::CoroutineClosure(def_id, args)
                if autoderef.ctx().infcx().closure_kind(adjusted_ty).is_none() =>
            {
                let closure_args = args.as_coroutine_closure();
                let coroutine_closure_sig =
                    autoderef.ctx().infcx().instantiate_binder_with_fresh_vars(
                        BoundRegionConversionTime::FnCall,
                        closure_args.coroutine_closure_sig(),
                    );
                let tupled_upvars_ty = autoderef.ctx().table.next_ty_var();
                // We may actually receive a coroutine back whose kind is different
                // from the closure that this dispatched from. This is because when
                // we have no captures, we automatically implement `FnOnce`. This
                // impl forces the closure kind to `FnOnce` i.e. `u8`.
                let kind_ty = autoderef.ctx().table.next_ty_var();
                let interner = autoderef.ctx().interner();
                // Build the call signature: one tupled-inputs parameter, returning
                // the coroutine produced by the coroutine-closure.
                let call_sig = interner.mk_fn_sig(
                    [coroutine_closure_sig.tupled_inputs_ty],
                    coroutine_closure_sig.to_coroutine(
                        interner,
                        closure_args.parent_args(),
                        kind_ty,
                        interner.coroutine_for_closure(def_id),
                        tupled_upvars_ty,
                    ),
                    coroutine_closure_sig.c_variadic,
                    coroutine_closure_sig.safety,
                    coroutine_closure_sig.abi,
                );
                let adjust_steps = autoderef.adjust_steps_as_infer_ok();
                let adjustments = autoderef.ctx().table.register_infer_ok(adjust_steps);
                let def_id = def_id.0.loc(autoderef.ctx().db).1;
                autoderef.ctx().record_deferred_call_resolution(
                    def_id,
                    DeferredCallResolution {
                        call_expr,
                        callee_expr,
                        closure_ty: adjusted_ty,
                        adjustments,
                        fn_sig: call_sig,
                    },
                );
                return Some(CallStep::DeferredClosure(def_id, call_sig));
            }

            // Hack: we know that there are traits implementing Fn for &F
            // where F:Fn and so forth. In the particular case of types
            // like `f: &mut FnMut()`, if there is a call `f()`, we would
            // normally translate to `FnMut::call_mut(&mut f, ())`, but
            // that winds up potentially requiring the user to mark their
            // variable as `mut` which feels unnecessary and unexpected.
            //
            //     fn foo(f: &mut impl FnMut()) { f() }
            //            ^ without this hack `f` would have to be declared as mutable
            //
            // The simplest fix by far is to just ignore this case and deref again,
            // so we wind up with `FnMut::call_mut(&mut *f, ())`.
            TyKind::Ref(..) if autoderef.step_count() == 0 => {
                return None;
            }

            TyKind::Infer(InferTy::TyVar(vid))
                // If we end up with an inference variable which is not the hidden type of
                // an opaque, emit an error.
                if !autoderef.ctx().infcx().has_opaques_with_sub_unified_hidden_type(vid) => {
                autoderef
                    .ctx()
                    .type_must_be_known_at_this_point(callee_expr.into(), adjusted_ty);
                return None;
            }

            TyKind::Error(_) => {
                return None;
            }

            _ => {}
        }

        // Now, we look for the implementation of a Fn trait on the object's type.
        // We first do it with the explicit instruction to look for an impl of
        // `Fn<Tuple>`, with the tuple `Tuple` having an arity corresponding
        // to the number of call parameters.
        // If that fails (or_else branch), we try again without specifying the
        // shape of the tuple (hence the None). This allows to detect an Fn trait
        // is implemented, and use this information for diagnostic.
        autoderef
            .ctx()
            .try_overloaded_call_traits(adjusted_ty, Some(arg_exprs))
            .or_else(|| autoderef.ctx().try_overloaded_call_traits(adjusted_ty, None))
            .map(|(autoref, method)| {
                let adjustments = autoderef.adjust_steps_as_infer_ok();
                let mut adjustments = autoderef.ctx().table.register_infer_ok(adjustments);
                adjustments.extend(autoref);
                autoderef.ctx().write_expr_adj(callee_expr, adjustments.into_boxed_slice());
                CallStep::Overloaded(method)
            })
    }

    /// Tries to resolve a call through one of the `Fn*`/`AsyncFn*` traits.
    ///
    /// On success, returns the autoref adjustment needed for the receiver (for the
    /// by-ref traits) together with the resolved trait method. `opt_arg_exprs`
    /// is used only to seed the expected tuple arity with fresh inference vars.
    fn try_overloaded_call_traits(
        &mut self,
        adjusted_ty: Ty<'db>,
        opt_arg_exprs: Option<&[ExprId]>,
    ) -> Option<(Option<Adjustment>, MethodCallee<'db>)> {
        // HACK(async_closures): For async closures, prefer `AsyncFn*`
        // over `Fn*`, since all async closures implement `FnOnce`, but
        // choosing that over `AsyncFn`/`AsyncFnMut` would be more restrictive.
        // For other callables, just prefer `Fn*` for perf reasons.
        //
        // The order of trait choices here is not that big of a deal,
        // since it just guides inference (and our choice of autoref).
        // Though in the future, I'd like typeck to choose:
        // `Fn > AsyncFn > FnMut > AsyncFnMut > FnOnce > AsyncFnOnce`
        // ...or *ideally*, we just have `LendingFn`/`LendingFnMut`, which
        // would naturally unify these two trait hierarchies in the most
        // general way.
        let call_trait_choices = if self.shallow_resolve(adjusted_ty).is_coroutine_closure() {
            [
                (self.lang_items.AsyncFn, sym::async_call, true),
                (self.lang_items.AsyncFnMut, sym::async_call_mut, true),
                (self.lang_items.AsyncFnOnce, sym::async_call_once, false),
                (self.lang_items.Fn, sym::call, true),
                (self.lang_items.FnMut, sym::call_mut, true),
                (self.lang_items.FnOnce, sym::call_once, false),
            ]
        } else {
            [
                (self.lang_items.Fn, sym::call, true),
                (self.lang_items.FnMut, sym::call_mut, true),
                (self.lang_items.FnOnce, sym::call_once, false),
                (self.lang_items.AsyncFn, sym::async_call, true),
                (self.lang_items.AsyncFnMut, sym::async_call_mut, true),
                (self.lang_items.AsyncFnOnce, sym::async_call_once, false),
            ]
        };

        // Try the options that are least restrictive on the caller first.
        for (opt_trait_def_id, method_name, borrow) in call_trait_choices {
            let Some(trait_def_id) = opt_trait_def_id else {
                continue;
            };

            // One fresh type var per argument, tupled, to pin the arity.
            let opt_input_type = opt_arg_exprs.map(|arg_exprs| {
                Ty::new_tup_from_iter(
                    self.interner(),
                    arg_exprs.iter().map(|_| self.table.next_ty_var()),
                )
            });

            // We use `TreatNotYetDefinedOpaques::AsRigid` here so that if the `adjusted_ty`
            // is `Box<impl FnOnce()>` we choose `FnOnce` instead of `Fn`.
            //
            // We try all the different call traits in order and choose the first
            // one which may apply. So if we treat opaques as inference variables
            // `Box<impl FnOnce()>: Fn` is considered ambiguous and chosen.
            if let Some(ok) = self.table.lookup_method_for_operator(
                ObligationCause::new(),
                method_name,
                trait_def_id,
                adjusted_ty,
                opt_input_type,
                TreatNotYetDefinedOpaques::AsRigid,
            ) {
                let method = self.table.register_infer_ok(ok);
                let mut autoref = None;
                if borrow {
                    // Check for &self vs &mut self in the method signature. Since this is either
                    // the Fn or FnMut trait, it should be one of those.
                    let TyKind::Ref(_, _, mutbl) = method.sig.inputs_and_output.inputs()[0].kind()
                    else {
                        panic!("Expected `FnMut`/`Fn` to take receiver by-ref/by-mut")
                    };

                    // For initial two-phase borrow
                    // deployment, conservatively omit
                    // overloaded function call ops.
                    let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::No);

                    autoref = Some(Adjustment {
                        kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
                        target: method.sig.inputs_and_output.inputs()[0].store(),
                    });
                }

                return Some((autoref, method));
            }
        }

        None
    }

    /// Returns the argument indices to skip.
    ///
    /// `#[rustc_legacy_const_generics]` functions take some arguments as const
    /// generics spliced into the argument list; those argument positions must be
    /// inferred here but skipped by regular argument checking.
    fn check_legacy_const_generics(
        &mut self,
        callee: Option<CallableDefId>,
        args: &[ExprId],
    ) -> Box<[u32]> {
        let func = match callee {
            Some(CallableDefId::FunctionId(func)) => func,
            _ => return Default::default(),
        };
        let data = FunctionSignature::of(self.db, func);
        let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
        else {
            return Default::default();
        };
        let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);

        // only use legacy const generics if the param count matches with them
        if data.params.len() + legacy_const_generics_indices.len() != args.len() {
            if args.len() <= data.params.len() {
                return Default::default();
            } else {
                // there are more parameters than there should be without legacy
                // const params; use them
                legacy_const_generics_indices.sort_unstable();
                return legacy_const_generics_indices;
            }
        }

        // check legacy const parameters
        for arg_idx in legacy_const_generics_indices.iter().copied() {
            if arg_idx >= args.len() as u32 {
                continue;
            }
            let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
            self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
            // FIXME: evaluate and unify with the const
        }
        legacy_const_generics_indices.sort_unstable();
        legacy_const_generics_indices
    }

    /// Confirms a call whose callee is a plain `FnDef`/`FnPtr`: instantiates the
    /// signature, checks the arguments against it, and returns the output type.
    fn confirm_builtin_call(
        &mut self,
        call_expr: ExprId,
        callee_ty: Ty<'db>,
        arg_exprs: &[ExprId],
        expected: &Expectation<'db>,
    ) -> Ty<'db> {
        let (fn_sig, def_id) = match callee_ty.kind() {
            TyKind::FnDef(def_id, args) => {
                let fn_sig =
                    self.db.callable_item_signature(def_id.0).instantiate(self.interner(), args);
                (fn_sig, Some(def_id.0))
            }
            // FIXME(const_trait_impl): these arms should error because we can't enforce them
            TyKind::FnPtr(sig_tys, hdr) => (sig_tys.with(hdr), None),
            _ => unreachable!(),
        };

        // Replace any late-bound regions that appear in the function
        // signature with region variables. We also have to
        // renormalize the associated types at this point, since they
        // previously appeared within a `Binder<>` and hence would not
        // have been normalized before.
        let fn_sig = self
            .infcx()
            .instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, fn_sig);

        let indices_to_skip = self.check_legacy_const_generics(def_id, arg_exprs);
        self.check_call_arguments(
            call_expr,
            fn_sig.inputs(),
            fn_sig.output(),
            expected,
            arg_exprs,
            &indices_to_skip,
            fn_sig.c_variadic,
            TupleArgumentsFlag::DontTupleArguments,
        );

        // `extern "rust-call"` functions require their last argument to be a
        // (sized) tuple.
        if fn_sig.abi == FnAbi::RustCall
            && let Some(ty) = fn_sig.inputs().last().copied()
            && let Some(tuple_trait) = self.lang_items.Tuple
        {
            self.table.register_bound(ty, tuple_trait, ObligationCause::new());
            self.require_type_is_sized(ty);
        }

        fn_sig.output()
    }

    /// Confirms a call to a closure whose kind is still undecided: the argument
    /// and return types come from the closure signature, while trait-method
    /// selection happens later via [`DeferredCallResolution::resolve`].
    fn confirm_deferred_closure_call(
        &mut self,
        call_expr: ExprId,
        arg_exprs: &[ExprId],
        expected: &Expectation<'db>,
        fn_sig: FnSig<'db>,
    ) -> Ty<'db> {
        // `fn_sig` is the *signature* of the closure being called. We
        // don't know the full details yet (`Fn` vs `FnMut` etc), but we
        // do know the types expected for each argument and the return
        // type.
        self.check_call_arguments(
            call_expr,
            fn_sig.inputs(),
            fn_sig.output(),
            expected,
            arg_exprs,
            &[],
            fn_sig.c_variadic,
            TupleArgumentsFlag::TupleArguments,
        );

        fn_sig.output()
    }

    /// Confirms a call dispatched through a resolved `Fn*` trait method:
    /// the first signature input is the receiver and is skipped.
    fn confirm_overloaded_call(
        &mut self,
        call_expr: ExprId,
        arg_exprs: &[ExprId],
        expected: &Expectation<'db>,
        method: MethodCallee<'db>,
    ) -> Ty<'db> {
        self.check_call_arguments(
            call_expr,
            &method.sig.inputs()[1..],
            method.sig.output(),
            expected,
            arg_exprs,
            &[],
            method.sig.c_variadic,
            TupleArgumentsFlag::TupleArguments,
        );
        self.write_method_resolution(call_expr, method.def_id, method.args);
        method.sig.output()
    }
}
/// A call of a closure whose kind (`Fn`/`FnMut`/`FnOnce`) was still unknown at
/// the call site. Recorded during call inference and replayed (via
/// [`Self::resolve`]) once upvar inference has determined the closure kind.
#[derive(Debug, Clone)]
pub(crate) struct DeferredCallResolution<'db> {
    // The call expression `callee_expr(...)`.
    call_expr: ExprId,
    // The expression being called (receives the final adjustments).
    callee_expr: ExprId,
    // The (closure) type of the callee at the call site.
    closure_ty: Ty<'db>,
    // Autoderef adjustments accumulated before dispatch was deferred.
    adjustments: Vec<Adjustment>,
    // The closure signature the arguments were checked against.
    fn_sig: FnSig<'db>,
}
impl<'a, 'db> DeferredCallResolution<'db> {
    /// Finishes a deferred closure call once the closure kind is known:
    /// selects the concrete `Fn*` trait method, reconciles its signature with
    /// the one used at the call site, and records adjustments and the method
    /// resolution.
    pub(crate) fn resolve(self, ctx: &mut InferenceContext<'a, 'db>) {
        debug!("DeferredCallResolution::resolve() {:?}", self);

        // we should not be invoked until the closure kind has been
        // determined by upvar inference
        assert!(ctx.infcx().closure_kind(self.closure_ty).is_some());

        // We may now know enough to figure out fn vs fnmut etc.
        match ctx.try_overloaded_call_traits(self.closure_ty, None) {
            Some((autoref, method_callee)) => {
                // One problem is that when we get here, we are going
                // to have a newly instantiated function signature
                // from the call trait. This has to be reconciled with
                // the older function signature we had before. In
                // principle we *should* be able to fn_sigs(), but we
                // can't because of the annoying need for a TypeTrace.
                // (This always bites me, should find a way to
                // refactor it.)
                let method_sig = method_callee.sig;

                debug!("attempt_resolution: method_callee={:?}", method_callee);

                // Equate each trait-method input (skipping the receiver) with the
                // corresponding input of the signature used at the call site.
                for (method_arg_ty, self_arg_ty) in
                    iter::zip(method_sig.inputs().iter().skip(1), self.fn_sig.inputs())
                {
                    _ = ctx.demand_eqtype(self.call_expr.into(), *self_arg_ty, *method_arg_ty);
                }

                _ = ctx.demand_eqtype(
                    self.call_expr.into(),
                    method_sig.output(),
                    self.fn_sig.output(),
                );

                let mut adjustments = self.adjustments;
                adjustments.extend(autoref);
                ctx.write_expr_adj(self.callee_expr, adjustments.into_boxed_slice());
                ctx.write_method_resolution(
                    self.call_expr,
                    method_callee.def_id,
                    method_callee.args,
                );
            }
            None => {
                // Only acceptable failure mode: the `FnOnce` lang item itself is
                // missing from the current crate graph.
                assert!(
                    ctx.lang_items.FnOnce.is_none(),
                    "Expected to find a suitable `Fn`/`FnMut`/`FnOnce` implementation for `{:?}`",
                    self.closure_ty
                )
            }
        }
    }
}
@@ -23,14 +23,13 @@
infer::{BreakableKind, Diverges, coerce::CoerceMany},
next_solver::{
AliasTy, Binder, ClauseKind, DbInterner, ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig,
PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind, Tys,
PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind,
abi::Safety,
infer::{
BoundRegionConversionTime, InferOk, InferResult,
traits::{ObligationCause, PredicateObligations},
},
},
traits::FnTrait,
};
use super::{Expectation, InferenceContext};
@@ -78,40 +77,14 @@ pub(super) fn infer_closure(
let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into());
// FIXME: Do this when we infer closures correctly:
// let tupled_upvars_ty = self.table.next_ty_var();
let tupled_upvars_ty = self.types.types.unit;
let tupled_upvars_ty = self.table.next_ty_var();
let mut current_closure_id = None;
// FIXME: We could probably actually just unify this further --
// instead of having a `FnSig` and a `Option<CoroutineTypes>`,
// we can have a `ClosureSignature { Coroutine { .. }, Closure { .. } }`,
// similar to how `ty::GenSig` is a distinct data structure.
let (closure_ty, resume_yield_tys) = match closure_kind {
ClosureKind::Closure => {
let closure_id =
InternedClosureId::new(self.db, InternedClosure(self.owner, closure_expr));
current_closure_id = Some(closure_id);
self.deferred_closures.entry(closure_id).or_default();
self.add_current_closure_dependency(closure_id);
match expected_kind {
Some(kind) => {
self.result.closure_info.insert(
closure_id,
(
Vec::new(),
match kind {
rustc_type_ir::ClosureKind::Fn => FnTrait::Fn,
rustc_type_ir::ClosureKind::FnMut => FnTrait::FnMut,
rustc_type_ir::ClosureKind::FnOnce => FnTrait::FnOnce,
},
),
);
}
None => {}
};
// Tuple up the arguments and insert the resulting function type into
// the `closures` table.
let sig = bound_sig.map_bound(|sig| {
@@ -130,9 +103,7 @@ pub(super) fn infer_closure(
Some(kind) => Ty::from_closure_kind(interner, kind),
// Create a type variable (for now) to represent the closure kind.
// It will be unified during the upvar inference phase (`upvar.rs`)
// FIXME: This too should be the next line:
// None => self.table.next_ty_var(),
None => self.types.types.i8,
None => self.table.next_ty_var(),
};
let closure_args = ClosureArgs::new(
@@ -145,6 +116,9 @@ pub(super) fn infer_closure(
},
);
let closure_id =
InternedClosureId::new(self.db, InternedClosure(self.owner, closure_expr));
(Ty::new_closure(interner, closure_id.into(), closure_args.args), None)
}
ClosureKind::Coroutine(_) | ClosureKind::AsyncBlock { .. } => {
@@ -202,25 +176,11 @@ pub(super) fn infer_closure(
// Create a type variable (for now) to represent the closure kind.
// It will be unified during the upvar inference phase (`upvar.rs`)
// FIXME: Here again the next line should be active.
// None => self.table.next_ty_var(),
None => self.types.types.i8,
None => self.table.next_ty_var(),
};
// FIXME: Another line that should be enabled.
// let coroutine_captures_by_ref_ty = self.table.next_ty_var();
let coroutine_captures_by_ref_ty = Ty::new_fn_ptr(
interner,
Binder::bind_with_vars(
FnSig {
inputs_and_output: Tys::new_from_slice(&[self.types.types.unit]),
c_variadic: false,
safety: Safety::Safe,
abi: FnAbi::Rust,
},
self.types.coroutine_captures_by_ref_bound_var_kinds,
),
);
let coroutine_captures_by_ref_ty = self.table.next_ty_var();
let closure_args = CoroutineClosureArgs::new(
interner,
CoroutineClosureArgsParts {
@@ -254,9 +214,7 @@ pub(super) fn infer_closure(
// Create a type variable (for now) to represent the closure kind.
// It will be unified during the upvar inference phase (`upvar.rs`)
// FIXME: And here again.
// None => self.table.next_ty_var(),
None => self.types.types.i16,
None => self.table.next_ty_var(),
};
let coroutine_upvars_ty = self.table.next_ty_var();
@@ -310,7 +268,6 @@ pub(super) fn infer_closure(
// FIXME: lift these out into a struct
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_closure = mem::replace(&mut self.current_closure, current_closure_id);
let prev_ret_ty = mem::replace(&mut self.return_ty, liberated_sig.output());
let prev_ret_coercion =
self.return_coercion.replace(CoerceMany::new(liberated_sig.output()));
@@ -323,7 +280,6 @@ pub(super) fn infer_closure(
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
self.return_coercion = prev_ret_coercion;
self.current_closure = prev_closure;
self.resume_yield_tys = prev_resume_yield_tys;
closure_ty
@@ -1,1308 +1,1676 @@
//! Post-inference closure analysis: captures and closure kind.
//! ### Inferring borrow kinds for upvars
//!
//! Whenever there is a closure expression, we need to determine how each
//! upvar is used. We do this by initially assigning each upvar an
//! immutable "borrow kind" (see `BorrowKind` for details) and then
//! "escalating" the kind as needed. The borrow kind proceeds according to
//! the following lattice:
//! ```ignore (not-rust)
//! ty::ImmBorrow -> ty::UniqueImmBorrow -> ty::MutBorrow
//! ```
//! So, for example, if we see an assignment `x = 5` to an upvar `x`, we
//! will promote its borrow kind to mutable borrow. If we see an `&mut x`
//! we'll do the same. Naturally, this applies not just to the upvar, but
//! to everything owned by `x`, so the result is the same for something
//! like `x.f = 5` and so on (presuming `x` is not a borrowed pointer to a
//! struct). These adjustments are performed in
//! `adjust_for_non_move_closure` (you can trace backwards through the code
//! from there).
//!
//! The fact that we are inferring borrow kinds as we go results in a
//! semi-hacky interaction with the way `ExprUseVisitor` is computing
//! `Place`s. In particular, it will query the current borrow kind as it
//! goes, and we'll return the *current* value, but this may get
//! adjusted later. Therefore, in this module, we generally ignore the
//! borrow kind (and derived mutabilities) that `ExprUseVisitor` returns
//! within `Place`s, since they may be inaccurate. (Another option
//! would be to use a unification scheme, where instead of returning a
//! concrete borrow kind like `ty::ImmBorrow`, we return a
//! `ty::InferBorrow(upvar_id)` or something like that, but this would
//! then mean that all later passes would have to check for these figments
//! and report an error, and it just seems like more mess in the end.)
use std::{cmp, mem};
use std::{iter, mem};
use base_db::Crate;
use hir_def::{
ExpressionStoreOwnerId, FieldId, HasModule, VariantId,
expr_store::{Body, ExpressionStore, path::Path},
expr_store::ExpressionStore,
hir::{
Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
RecordSpread, Statement, UnaryOp,
BindingAnnotation, BindingId, CaptureBy, CoroutineSource, Expr, ExprId, ExprOrPatId, Pat,
PatId, Statement,
},
item_tree::FieldsShape,
resolver::ValueNs,
signatures::VariantFields,
};
use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, Ty as _};
use rustc_hash::{FxBuildHasher, FxHashMap};
use rustc_type_ir::{
BoundVar, ClosureKind, TypeVisitableExt as _,
inherent::{AdtDef as _, GenericArgs as _, IntoKind as _, Ty as _},
};
use smallvec::{SmallVec, smallvec};
use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
use span::Edition;
use tracing::{debug, instrument};
use crate::{
Adjust, Adjustment, BindingMode,
db::{HirDatabase, InternedClosure, InternedClosureId},
display::{DisplayTarget, HirDisplay as _},
infer::InferenceContext,
mir::{BorrowKind, MirSpan, MutBorrowKind},
next_solver::{
DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, StoredEarlyBinder, StoredTy, Ty,
TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
FnAbi,
infer::{
CaptureInfo, CaptureSourceStack, CapturedPlace, InferenceContext, UpvarCapture,
closure::analysis::expr_use_visitor::{
self as euv, FakeReadCause, Place, PlaceBase, PlaceWithOrigin, Projection,
ProjectionKind,
},
},
traits::FnTrait,
next_solver::{
Binder, BoundRegion, BoundRegionKind, DbInterner, GenericArgs, Region, Ty, TyKind,
abi::Safety, infer::traits::ObligationCause, normalize,
},
upvars::{Upvars, UpvarsRef},
};
// The below functions handle capture and closure kind (Fn, FnMut, ..)
pub(crate) mod expr_use_visitor;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) enum HirPlaceProjection {
Deref,
Field(FieldId),
TupleField(u32),
/// The generic args of the closure-like type whose upvars are being inferred,
/// tagged with which flavor of closure-like it is.
#[derive(Debug, Copy, Clone, TypeVisitable, TypeFoldable)]
enum UpvarArgs<'db> {
    /// An ordinary closure.
    Closure(GenericArgs<'db>),
    /// A coroutine.
    Coroutine(GenericArgs<'db>),
    /// A coroutine-closure (presumably an async closure — see `AsyncFn*` handling).
    CoroutineClosure(GenericArgs<'db>),
}
impl HirPlaceProjection {
fn projected_ty<'db>(
self,
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
mut base: Ty<'db>,
krate: Crate,
) -> Ty<'db> {
let interner = infcx.interner;
let db = interner.db;
if base.is_ty_error() {
return Ty::new_error(interner, ErrorGuaranteed);
}
if matches!(base.kind(), TyKind::Alias(..)) {
let mut ocx = ObligationCtxt::new(infcx);
match ocx.structurally_normalize_ty(&ObligationCause::dummy(), env, base) {
Ok(it) => base = it,
Err(_) => return Ty::new_error(interner, ErrorGuaranteed),
}
}
impl<'db> UpvarArgs<'db> {
#[inline]
fn tupled_upvars_ty(self) -> Ty<'db> {
match self {
HirPlaceProjection::Deref => match base.kind() {
TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner,
TyKind::Adt(adt_def, subst) if adt_def.is_box() => subst.type_at(0),
_ => {
never!(
"Overloaded deref on type {} is not a projection",
base.display(db, DisplayTarget::from_crate(db, krate))
);
Ty::new_error(interner, ErrorGuaranteed)
}
},
HirPlaceProjection::Field(f) => match base.kind() {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].get().instantiate(interner, subst)
}
ty => {
never!("Only adt has field, found {:?}", ty);
Ty::new_error(interner, ErrorGuaranteed)
}
},
HirPlaceProjection::TupleField(idx) => match base.kind() {
TyKind::Tuple(subst) => {
subst.as_slice().get(idx as usize).copied().unwrap_or_else(|| {
never!("Out of bound tuple field");
Ty::new_error(interner, ErrorGuaranteed)
})
}
ty => {
never!("Only tuple has tuple field: {:?}", ty);
Ty::new_error(interner, ErrorGuaranteed)
}
},
UpvarArgs::Closure(args) => args.as_closure().tupled_upvars_ty(),
UpvarArgs::Coroutine(args) => args.as_coroutine().tupled_upvars_ty(),
UpvarArgs::CoroutineClosure(args) => args.as_coroutine_closure().tupled_upvars_ty(),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub(crate) struct HirPlace {
pub(crate) local: BindingId,
pub(crate) projections: Vec<HirPlaceProjection>,
}
#[derive(Eq, Clone, PartialEq, Debug, Copy, Hash)]
pub enum BorrowKind {
/// Data must be immutable and is aliasable.
Immutable,
impl HirPlace {
fn ty<'db>(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
let krate = ctx.krate();
let mut ty = ctx.table.resolve_completely(ctx.result.binding_ty(self.local));
for p in &self.projections {
ty = p.projected_ty(ctx.infcx(), ctx.table.param_env, ty, krate);
}
ty
}
/// Adjusts a capture kind after this place has been truncated to its first
/// `len` projections.
///
/// If a default/two-phase mutable borrow captured through a `Deref` that was
/// truncated away, the borrow kind is rewritten to `ClosureCapture`.
fn capture_kind_of_truncated_place(
    &self,
    mut current_capture: CaptureKind,
    len: usize,
) -> CaptureKind {
    if let CaptureKind::ByRef(BorrowKind::Mut {
        kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
    }) = current_capture
        && self.projections[len..].contains(&HirPlaceProjection::Deref)
    {
        current_capture =
            CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
    }
    current_capture
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum CaptureKind {
ByRef(BorrowKind),
ByValue,
}
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
pub struct CapturedItem {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
/// Data must be immutable but not aliasable. This kind of borrow
/// cannot currently be expressed by the user and is used only in
/// implicit closure bindings. It is needed when the closure
/// is borrowing or mutating a mutable referent, e.g.:
///
/// Even though we always report only the last span (i.e. the most inclusive span),
/// we need to keep them all, since when a closure occurs inside a closure, we
/// copy all captures of the inner closure to the outer closure, and then we may
/// truncate them, and we want the correct span to be reported.
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
pub(crate) ty: StoredEarlyBinder<StoredTy>,
/// ```
/// let mut z = 3;
/// let x: &mut isize = &mut z;
/// let y = || *x += 5;
/// ```
///
/// If we were to try to translate this closure into a more explicit
/// form, we'd encounter an error with the code as written:
///
/// ```compile_fail,E0594
/// struct Env<'a> { x: &'a &'a mut isize }
/// let mut z = 3;
/// let x: &mut isize = &mut z;
/// let y = (&mut Env { x: &x }, fn_ptr); // Closure is pair of env and fn
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
/// ```
///
/// This is then illegal because you cannot mutate a `&mut` found
/// in an aliasable location. To solve, you'd have to translate with
/// an `&mut` borrow:
///
/// ```compile_fail,E0596
/// struct Env<'a> { x: &'a mut &'a mut isize }
/// let mut z = 3;
/// let x: &mut isize = &mut z;
/// let y = (&mut Env { x: &mut x }, fn_ptr); // changed from &x to &mut x
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
/// ```
///
/// Now the assignment to `**env.x` is legal, but creating a
/// mutable pointer to `x` is not because `x` is not mutable. We
/// could fix this by declaring `x` as `let mut x`. This is ok in
/// user code, if awkward, but extra weird for closures, since the
/// borrow is hidden.
///
/// So we introduce a "unique imm" borrow -- the referent is
/// immutable, but not aliasable. This solves the problem. For
/// simplicity, we don't give users the way to express this
/// borrow, it's just used when translating closures.
///
/// FIXME: Rename this to indicate the borrow is actually not immutable.
UniqueImmutable,
/// Data is mutable and not aliasable.
Mutable,
}
impl CapturedItem {
pub fn local(&self) -> BindingId {
self.place.local
impl BorrowKind {
/// Converts a HIR-level mutability into the corresponding `BorrowKind`.
pub fn from_hir_mutbl(m: hir_def::hir::type_ref::Mutability) -> BorrowKind {
    match m {
        hir_def::hir::type_ref::Mutability::Mut => BorrowKind::Mutable,
        hir_def::hir::type_ref::Mutability::Shared => BorrowKind::Immutable,
    }
}
/// Returns whether this place has any field (aka. non-deref) projections.
pub fn has_field_projections(&self) -> bool {
self.place.projections.iter().any(|it| !matches!(it, HirPlaceProjection::Deref))
/// Converts a plain `Mutability` into the corresponding `BorrowKind`.
pub fn from_mutbl(m: Mutability) -> BorrowKind {
    if matches!(m, Mutability::Mut) { BorrowKind::Mutable } else { BorrowKind::Immutable }
}
pub fn ty<'db>(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
let interner = DbInterner::new_no_crate(db);
self.ty.get().instantiate(interner, subst.as_closure().parent_args())
/// Returns a mutability `m` such that an `&m T` pointer could be used to obtain this borrow
/// kind. Because borrow kinds are richer than mutabilities, we sometimes have to pick a
/// mutability that is stronger than necessary so that it at least *would permit* the borrow in
/// question.
pub fn to_mutbl_lossy(self) -> Mutability {
    match self {
        BorrowKind::Immutable => Mutability::Not,
        // There is no type corresponding to a unique-immutable borrow, so we
        // approximate it with `&mut`: a mutable borrow grants every capability
        // a `&uniq` borrow needs, making this a safe over-approximation.
        BorrowKind::Mutable | BorrowKind::UniqueImmutable => Mutability::Mut,
    }
}
}
/// Describes the relationship between the projection paths of two places, e.g.:
/// - `foo` is an ancestor of `foo.bar.baz`
/// - `foo.bar.baz` is a descendant of `foo.bar`
/// - `foo.bar` and `foo.baz` are divergent (neither contains the other)
enum PlaceAncestryRelation {
    /// The first place's path is a strict prefix of the second's.
    Ancestor,
    /// The second place's path is a strict prefix of the first's.
    Descendant,
    /// Both places denote exactly the same path.
    SamePlace,
    /// Neither place's path contains the other's.
    Divergent,
}
/// Intermediate format to store a captured `Place` and associated `CaptureInfo`
/// during capture analysis. Information in this list feeds into the minimum-capture
/// analysis pass (`compute_min_captures`).
type InferredCaptureInformation = Vec<(Place, CaptureInfo)>;
impl<'a, 'db> InferenceContext<'a, 'db> {
/// Entry point for closure capture analysis: walks every root expression of the
/// owner's body and analyzes each closure encountered along the way.
pub(crate) fn closure_analyze(&mut self) {
    // Upvars mentioned anywhere in this body, looked up per closure expression
    // during the walk; fall back to a promoted empty map when there are none.
    let upvars = crate::upvars::upvars_mentioned(self.db, self.owner)
        .unwrap_or(const { &FxHashMap::with_hasher(FxBuildHasher) });
    for root_expr in self.store.expr_roots() {
        self.analyze_closures_in_expr(root_expr, upvars);
    }
    // it's our job to process these.
    // Every deferred call resolution must have been drained while analyzing the
    // closures above; anything left over here is a bug in the analysis.
    assert!(self.deferred_call_resolutions.is_empty());
}
/// Returns the capture kind recorded for this capture.
pub fn kind(&self) -> CaptureKind {
    self.kind
}
fn analyze_closures_in_expr(&mut self, expr: ExprId, upvars: &'db FxHashMap<ExprId, Upvars>) {
self.store.walk_child_exprs(expr, |expr| self.analyze_closures_in_expr(expr, upvars));
/// Returns the innermost (most recently pushed) span of every capture-source
/// stack. Panics if any stack is empty — stacks are expected to be non-empty.
pub fn spans(&self) -> SmallVec<[MirSpan; 3]> {
    self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
}
/// Converts the place to a name that can be inserted into source code.
pub fn place_to_name(&self, owner: ExpressionStoreOwnerId, db: &dyn HirDatabase) -> String {
let krate = owner.krate(db);
let edition = krate.data(db).edition;
let mut result = match owner {
ExpressionStoreOwnerId::Signature(generic_def_id) => {
ExpressionStore::of(db, generic_def_id.into())[self.place.local]
.name
.display(db, edition)
.to_string()
match &self.store[expr] {
Expr::Closure { args, body, closure_kind, capture_by, .. } => {
self.analyze_closure(
expr,
args,
*body,
*capture_by,
*closure_kind,
upvars.get(&expr).map(|upvars| upvars.as_ref()).unwrap_or_default(),
);
}
ExpressionStoreOwnerId::Body(def_with_body_id) => Body::of(db, def_with_body_id)
[self.place.local]
.name
.display(db, edition)
.to_string(),
ExpressionStoreOwnerId::VariantFields(variant_id) => {
let fields = VariantFields::of(db, variant_id);
fields.store[self.place.local].name.display(db, edition).to_string()
_ => {}
}
}
/// Analysis starting point.
#[instrument(skip(self, body), level = "debug")]
fn analyze_closure(
&mut self,
closure_expr_id: ExprId,
params: &[PatId],
body: ExprId,
mut capture_clause: CaptureBy,
closure_kind: hir_def::hir::ClosureKind,
upvars: UpvarsRef<'db>,
) {
// Extract the type of the closure.
let ty = self.expr_ty(closure_expr_id);
let (args, infer_kind) = match ty.kind() {
TyKind::Closure(_def_id, args) => {
(UpvarArgs::Closure(args), self.infcx().closure_kind(ty).is_none())
}
TyKind::CoroutineClosure(_def_id, args) => {
(UpvarArgs::CoroutineClosure(args), self.infcx().closure_kind(ty).is_none())
}
TyKind::Coroutine(_def_id, args) => (UpvarArgs::Coroutine(args), false),
TyKind::Error(_) => {
// #51714: skip analysis when we have already encountered type errors
return;
}
_ => {
panic!("type of closure expr {:?} is not a closure {:?}", closure_expr_id, ty);
}
};
for proj in &self.place.projections {
match proj {
HirPlaceProjection::Deref => {}
HirPlaceProjection::Field(f) => {
let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => {
result.push('_');
result.push_str(variant_data.fields()[f.local_id].name.as_str())
let args = self.infcx().resolve_vars_if_possible(args);
let mut delegate = InferBorrowKind {
closure_def_id: closure_expr_id,
capture_information: Default::default(),
fake_reads: Default::default(),
};
let _ = euv::ExprUseVisitor::new(self, closure_expr_id, upvars, &mut delegate)
.consume_closure_body(params, body);
// There are several curious situations with coroutine-closures where
// analysis is too aggressive with borrows when the coroutine-closure is
// marked `move`. Specifically:
//
// 1. If the coroutine-closure was inferred to be `FnOnce` during signature
// inference, then it's still possible that we try to borrow upvars from
// the coroutine-closure because they are not used by the coroutine body
// in a way that forces a move. See the test:
// `async-await/async-closures/force-move-due-to-inferred-kind.rs`.
//
// 2. If the coroutine-closure is forced to be `FnOnce` due to the way it
// uses its upvars (e.g. it consumes a non-copy value), but not *all* upvars
// would force the closure to `FnOnce`.
// See the test: `async-await/async-closures/force-move-due-to-actually-fnonce.rs`.
//
// This would lead to an impossible to satisfy situation, since `AsyncFnOnce`
// coroutine bodies can't borrow from their parent closure. To fix this,
// we force the inner coroutine to also be `move`. This only matters for
// coroutine-closures that are `move` since otherwise they themselves will
// be borrowing from the outer environment, so there's no self-borrows occurring.
if let UpvarArgs::Coroutine(..) = args
&& let hir_def::hir::ClosureKind::AsyncBlock { source: CoroutineSource::Closure } =
closure_kind
&& let parent_hir_id = ExpressionStore::closure_for_coroutine(closure_expr_id)
&& let parent_ty = self.result.expr_ty(parent_hir_id)
&& let Expr::Closure { capture_by: CaptureBy::Value, .. } = self.store[parent_hir_id]
{
// (1.) Closure signature inference forced this closure to `FnOnce`.
if let Some(ClosureKind::FnOnce) = self.infcx().closure_kind(parent_ty) {
capture_clause = CaptureBy::Value;
}
// (2.) The way that the closure uses its upvars means it's `FnOnce`.
else if self.coroutine_body_consumes_upvars(closure_expr_id, body, upvars) {
capture_clause = CaptureBy::Value;
}
}
// As noted in `lower_coroutine_body_with_moved_arguments`, we default the capture mode
// to `ByRef` for the `async {}` block internal to async fns/closure. This means
// that we would *not* be moving all of the parameters into the async block in all cases.
// For example, when one of the arguments is `Copy`, we turn a consuming use into a copy of
// a reference, so for `async fn x(t: i32) {}`, we'd only take a reference to `t`.
//
// We force all of these arguments to be captured by move before we do expr use analysis.
//
// FIXME(async_closures): This could be cleaned up. It's a bit janky that we're just
// moving all of the `LocalSource::AsyncFn` locals here.
if let hir_def::hir::ClosureKind::AsyncBlock {
source: CoroutineSource::Fn | CoroutineSource::Closure,
} = closure_kind
{
let Expr::Block { statements, .. } = &self.store[body] else {
panic!();
};
for stmt in statements {
let Statement::Let { pat, initializer: Some(init), .. } = *stmt else {
panic!();
};
let Pat::Bind { .. } = self.store[pat] else {
// Complex pattern, skip the non-upvar local.
continue;
};
let Expr::Path(path) = &self.store[init] else {
panic!();
};
let update_guard = self.resolver.update_to_inner_scope(self.db, self.owner, init);
let Some(ValueNs::LocalBinding(local_id)) =
self.resolver.resolve_path_in_value_ns_fully(
self.db,
path,
self.store.expr_path_hygiene(init),
)
else {
panic!();
};
self.resolver.reset_to_guard(update_guard);
let place = self.place_for_root_variable(closure_expr_id, local_id);
delegate.capture_information.push((
place,
CaptureInfo {
sources: smallvec![CaptureSourceStack::from_single(init.into())],
capture_kind: UpvarCapture::ByValue,
},
));
}
}
debug!(
"For closure={:?}, capture_information={:#?}",
closure_expr_id, delegate.capture_information
);
let (capture_information, closure_kind, _origin) = self
.process_collected_capture_information(capture_clause, &delegate.capture_information);
self.compute_min_captures(closure_expr_id, capture_information);
// We now fake capture information for all variables that are mentioned within the closure
// We do this after handling migrations so that min_captures computes before
if !enable_precise_capture(self.edition) {
let mut capture_information: InferredCaptureInformation = Default::default();
for var_hir_id in upvars.iter() {
let place = Place {
base_ty: self.result.binding_ty(var_hir_id).store(),
base: PlaceBase::Upvar { closure: closure_expr_id, var_id: var_hir_id },
projections: Vec::new(),
};
debug!("seed place {:?}", place);
let capture_kind = self.init_capture_kind_for_place(&place, capture_clause);
let fake_info = CaptureInfo { sources: SmallVec::new(), capture_kind };
capture_information.push((place, fake_info));
}
// This will update the min captures based on this new fake information.
self.compute_min_captures(closure_expr_id, capture_information);
}
if infer_kind {
// Unify the (as yet unbound) type variable in the closure
// args with the kind we inferred.
let closure_kind_ty = match args {
UpvarArgs::Closure(args) => args.as_closure().kind_ty(),
UpvarArgs::CoroutineClosure(args) => args.as_coroutine_closure().kind_ty(),
UpvarArgs::Coroutine(_) => unreachable!("coroutines don't have an inferred kind"),
};
_ = self.demand_eqtype(
closure_expr_id.into(),
Ty::from_closure_kind(self.interner(), closure_kind),
closure_kind_ty,
);
}
// For coroutine-closures, we additionally must compute the
// `coroutine_captures_by_ref_ty` type, which is used to generate the by-ref
// version of the coroutine-closure's output coroutine.
if let UpvarArgs::CoroutineClosure(args) = args
&& !args.references_error()
{
let closure_env_region: Region<'_> = Region::new_bound(
self.interner(),
rustc_type_ir::INNERMOST,
BoundRegion { var: BoundVar::ZERO, kind: BoundRegionKind::ClosureEnv },
);
let num_args = args
.as_coroutine_closure()
.coroutine_closure_sig()
.skip_binder()
.tupled_inputs_ty
.tuple_fields()
.len();
let tupled_upvars_ty_for_borrow = Ty::new_tup_from_iter(
self.interner(),
analyze_coroutine_closure_captures(
self.closure_min_captures_flattened(closure_expr_id),
self.closure_min_captures_flattened(ExpressionStore::coroutine_for_closure(
closure_expr_id,
))
// Skip the captures that are just moving the closure's args
// into the coroutine. These are always by move, and we append
// those later in the `CoroutineClosureSignature` helper functions.
.skip(num_args),
|(_, parent_capture), (_, child_capture)| {
// This is subtle. See documentation on function.
let needs_ref = should_reborrow_from_env_of_parent_coroutine_closure(
parent_capture,
child_capture,
);
let upvar_ty = child_capture.place.ty();
let capture = child_capture.info.capture_kind;
// Not all upvars are captured by ref, so use
// `apply_capture_kind_on_capture_ty` to ensure that we
// compute the right captured type.
apply_capture_kind_on_capture_ty(
self.interner(),
upvar_ty,
capture,
if needs_ref { closure_env_region } else { self.types.regions.erased },
)
},
),
);
let coroutine_captures_by_ref_ty = Ty::new_fn_ptr(
self.interner(),
Binder::bind_with_vars(
self.interner().mk_fn_sig(
[],
tupled_upvars_ty_for_borrow,
false,
Safety::Safe,
FnAbi::Rust,
),
self.types.coroutine_captures_by_ref_bound_var_kinds,
),
);
_ = self.demand_eqtype(
closure_expr_id.into(),
args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
coroutine_captures_by_ref_ty,
);
// Additionally, we can now constrain the coroutine's kind type.
//
// We only do this if `infer_kind`, because if we have constrained
// the kind from closure signature inference, the kind inferred
// for the inner coroutine may actually be more restrictive.
if infer_kind {
let TyKind::Coroutine(_, coroutine_args) = self.result.expr_ty(body).kind() else {
panic!();
};
_ = self.demand_eqtype(
closure_expr_id.into(),
coroutine_args.as_coroutine().kind_ty(),
Ty::from_coroutine_closure_kind(self.interner(), closure_kind),
);
}
}
// Now that we've analyzed the closure, we know how each
// variable is borrowed, and we know what traits the closure
// implements (Fn vs FnMut etc). We now have some updates to do
// with that information.
//
// Note that no closure type C may have an upvar of type C
// (though it may reference itself via a trait object). This
// results from the desugaring of closures to a struct like
// `Foo<..., UV0...UVn>`. If one of those upvars referenced
// C, then the type would have infinite size (and the
// inference algorithm will reject it).
// Equate the type variables for the upvars with the actual types.
let final_upvar_tys = self.final_upvar_tys(closure_expr_id);
debug!(?closure_expr_id, ?args, ?final_upvar_tys);
// Build a tuple (U0..Un) of the final upvar types U0..Un
// and unify the upvar tuple type in the closure with it:
let final_tupled_upvars_type = Ty::new_tup(self.interner(), &final_upvar_tys);
self.demand_suptype(args.tupled_upvars_ty(), final_tupled_upvars_type);
let fake_reads = delegate.fake_reads;
self.result.closures_data.entry(closure_expr_id).or_default().fake_reads =
fake_reads.into_boxed_slice();
// If we are also inferred the closure kind here,
// process any deferred resolutions.
let deferred_call_resolutions = self.remove_deferred_call_resolutions(closure_expr_id);
for deferred_call_resolution in deferred_call_resolutions {
deferred_call_resolution.resolve(self);
}
}
/// Determines whether the body of the coroutine uses its upvars in a way that
/// consumes (i.e. moves) the value, which would force the coroutine to `FnOnce`.
/// In a more detailed comment above, we care whether this happens, since if
/// this happens, we want to force the coroutine to move all of the upvars it
/// would've borrowed from the parent coroutine-closure.
///
/// This only really makes sense to be called on the child coroutine of a
/// coroutine-closure.
fn coroutine_body_consumes_upvars(
    &mut self,
    coroutine_def_id: ExprId,
    body: ExprId,
    upvars: UpvarsRef<'db>,
) -> bool {
    // Run expression-use analysis over the coroutine body alone, collecting raw
    // capture information into a fresh delegate.
    let mut borrow_delegate = InferBorrowKind {
        closure_def_id: coroutine_def_id,
        capture_information: Default::default(),
        fake_reads: Default::default(),
    };
    _ = euv::ExprUseVisitor::new(self, coroutine_def_id, upvars, &mut borrow_delegate)
        .consume_expr(body);
    // Infer the minimal closure kind implied by those captures; `FnOnce` means
    // the body consumes at least one upvar.
    let (_, inferred_kind, _) = self.process_collected_capture_information(
        CaptureBy::Ref,
        &borrow_delegate.capture_information,
    );
    matches!(inferred_kind, ClosureKind::FnOnce)
}
/// Returns the final type of each upvar, adjusted for how it is captured
/// (via `apply_capture_kind_on_capture_ty` with an erased region), in
/// minimum-capture order.
fn final_upvar_tys(&self, closure_id: ExprId) -> Vec<Ty<'db>> {
    let mut upvar_tys = Vec::new();
    for captured_place in self.closure_min_captures_flattened(closure_id) {
        let upvar_ty = captured_place.place.ty();
        let capture = captured_place.info.capture_kind;
        debug!(?captured_place.place, ?upvar_ty, ?capture, ?captured_place.mutability);
        upvar_tys.push(apply_capture_kind_on_capture_ty(
            self.interner(),
            upvar_ty,
            capture,
            self.types.regions.erased,
        ));
    }
    upvar_tys
}
/// Adjusts the closure capture information to ensure that the operations aren't unsafe,
/// and that the path can be captured with required capture kind (depending on use in closure,
/// move closure etc.)
///
/// Returns the set of adjusted information along with the inferred closure kind and span
/// associated with the closure kind inference.
///
/// Note that we *always* infer a minimal kind, even if
/// we don't always *use* that in the final result (i.e., sometimes
/// we've taken the closure kind from the expectations instead, and
/// for coroutines we don't even implement the closure traits
/// really).
///
/// If we inferred that the closure needs to be FnMut/FnOnce, last element of the returned tuple
/// contains a `Some()` with the `Place` that caused us to do so.
fn process_collected_capture_information(
&mut self,
capture_clause: CaptureBy,
capture_information: &InferredCaptureInformation,
) -> (InferredCaptureInformation, ClosureKind, Option<Place>) {
let mut closure_kind = ClosureKind::LATTICE_BOTTOM;
let mut origin: Option<Place> = None;
let processed = capture_information
.iter()
.cloned()
.map(|(place, mut capture_info)| {
// Apply rules for safety before inferring closure kind
let place = restrict_capture_precision(place, &mut capture_info);
let place = truncate_capture_for_optimization(place, &mut capture_info);
let updated = match capture_info.capture_kind {
UpvarCapture::ByValue => match closure_kind {
ClosureKind::Fn | ClosureKind::FnMut => {
(ClosureKind::FnOnce, Some(place.clone()))
}
FieldsShape::Tuple => {
let index =
variant_data.fields().iter().position(|it| it.0 == f.local_id);
if let Some(index) = index {
format_to!(result, "_{index}");
// If closure is already FnOnce, don't update
ClosureKind::FnOnce => (closure_kind, origin.take()),
},
UpvarCapture::ByRef(BorrowKind::Mutable | BorrowKind::UniqueImmutable) => {
match closure_kind {
ClosureKind::Fn => (ClosureKind::FnMut, Some(place.clone())),
// Don't update the origin
ClosureKind::FnMut | ClosureKind::FnOnce => {
(closure_kind, origin.take())
}
}
FieldsShape::Unit => {}
}
}
HirPlaceProjection::TupleField(idx) => {
format_to!(result, "_{idx}")
}
}
}
if is_raw_identifier(&result, owner.module(db).krate(db).data(db).edition) {
result.insert_str(0, "r#");
}
result
_ => (closure_kind, origin.take()),
};
closure_kind = updated.0;
origin = updated.1;
let place = match capture_clause {
CaptureBy::Value => adjust_for_move_closure(place, &mut capture_info),
CaptureBy::Ref => adjust_for_non_move_closure(place, &mut capture_info),
};
// This restriction needs to be applied after we have handled adjustments for `move`
// closures. We want to make sure any adjustment that might make us move the place into
// the closure gets handled.
let place = restrict_precision_for_drop_types(self, place, &mut capture_info);
(place, capture_info)
})
.collect();
(processed, closure_kind, origin)
}
pub fn display_place_source_code(
&self,
owner: ExpressionStoreOwnerId,
db: &dyn HirDatabase,
) -> String {
let krate = owner.krate(db);
let edition = krate.data(db).edition;
let mut result = match owner {
ExpressionStoreOwnerId::Signature(generic_def_id) => {
ExpressionStore::of(db, generic_def_id.into())[self.place.local]
.name
.display(db, edition)
.to_string()
}
ExpressionStoreOwnerId::Body(def_with_body_id) => Body::of(db, def_with_body_id)
[self.place.local]
.name
.display(db, edition)
.to_string(),
ExpressionStoreOwnerId::VariantFields(variant_id) => {
let fields = VariantFields::of(db, variant_id);
fields.store[self.place.local].name.display(db, edition).to_string()
}
};
for proj in &self.place.projections {
match proj {
// In source code autoderef kicks in.
HirPlaceProjection::Deref => {}
HirPlaceProjection::Field(f) => {
let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => format_to!(
result,
".{}",
variant_data.fields()[f.local_id].name.display(db, edition)
),
FieldsShape::Tuple => format_to!(
result,
".{}",
variant_data
.fields()
.iter()
.position(|it| it.0 == f.local_id)
.unwrap_or_default()
),
FieldsShape::Unit => {}
/// Analyzes the information collected by `InferBorrowKind` to compute the min number of
/// Places (and corresponding capture kind) that we need to keep track of to support all
/// the required captured paths.
///
///
/// Note: If this function is called multiple times for the same closure, it will update
/// the existing min_capture map that is stored in TypeckResults.
///
/// Eg:
/// ```
/// #[derive(Debug)]
/// struct Point { x: i32, y: i32 }
///
/// let s = String::from("s"); // hir_id_s
/// let mut p = Point { x: 2, y: -2 }; // his_id_p
/// let c = || {
/// println!("{s:?}"); // L1
/// p.x += 10; // L2
/// println!("{}" , p.y); // L3
/// println!("{p:?}"); // L4
/// drop(s); // L5
/// };
/// ```
/// and let hir_id_L1..5 be the expressions pointing to use of a captured variable on
/// the lines L1..5 respectively.
///
/// InferBorrowKind results in a structure like this:
///
/// ```ignore (illustrative)
/// {
/// Place(base: hir_id_s, projections: [], ....) -> {
/// capture_kind_expr: hir_id_L5,
/// path_expr_id: hir_id_L5,
/// capture_kind: ByValue
/// },
/// Place(base: hir_id_p, projections: [Field(0, 0)], ...) -> {
/// capture_kind_expr: hir_id_L2,
/// path_expr_id: hir_id_L2,
/// capture_kind: ByValue
/// },
/// Place(base: hir_id_p, projections: [Field(1, 0)], ...) -> {
/// capture_kind_expr: hir_id_L3,
/// path_expr_id: hir_id_L3,
/// capture_kind: ByValue
/// },
/// Place(base: hir_id_p, projections: [], ...) -> {
/// capture_kind_expr: hir_id_L4,
/// path_expr_id: hir_id_L4,
/// capture_kind: ByValue
/// },
/// }
/// ```
///
/// After the min capture analysis, we get:
/// ```ignore (illustrative)
/// {
/// hir_id_s -> [
/// Place(base: hir_id_s, projections: [], ....) -> {
/// capture_kind_expr: hir_id_L5,
/// path_expr_id: hir_id_L5,
/// capture_kind: ByValue
/// },
/// ],
/// hir_id_p -> [
/// Place(base: hir_id_p, projections: [], ...) -> {
/// capture_kind_expr: hir_id_L2,
/// path_expr_id: hir_id_L4,
/// capture_kind: ByValue
/// },
/// ],
/// }
/// ```
#[instrument(level = "debug", skip(self))]
fn compute_min_captures(
&mut self,
closure_def_id: ExprId,
capture_information: InferredCaptureInformation,
) {
if capture_information.is_empty() {
return;
}
let mut closure_data =
self.result.closures_data.remove(&closure_def_id).unwrap_or_default();
let root_var_min_capture_list = &mut closure_data.min_captures;
let mut dedup_sources_scratch = FxHashMap::default();
for (mut place, capture_info) in capture_information.into_iter() {
let var_hir_id = match place.base {
PlaceBase::Upvar { var_id, .. } => var_id,
base => panic!("Expected upvar, found={:?}", base),
};
let Some(min_cap_list) = root_var_min_capture_list.get_mut(&var_hir_id) else {
let mutability = self.determine_capture_mutability(&place);
let min_cap_list = vec![CapturedPlace { place, info: capture_info, mutability }];
root_var_min_capture_list.insert(var_hir_id, min_cap_list);
continue;
};
// Go through each entry in the current list of min_captures
// - if ancestor is found, update its capture kind to account for current place's
// capture information.
//
// - if descendant is found, remove it from the list, and update the current place's
// capture information to account for the descendant's capture kind.
//
// We can never be in a case where the list contains both an ancestor and a descendant
// Also there can only be ancestor but in case of descendants there might be
// multiple.
let mut descendant_found = false;
let mut updated_capture_info = capture_info;
min_cap_list.retain(|possible_descendant| {
match determine_place_ancestry_relation(&place, &possible_descendant.place) {
// current place is ancestor of possible_descendant
PlaceAncestryRelation::Ancestor => {
descendant_found = true;
let mut possible_descendant = possible_descendant.clone();
// Truncate the descendant (already in min_captures) to be same as the ancestor to handle any
// possible change in capture mode.
truncate_place_to_len_and_update_capture_kind(
&mut possible_descendant.place,
&mut possible_descendant.info,
place.projections.len(),
);
let backup_path_sources = determine_capture_sources(
&mut updated_capture_info,
&mut possible_descendant.info,
&mut dedup_sources_scratch,
);
determine_capture_info(
&mut updated_capture_info,
&mut possible_descendant.info,
);
// we need to keep the ancestor's `path_expr_id`
updated_capture_info.sources = backup_path_sources;
false
}
_ => true,
}
});
let mut ancestor_found = false;
if !descendant_found {
for possible_ancestor in min_cap_list.iter_mut() {
match determine_place_ancestry_relation(&place, &possible_ancestor.place) {
PlaceAncestryRelation::SamePlace => {
ancestor_found = true;
let backup_path_sources = determine_capture_sources(
&mut updated_capture_info,
&mut possible_ancestor.info,
&mut dedup_sources_scratch,
);
determine_capture_info(
&mut possible_ancestor.info,
&mut updated_capture_info,
);
possible_ancestor.info.sources = backup_path_sources;
// Only one related place will be in the list.
break;
}
// current place is descendant of possible_ancestor
PlaceAncestryRelation::Descendant => {
ancestor_found = true;
// Truncate the descendant (current place) to be same as the ancestor to handle any
// possible change in capture mode.
truncate_place_to_len_and_update_capture_kind(
&mut place,
&mut updated_capture_info,
possible_ancestor.place.projections.len(),
);
let backup_path_sources = determine_capture_sources(
&mut updated_capture_info,
&mut possible_ancestor.info,
&mut dedup_sources_scratch,
);
determine_capture_info(
&mut possible_ancestor.info,
&mut updated_capture_info,
);
// we need to keep the ancestor's `sources`
possible_ancestor.info.sources = backup_path_sources;
// Only one related place will be in the list.
break;
}
_ => {}
}
}
HirPlaceProjection::TupleField(idx) => {
format_to!(result, ".{idx}")
}
}
// Only need to insert when we don't have an ancestor in the existing min capture list
if !ancestor_found {
let mutability = self.determine_capture_mutability(&place);
let captured_place =
CapturedPlace { place, info: updated_capture_info, mutability };
min_cap_list.push(captured_place);
}
}
let final_derefs_count = self
debug!(
"For closure={:?}, min_captures before sorting={:?}",
closure_def_id, root_var_min_capture_list
);
// Now that we have the minimized list of captures, sort the captures by field id.
// This causes the closure to capture the upvars in the same order as the fields are
// declared which is also the drop order. Thus, in situations where we capture all the
// fields of some type, the observable drop order will remain the same as it previously
// was even though we're dropping each capture individually.
// See https://github.com/rust-lang/project-rfc-2229/issues/42 and
// `tests/ui/closures/2229_closure_analysis/preserve_field_drop_order.rs`.
for (_, captures) in &mut *root_var_min_capture_list {
captures.sort_by(|capture1, capture2| {
fn is_field(p: &&Projection) -> bool {
match p.kind {
ProjectionKind::Field { .. } => true,
ProjectionKind::Deref | ProjectionKind::UnwrapUnsafeBinder => false,
p @ (ProjectionKind::Subslice | ProjectionKind::Index) => {
panic!("ProjectionKind {:?} was unexpected", p)
}
}
}
// Need to sort only by Field projections, so filter away others.
// A previous implementation considered other projection types too
// but that caused ICE #118144
let capture1_field_projections = capture1.place.projections.iter().filter(is_field);
let capture2_field_projections = capture2.place.projections.iter().filter(is_field);
for (p1, p2) in capture1_field_projections.zip(capture2_field_projections) {
// We do not need to look at the `Projection.ty` fields here because at each
// step of the iteration, the projections will either be the same and therefore
// the types must be as well or the current projection will be different and
// we will return the result of comparing the field indexes.
match (p1.kind, p2.kind) {
(
ProjectionKind::Field { field_idx: i1, .. },
ProjectionKind::Field { field_idx: i2, .. },
) => {
// Compare only if paths are different.
// Otherwise continue to the next iteration
if i1 != i2 {
return i1.cmp(&i2);
}
}
// Given the filter above, this arm should never be hit
(l, r) => panic!("ProjectionKinds {:?} or {:?} were unexpected", l, r),
}
}
std::cmp::Ordering::Equal
});
}
debug!(
"For closure={:?}, min_captures after sorting={:#?}",
closure_def_id, root_var_min_capture_list
);
self.result.closures_data.insert(closure_def_id, closure_data);
}
/// Resolves inference variables in `place` and deeply normalizes the types it
/// contains.
///
/// In the new solver, types in HIR `Place`s can contain unnormalized aliases,
/// which can ICE later (e.g. when projecting fields for diagnostics), so we
/// normalize eagerly. On normalization failure every type in the place is
/// replaced with the error type.
fn normalize_capture_place(&self, place: Place) -> Place {
    let mut place = self.infcx().resolve_vars_if_possible(place);
    let misc_cause = ObligationCause::misc();
    let at = self.table.at(&misc_cause);
    let normalized = normalize::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
        at,
        place.clone(),
        vec![],
    );
    match normalized {
        Ok((normalized_place, _goals)) => {
            // FIXME: Insert coroutine stalled predicates (from `_goals`); this
            // matters for MIR. rustc extends
            // `typeck_results.coroutine_stalled_predicates` with
            // `(goal.predicate, cause)` pairs at this point.
            normalized_place
        }
        Err(_errors) => {
            // Normalization failed: poison every type in the place with the
            // error type so downstream consumers never see the bad aliases.
            place.base_ty = self.types.types.error.store();
            for proj in &mut place.projections {
                proj.ty = self.types.types.error.store();
            }
            place
        }
    }
}
/// Iterates over all minimum captures of the given closure, flattened across
/// its root variables. Yields nothing when no capture data was recorded for
/// the closure.
fn closure_min_captures_flattened(
    &self,
    closure_expr_id: ExprId,
) -> impl Iterator<Item = &CapturedPlace> {
    self.result
        .closures_data
        .get(&closure_expr_id)
        .into_iter()
        .flat_map(|closure_data| closure_data.min_captures.values().flatten())
}
/// Picks the initial capture kind for `place` before any use-based adjustment.
///
/// In case of a move closure, if the data is accessed through a reference we
/// want to capture by ref to allow precise capture using reborrows.
///
/// If the data will be moved out of this place, then the place will be truncated
/// at the first Deref in `adjust_for_move_closure` and then moved into the closure.
///
/// For example:
///
/// ```ignore (illustrative)
/// struct Buffer<'a> {
///     x: &'a String,
///     y: Vec<u8>,
/// }
///
/// fn get<'a>(b: Buffer<'a>) -> impl Sized + 'a {
///     let c = move || b.x;
///     drop(b);
///     c
/// }
/// ```
///
/// Even though the closure is declared as move, when we are capturing borrowed data (in
/// this case, *b.x) we prefer to capture by reference.
/// Otherwise you'd get an error in 2021 immediately because you'd be trying to take
/// ownership of the (borrowed) String or else you'd take ownership of b, as in 2018 and
/// before, which is also an error.
fn init_capture_kind_for_place(
    &self,
    place: &Place,
    capture_clause: CaptureBy,
) -> UpvarCapture {
    let reaches_through_ref = place.deref_tys().any(Ty::is_ref);
    if matches!(capture_clause, CaptureBy::Value) && !reaches_through_ref {
        UpvarCapture::ByValue
    } else {
        // Non-move closures, and places reached through a reference inside a
        // move closure, start out as shared borrows.
        UpvarCapture::ByRef(BorrowKind::Immutable)
    }
}
/// Builds the projection-free `Place` denoting the root variable `var_hir_id`
/// as an upvar of `closure_def_id`.
fn place_for_root_variable(&self, closure_def_id: ExprId, var_hir_id: BindingId) -> Place {
    let base = PlaceBase::Upvar { closure: closure_def_id, var_id: var_hir_id };
    let root_place = Place {
        base_ty: self.result.binding_ty(var_hir_id).store(),
        base,
        projections: Default::default(),
    };
    // Normalize eagerly when inserting into `capture_information`, so all downstream
    // capture analysis can assume a normalized `Place`.
    self.normalize_capture_place(root_place)
}
/// A captured place is mutable if
/// 1. Projections don't include a Deref of an immut-borrow, **and**
/// 2. PlaceBase is mut or projections include a Deref of a mut-borrow.
fn determine_capture_mutability(&mut self, place: &Place) -> Mutability {
    let PlaceBase::Upvar { var_id, .. } = place.base else { unreachable!() };
    // Start from the mutability of the root binding itself.
    let mut mutability = match self.store[var_id].mode {
        BindingAnnotation::Mutable => Mutability::Mut,
        _ => Mutability::Not,
    };
    for deref_ty in place.deref_tys() {
        match self.table.structurally_resolve_type(deref_ty).kind() {
            // We don't capture derefs of raw ptrs
            TyKind::RawPtr(_, _) => unreachable!(),
            // Dereferencing a mut-ref allows us to mut the Place if we don't deref
            // an immut-ref after on top of this.
            TyKind::Ref(.., Mutability::Mut) => mutability = Mutability::Mut,
            // The place isn't mutable once we dereference an immutable reference.
            TyKind::Ref(.., Mutability::Not) => return Mutability::Not,
            // Dereferencing a box doesn't change mutability
            TyKind::Adt(def, ..) if def.is_box() => {}
            unexpected_ty => panic!("deref of unexpected pointer type {:?}", unexpected_ty),
        }
    }
    mutability
}
}
/// Determines whether a child capture that is derived from a parent capture
/// should be borrowed with the lifetime of the parent coroutine-closure's env.
///
/// There are two cases when this needs to happen:
///
/// (1.) Are we borrowing data owned by the parent closure? We can determine if
/// that is the case by checking if the parent capture is by move, EXCEPT if we
/// apply a deref projection of an immutable reference, reborrows of immutable
/// references which aren't restricted to the LUB of the lifetimes of the deref
/// chain. This is why `&'short mut &'long T` can be reborrowed as `&'long T`.
///
/// ```rust
/// let x = &1i32; // Let's call this lifetime `'1`.
/// let c = async move || {
/// println!("{:?}", *x);
/// // Even though the inner coroutine borrows by ref, we're only capturing `*x`,
/// // not `x`, so the inner closure is allowed to reborrow the data for `'1`.
/// };
/// ```
///
/// (2.) If a coroutine is mutably borrowing from a parent capture, then that
/// mutable borrow cannot live for longer than either the parent *or* the borrow
/// that we have on the original upvar. Therefore we always need to borrow the
/// child capture with the lifetime of the parent coroutine-closure's env.
///
/// ```rust
/// let mut x = 1i32;
/// let c = async || {
/// x = 1;
/// // The parent borrows `x` for some `&'1 mut i32`.
/// // However, when we call `c()`, we implicitly autoref for the signature of
/// // `AsyncFnMut::async_call_mut`. Let's call that lifetime `'call`. Since
/// // the maximum that `&'call mut &'1 mut i32` can be reborrowed is `&'call mut i32`,
/// // the inner coroutine should capture w/ the lifetime of the coroutine-closure.
/// };
/// ```
///
/// If either of these cases apply, then we should capture the borrow with the
/// lifetime of the parent coroutine-closure's env. Luckily, if this function is
/// not correct, then the program is not unsound, since we still borrowck and validate
/// the choices made from this function -- the only side-effect is that the user
/// may receive unnecessary borrowck errors.
fn should_reborrow_from_env_of_parent_coroutine_closure(
parent_capture: &CapturedPlace,
child_capture: &CapturedPlace,
) -> bool {
// (1.)
(!parent_capture.is_by_ref()
// This is just inlined `place.deref_tys()` but truncated to just
// the child projections. Namely, look for a `&T` deref, since we
// can always extend `&'short mut &'long T` to `&'long T`.
&& !child_capture
.place
.projections
.iter()
.rev()
.take_while(|proj| matches!(proj, HirPlaceProjection::Deref))
.count();
result.insert_str(0, &"*".repeat(final_derefs_count));
result
}
    /// Renders this capture's place as source-like text (e.g. `(*self).field.0`)
    /// for display purposes.
    pub fn display_place(&self, owner: ExpressionStoreOwnerId, db: &dyn HirDatabase) -> String {
        let krate = owner.krate(db);
        let edition = krate.data(db).edition;
        // Start with the name of the captured local, looked up in whichever
        // kind of expression store owns the body.
        let mut result = match owner {
            ExpressionStoreOwnerId::Signature(generic_def_id) => {
                ExpressionStore::of(db, generic_def_id.into())[self.place.local]
                    .name
                    .display(db, edition)
                    .to_string()
            }
            ExpressionStoreOwnerId::Body(def_with_body_id) => Body::of(db, def_with_body_id)
                [self.place.local]
                .name
                .display(db, edition)
                .to_string(),
            ExpressionStoreOwnerId::VariantFields(variant_id) => {
                let fields = VariantFields::of(db, variant_id);
                fields.store[self.place.local].name.display(db, edition).to_string()
            }
        };
        // A deref must be parenthesized before a subsequent field access: `(*x).f`.
        let mut field_need_paren = false;
        for proj in &self.place.projections {
            match proj {
                HirPlaceProjection::Deref => {
                    result = format!("*{result}");
                    field_need_paren = true;
                }
                HirPlaceProjection::Field(f) => {
                    if field_need_paren {
                        result = format!("({result})");
                    }
                    let variant_data = f.parent.fields(db);
                    // Record fields print by name, tuple fields by position.
                    let field = match variant_data.shape {
                        FieldsShape::Record => {
                            variant_data.fields()[f.local_id].name.as_str().to_owned()
                        }
                        FieldsShape::Tuple => variant_data
                            .fields()
                            .iter()
                            .position(|it| it.0 == f.local_id)
                            .unwrap_or_default()
                            .to_string(),
                        FieldsShape::Unit => "[missing field]".to_owned(),
                    };
                    result = format!("{result}.{field}");
                    field_need_paren = false;
                }
                HirPlaceProjection::TupleField(idx) => {
                    if field_need_paren {
                        result = format!("({result})");
                    }
                    result = format!("{result}.{idx}");
                    field_need_paren = false;
                }
            }
        }
        result
    }
.enumerate()
.skip(parent_capture.place.projections.len())
.any(|(idx, proj)| {
matches!(proj.kind, ProjectionKind::Deref)
&& matches!(
child_capture.place.ty_before_projection(idx).kind(),
TyKind::Ref(.., Mutability::Not)
)
}))
// (2.)
|| matches!(child_capture.info.capture_kind, UpvarCapture::ByRef(BorrowKind::Mutable))
}
/// A capture collected during closure analysis, before its type has been
/// computed.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct CapturedItemWithoutTy {
    /// The place being captured.
    pub(crate) place: HirPlace,
    /// How the place is captured (by value or by some kind of borrow).
    pub(crate) kind: CaptureKind,
    /// The inner vec is the stacks; the outer vec is for each capture reference.
    pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
}
/// Truncate the capture so that the place being borrowed is in accordance with RFC 1240,
/// which states that it's unsafe to take a reference into a struct marked `repr(packed)`.
fn restrict_repr_packed_field_ref_capture(
mut place: Place,
capture_info: &mut CaptureInfo,
) -> Place {
let pos = place.projections.iter().enumerate().position(|(i, p)| {
let ty = place.ty_before_projection(i);
impl CapturedItemWithoutTy {
    /// Computes the capture's type and converts `self` into a full `CapturedItem`.
    fn with_ty(self, ctx: &mut InferenceContext<'_, '_>) -> CapturedItem {
        let ty = self.place.ty(ctx);
        // By-ref captures wrap the place's type in a reference of the borrow's
        // mutability; the region used is the error region.
        let ty = match &self.kind {
            CaptureKind::ByValue => ty,
            CaptureKind::ByRef(bk) => {
                let m = match bk {
                    BorrowKind::Mut { .. } => Mutability::Mut,
                    _ => Mutability::Not,
                };
                Ty::new_ref(ctx.interner(), ctx.types.regions.error, ty, m)
            }
        };
        CapturedItem {
            place: self.place,
            kind: self.kind,
            span_stacks: self.span_stacks,
            ty: StoredEarlyBinder::bind(ty.store()),
        }
    }
}
// Return true for fields of packed structs.
match p.kind {
ProjectionKind::Field { .. } => match ty.kind() {
TyKind::Adt(def, _) if def.repr().packed() => {
// We stop here regardless of field alignment. Field alignment can change as
// types change, including the types of private fields in other crates, and that
// shouldn't affect how we compute our captures.
true
}
impl<'db> InferenceContext<'_, 'db> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
let r = self.place_of_expr_without_adjust(tgt_expr)?;
let adjustments =
self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
}
/// Pushes the span into `current_capture_span_stack`, *without clearing it first*.
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace> {
if path.type_anchor().is_some() {
return None;
}
let hygiene = self.store.expr_or_pat_path_hygiene(id);
self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| {
match result {
ValueNs::LocalBinding(binding) => {
let mir_span = match id {
ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
ExprOrPatId::PatId(id) => MirSpan::PatId(id),
};
self.current_capture_span_stack.push(mir_span);
Some(HirPlace { local: binding, projections: Vec::new() })
}
_ => None,
}
})
}
    /// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
    ///
    /// Returns the place of `tgt_expr` ignoring any adjustments; only paths,
    /// field accesses and built-in derefs form places.
    fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
        self.current_capture_span_stack.clear();
        match &self.store[tgt_expr] {
            Expr::Path(p) => {
                // Resolve in the scope of the expression itself so shadowed
                // bindings resolve correctly.
                let resolver_guard =
                    self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
                let result = self.path_place(p, tgt_expr.into());
                self.resolver.reset_to_guard(resolver_guard);
                return result;
            }
            Expr::Field { expr, name: _ } => {
                let mut place = self.place_of_expr(*expr)?;
                let field = self.result.field_resolution(tgt_expr)?;
                self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
                place.projections.push(field.either(HirPlaceProjection::Field, |f| {
                    HirPlaceProjection::TupleField(f.index)
                }));
                return Some(place);
            }
            Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
                // Only built-in derefs (references, raw pointers, `Box`) extend
                // a place; overloaded `Deref` goes through a method call.
                let is_builtin_deref = match self.expr_ty(*expr).kind() {
                    TyKind::Ref(..) | TyKind::RawPtr(..) => true,
                    TyKind::Adt(adt_def, _) if adt_def.is_box() => true,
                    _ => false,
                };
                if is_builtin_deref {
                    let mut place = self.place_of_expr(*expr)?;
                    self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
                    place.projections.push(HirPlaceProjection::Deref);
                    return Some(place);
                }
            }
            _ => (),
        }
        None
    }
fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
self.current_captures.push(CapturedItemWithoutTy {
place,
kind,
span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()],
});
}
    /// Truncates each of the capture's span stacks so that it keeps at most
    /// `truncate_to` non-ref spans after the leading identifier span, plus
    /// possibly one trailing ref-operator span.
    fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
        // The first span is the identifier, and it must always remain.
        truncate_to += 1;
        for span_stack in &mut capture.span_stacks {
            let mut remained = truncate_to;
            let mut actual_truncate_to = 0;
            // Count spans until `truncate_to` non-ref spans have been kept.
            for &span in &*span_stack {
                actual_truncate_to += 1;
                if !span.is_ref_span(self.store) {
                    remained -= 1;
                    if remained == 0 {
                        break;
                    }
                }
            }
            if actual_truncate_to < span_stack.len()
                && span_stack[actual_truncate_to].is_ref_span(self.store)
            {
                // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect.
                actual_truncate_to += 1;
            }
            span_stack.truncate(actual_truncate_to);
        }
    }
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
}
self.walk_expr(expr);
}
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
if self.is_upvar(&place) {
self.push_capture(place, kind);
}
}
fn mutate_path_pat(&mut self, path: &Path, id: PatId) {
if let Some(place) = self.path_place(path, id.into()) {
self.add_capture(
place,
CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
);
self.current_capture_span_stack.pop(); // Remove the pattern span.
}
}
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(
place,
CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
);
}
self.walk_expr(expr);
}
fn consume_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
self.consume_place(place);
}
self.walk_expr(expr);
}
fn consume_place(&mut self, place: HirPlace) {
if self.is_upvar(&place) {
let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) {
CaptureKind::ByRef(BorrowKind::Shared)
} else {
CaptureKind::ByValue
};
self.push_capture(place, kind);
}
}
    /// Walks `tgt_expr` while taking the (not yet applied) adjustments into
    /// account, peeling off the last adjustment first.
    fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
        if let Some((last, rest)) = adjustment.split_last() {
            match &last.kind {
                // These adjustments don't capture anything by themselves.
                Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
                    self.walk_expr_with_adjust(tgt_expr, rest)
                }
                // Overloaded deref: borrows the adjusted place.
                Adjust::Deref(Some(m)) => match m.0 {
                    Some(m) => {
                        self.ref_capture_with_adjusts(m, tgt_expr, rest);
                    }
                    None => unreachable!(),
                },
                Adjust::Borrow(b) => {
                    self.ref_capture_with_adjusts(b.mutability(), tgt_expr, rest);
                }
            }
        } else {
            // No adjustments left — walk the bare expression.
            self.walk_expr_without_adjust(tgt_expr);
        }
    }
    /// Records a by-ref capture (of mutability `m`) of `tgt_expr`'s place after
    /// applying the remaining adjustments `rest`, then continues the walk.
    fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
        let capture_kind = match m {
            Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
            Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
        };
        if let Some(place) = self.place_of_expr_without_adjust(tgt_expr)
            && let Some(place) =
                apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
        {
            self.add_capture(place, capture_kind);
        }
        self.walk_expr_with_adjust(tgt_expr, rest);
    }
    /// Walks `tgt_expr`, honoring its recorded adjustments when there are any.
    fn walk_expr(&mut self, tgt_expr: ExprId) {
        if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
            // FIXME: this take is completely unneeded, and just is here to make borrow checker
            // happy. Remove it if you can.
            let x_taken = mem::take(it);
            self.walk_expr_with_adjust(tgt_expr, &x_taken);
            *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
        } else {
            self.walk_expr_without_adjust(tgt_expr);
        }
    }
    /// The core of the capture walk: dispatches on the expression kind and
    /// records consumes/borrows/mutations of upvar places accordingly,
    /// ignoring any adjustments recorded for `tgt_expr` itself.
    fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) {
        match &self.store[tgt_expr] {
            Expr::OffsetOf(_) => (),
            Expr::InlineAsm(e) => e.operands.iter().for_each(|(_, op)| match op {
                AsmOperand::In { expr, .. }
                | AsmOperand::Out { expr: Some(expr), .. }
                | AsmOperand::InOut { expr, .. } => self.walk_expr_without_adjust(*expr),
                AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
                    self.walk_expr_without_adjust(*in_expr);
                    if let Some(out_expr) = out_expr {
                        self.walk_expr_without_adjust(*out_expr);
                    }
                }
                AsmOperand::Out { expr: None, .. }
                | AsmOperand::Const(_)
                | AsmOperand::Label(_)
                | AsmOperand::Sym(_) => (),
            }),
            Expr::If { condition, then_branch, else_branch } => {
                self.consume_expr(*condition);
                self.consume_expr(*then_branch);
                if let &Some(expr) = else_branch {
                    self.consume_expr(expr);
                }
            }
            Expr::Unsafe { statements, tail, .. } | Expr::Block { statements, tail, .. } => {
                for s in statements.iter() {
                    match s {
                        Statement::Let { pat, type_ref: _, initializer, else_branch } => {
                            if let Some(else_branch) = else_branch {
                                self.consume_expr(*else_branch);
                            }
                            if let Some(initializer) = initializer {
                                // With a `let … else`, the initializer value may
                                // escape through the else branch, so it is consumed.
                                if else_branch.is_some() {
                                    self.consume_expr(*initializer);
                                } else {
                                    self.walk_expr(*initializer);
                                }
                                if let Some(place) = self.place_of_expr(*initializer) {
                                    self.consume_with_pat(place, *pat);
                                }
                            }
                        }
                        Statement::Expr { expr, has_semi: _ } => {
                            self.consume_expr(*expr);
                        }
                        Statement::Item(_) => (),
                    }
                }
                if let Some(tail) = tail {
                    self.consume_expr(*tail);
                }
            }
            Expr::Call { callee, args } => {
                self.consume_expr(*callee);
                self.consume_exprs(args.iter().copied());
            }
            Expr::MethodCall { receiver, args, .. } => {
                self.consume_expr(*receiver);
                self.consume_exprs(args.iter().copied());
            }
            Expr::Match { expr, arms } => {
                for arm in arms.iter() {
                    self.consume_expr(arm.expr);
                    if let Some(guard) = arm.guard {
                        self.consume_expr(guard);
                    }
                }
                self.walk_expr(*expr);
                // The discriminant place is captured with the strongest mode
                // any arm pattern requires.
                if let Some(discr_place) = self.place_of_expr(*expr)
                    && self.is_upvar(&discr_place)
                {
                    let mut capture_mode = None;
                    for arm in arms.iter() {
                        self.walk_pat(&mut capture_mode, arm.pat);
                    }
                    if let Some(c) = capture_mode {
                        self.push_capture(discr_place, c);
                    }
                }
            }
            Expr::Break { expr, label: _ }
            | Expr::Return { expr }
            | Expr::Yield { expr }
            | Expr::Yeet { expr } => {
                if let &Some(expr) = expr {
                    self.consume_expr(expr);
                }
            }
            &Expr::Become { expr } => {
                self.consume_expr(expr);
            }
            Expr::RecordLit { fields, spread, .. } => {
                if let RecordSpread::Expr(expr) = *spread {
                    self.consume_expr(expr);
                }
                self.consume_exprs(fields.iter().map(|it| it.expr));
            }
            Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
            Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
                if self.result.method_resolution(tgt_expr).is_some() {
                    // Overloaded deref.
                    match self.expr_ty_after_adjustments(*expr).kind() {
                        TyKind::Ref(_, _, mutability) => {
                            let place = self.place_of_expr(*expr);
                            match mutability {
                                Mutability::Mut => self.mutate_expr(*expr, place),
                                Mutability::Not => self.ref_expr(*expr, place),
                            }
                        }
                        // FIXME: Is this correct wrt. raw pointer derefs?
                        TyKind::RawPtr(..) => self.select_from_expr(*expr),
                        _ => never!("deref adjustments should include taking a mutable reference"),
                    }
                } else {
                    self.select_from_expr(*expr);
                }
            }
            Expr::Let { pat, expr } => {
                self.walk_expr(*expr);
                if let Some(place) = self.place_of_expr(*expr) {
                    self.consume_with_pat(place, *pat);
                }
            }
            Expr::UnaryOp { expr, op: _ }
            | Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
            | Expr::Await { expr }
            | Expr::Loop { body: expr, label: _ }
            | Expr::Box { expr }
            | Expr::Cast { expr, type_ref: _ } => {
                self.consume_expr(*expr);
            }
            Expr::Ref { expr, rawness: _, mutability } => {
                // We need to do this before we push the span so the order will be correct.
                let place = self.place_of_expr(*expr);
                self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
                match mutability {
                    hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place),
                    hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place),
                }
            }
            Expr::BinaryOp { lhs, rhs, op } => {
                let Some(op) = op else {
                    return;
                };
                // Assignment mutates the LHS place and consumes the RHS value.
                if matches!(op, BinaryOp::Assignment { .. }) {
                    let place = self.place_of_expr(*lhs);
                    self.mutate_expr(*lhs, place);
                    self.consume_expr(*rhs);
                    return;
                }
                self.consume_expr(*lhs);
                self.consume_expr(*rhs);
            }
            Expr::Range { lhs, rhs, range_type: _ } => {
                if let &Some(expr) = lhs {
                    self.consume_expr(expr);
                }
                if let &Some(expr) = rhs {
                    self.consume_expr(expr);
                }
            }
            Expr::Index { base, index } => {
                self.select_from_expr(*base);
                self.consume_expr(*index);
            }
            Expr::Closure { .. } => {
                let ty = self.expr_ty(tgt_expr);
                let TyKind::Closure(id, _) = ty.kind() else {
                    // A coroutine or a coroutine closure.
                    return;
                };
                // Captures of a nested closure that refer to our own upvars
                // become captures of this closure too.
                let (captures, _) =
                    self.result.closure_info.get(&id.0).expect(
                        "We sort closures, so we should always have data for inner closures",
                    );
                let mut cc = mem::take(&mut self.current_captures);
                cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
                    CapturedItemWithoutTy {
                        place: it.place.clone(),
                        kind: it.kind,
                        span_stacks: it.span_stacks.clone(),
                    }
                }));
                self.current_captures = cc;
            }
            Expr::Array(Array::ElementList { elements: exprs }) | Expr::Tuple { exprs } => {
                self.consume_exprs(exprs.iter().copied())
            }
            &Expr::Assignment { target, value } => {
                self.walk_expr(value);
                let resolver_guard =
                    self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
                match self.place_of_expr(value) {
                    Some(rhs_place) => {
                        self.inside_assignment = true;
                        self.consume_with_pat(rhs_place, target);
                        self.inside_assignment = false;
                    }
                    // No RHS place: every path/place in the target is mutated.
                    None => self.store.walk_pats(target, &mut |pat| match &self.store[pat] {
                        Pat::Path(path) => self.mutate_path_pat(path, pat),
                        &Pat::Expr(expr) => {
                            let place = self.place_of_expr(expr);
                            self.mutate_expr(expr, place);
                        }
                        _ => {}
                    }),
                }
                self.resolver.reset_to_guard(resolver_guard);
            }
            Expr::Missing
            | Expr::Continue { .. }
            | Expr::Path(_)
            | Expr::Literal(_)
            | Expr::Const(_)
            | Expr::Underscore => (),
        }
    }
    /// Computes the strongest capture kind required to match `pat` against the
    /// discriminant place, folding over all sub-patterns with `max`.
    fn walk_pat(&mut self, result: &mut Option<CaptureKind>, pat: PatId) {
        let mut update_result = |ck: CaptureKind| match result {
            Some(r) => {
                *r = cmp::max(*r, ck);
            }
            None => *result = Some(ck),
        };
        self.walk_pat_inner(
            pat,
            &mut update_result,
            BorrowKind::Mut { kind: MutBorrowKind::Default },
        );
    }
fn walk_pat_inner(
&mut self,
p: PatId,
update_result: &mut impl FnMut(CaptureKind),
mut for_mut: BorrowKind,
) {
match &self.store[p] {
Pat::Ref { .. }
| Pat::Box { .. }
| Pat::Missing
| Pat::Wild
| Pat::Tuple { .. }
| Pat::Expr(_)
| Pat::Or(_) => (),
Pat::TupleStruct { .. } | Pat::Record { .. } => {
if let Some(variant) = self.result.variant_resolution_for_pat(p) {
let adt = variant.adt_id(self.db);
let is_multivariant = match adt {
hir_def::AdtId::EnumId(e) => e.enum_variants(self.db).variants.len() != 1,
_ => false,
};
if is_multivariant {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
}
}
Pat::Slice { .. }
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_)
| Pat::Range { .. } => {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.binding_ty(*id)) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
} else {
update_result(CaptureKind::ByValue);
}
}
crate::BindingMode::Ref(r) => match r {
Mutability::Mut => update_result(CaptureKind::ByRef(for_mut)),
Mutability::Not => update_result(CaptureKind::ByRef(BorrowKind::Shared)),
},
_ => false,
},
_ => false,
}
if self.result.pat_adjustments.get(&p).is_some_and(|it| !it.is_empty()) {
for_mut = BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture };
}
self.store.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
});
if let Some(pos) = pos {
truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, pos);
}
fn is_upvar(&self, place: &HirPlace) -> bool {
if let Some(c) = self.current_closure {
let InternedClosure(_, root) = c.loc(self.db);
return self.store.is_binding_upvar(place.local, root);
place
}
/// Returns a Ty that applies the specified capture kind on the provided capture Ty.
///
/// By-value and by-use captures keep the type as-is; by-ref captures wrap it in
/// a reference of the borrow's (lossy) mutability in the given region.
fn apply_capture_kind_on_capture_ty<'db>(
    interner: DbInterner<'db>,
    ty: Ty<'db>,
    capture_kind: UpvarCapture,
    region: Region<'db>,
) -> Ty<'db> {
    if let UpvarCapture::ByRef(kind) = capture_kind {
        Ty::new_ref(interner, region, ty, kind.to_mutbl_lossy())
    } else {
        ty
    }
}
/// Expression-use-visit delegate that collects, for every upvar place, the
/// minimal capture kind the closure body requires.
struct InferBorrowKind {
    // The def-id of the closure whose kind and upvar accesses are being inferred.
    closure_def_id: ExprId,
    /// For each Place that is captured by the closure, we track the minimal kind of
    /// access we need (ref, ref mut, move, etc) and the expression that resulted in such access.
    ///
    /// Consider closure where s.str1 is captured via an ImmutableBorrow and
    /// s.str2 via a MutableBorrow
    ///
    /// ```rust,no_run
    /// struct SomeStruct { str1: String, str2: String };
    ///
    /// // Assume that the HirId for the variable definition is `V1`
    /// let mut s = SomeStruct { str1: format!("s1"), str2: format!("s2") };
    ///
    /// let fix_s = |new_s2| {
    ///     // Assume that the HirId for the expression `s.str1` is `E1`
    ///     println!("Updating SomeStruct with str1={0}", s.str1);
    ///     // Assume that the HirId for the expression `*s.str2` is `E2`
    ///     s.str2 = new_s2;
    /// };
    /// ```
    ///
    /// For closure `fix_s`, (at a high level) the map contains
    ///
    /// ```ignore (illustrative)
    /// Place { V1, [ProjectionKind::Field(Index=0, Variant=0)] } : CaptureKind { E1, ImmutableBorrow }
    /// Place { V1, [ProjectionKind::Field(Index=1, Variant=0)] } : CaptureKind { E2, MutableBorrow }
    /// ```
    capture_information: InferredCaptureInformation,
    /// Places recorded via `fake_read`, together with their cause and source stacks.
    fake_reads: Vec<(Place, FakeReadCause, SmallVec<[CaptureSourceStack; 2]>)>,
}
impl<'db> euv::Delegate<'db> for InferBorrowKind {
    /// Records a fake read of an upvar place, restricting its precision the
    /// same way real captures are restricted.
    #[instrument(skip(self), level = "debug")]
    fn fake_read(
        &mut self,
        place_with_id: PlaceWithOrigin,
        cause: FakeReadCause,
        ctx: &mut InferenceContext<'_, 'db>,
    ) {
        // Only upvar places are relevant for capture inference.
        let PlaceBase::Upvar { .. } = place_with_id.place.base else { return };
        // We need to restrict Fake Read precision to avoid fake reading unsafe code,
        // such as deref of a raw pointer.
        let dummy_capture_kind = UpvarCapture::ByRef(BorrowKind::Immutable);
        let mut dummy_capture_info =
            CaptureInfo { sources: SmallVec::new(), capture_kind: dummy_capture_kind };
        let place = ctx.normalize_capture_place(place_with_id.place.clone());
        let place = restrict_capture_precision(place, &mut dummy_capture_info);
        // Reset the dummy kind in case the restriction helper changed it.
        dummy_capture_info.capture_kind = dummy_capture_kind;
        let place = restrict_repr_packed_field_ref_capture(place, &mut dummy_capture_info);
        self.fake_reads.push((place, cause, place_with_id.origins));
    }
    /// Records a by-value (move) capture of an upvar place.
    #[instrument(skip(self), level = "debug")]
    fn consume(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        let PlaceBase::Upvar { closure: upvar_closure, .. } = place_with_id.place.base else {
            return;
        };
        // Upvar accesses delegated here must belong to the closure being inferred.
        assert_eq!(self.closure_def_id, upvar_closure);
        let place = ctx.normalize_capture_place(place_with_id.place.clone());
        self.capture_information.push((
            place,
            CaptureInfo { sources: place_with_id.origins, capture_kind: UpvarCapture::ByValue },
        ));
    }
    /// Records a `ByUse` capture of an upvar place.
    #[instrument(skip(self), level = "debug")]
    fn use_cloned(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        let PlaceBase::Upvar { closure: upvar_closure, .. } = place_with_id.place.base else {
            return;
        };
        // Upvar accesses delegated here must belong to the closure being inferred.
        assert_eq!(self.closure_def_id, upvar_closure);
        let place = ctx.normalize_capture_place(place_with_id.place.clone());
        self.capture_information.push((
            place,
            CaptureInfo { sources: place_with_id.origins, capture_kind: UpvarCapture::ByUse },
        ));
    }
#[instrument(skip(self), level = "debug")]
fn borrow(
&mut self,
place_with_id: PlaceWithOrigin,
bk: BorrowKind,
ctx: &mut InferenceContext<'_, 'db>,
) {
let PlaceBase::Upvar { closure: upvar_closure, .. } = place_with_id.place.base else {
return;
};
assert_eq!(self.closure_def_id, upvar_closure);
// The region here will get discarded/ignored
let capture_kind = UpvarCapture::ByRef(bk);
let mut capture_info =
CaptureInfo { sources: place_with_id.origins.iter().cloned().collect(), capture_kind };
let place = ctx.normalize_capture_place(place_with_id.place.clone());
// We only want repr packed restriction to be applied to reading references into a packed
// struct, and not when the data is being moved. Therefore we call this method here instead
// of in `restrict_capture_precision`.
let place = restrict_repr_packed_field_ref_capture(place, &mut capture_info);
// Raw pointers don't inherit mutability
if place.deref_tys().any(Ty::is_raw_ptr) {
capture_info.capture_kind = UpvarCapture::ByRef(BorrowKind::Immutable);
}
false
self.capture_information.push((place, capture_info));
}
fn is_ty_copy(&mut self, ty: Ty<'db>) -> bool {
if let TyKind::Closure(id, _) = ty.kind() {
// FIXME: We handle closure as a special case, since chalk consider every closure as copy. We
// should probably let chalk know which closures are copy, but I don't know how doing it
// without creating query cycles.
return self
.result
.closure_info
.get(&id.0)
.map(|it| it.1 == FnTrait::Fn)
.unwrap_or(true);
}
let ty = self.table.resolve_completely(ty);
self.table.type_is_copy_modulo_regions(ty)
    /// A mutation is recorded as a mutable borrow of the assignee place.
    #[instrument(skip(self), level = "debug")]
    fn mutate(&mut self, assignee_place: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        self.borrow(assignee_place, BorrowKind::Mutable, ctx);
    }
}
    /// Walks an expression whose place is only projected from (e.g. the base of
    /// a field access or index), without consuming the whole place itself.
    fn select_from_expr(&mut self, expr: ExprId) {
        self.walk_expr(expr);
    }
/// Rust doesn't permit moving fields out of a type that implements drop
#[instrument(skip(fcx), ret, level = "debug")]
fn restrict_precision_for_drop_types<'a, 'db>(
fcx: &mut InferenceContext<'a, 'db>,
mut place: Place,
capture_info: &mut CaptureInfo,
) -> Place {
let is_copy_type = fcx.infcx().type_is_copy_modulo_regions(fcx.table.param_env, place.ty());
fn restrict_precision_for_unsafe(&mut self) {
// FIXME: Borrow checker problems without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
let mut ty = self.table.resolve_completely(self.result.binding_ty(capture.place.local));
if ty.is_raw_ptr() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, 0);
capture.place.projections.clear();
continue;
if let (false, UpvarCapture::ByValue) = (is_copy_type, capture_info.capture_kind) {
for i in 0..place.projections.len() {
match place.ty_before_projection(i).kind() {
TyKind::Adt(def, _) if def.destructor(fcx.interner()).is_some() => {
truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, i);
break;
}
_ => {}
}
for (i, p) in capture.place.projections.iter().enumerate() {
ty = p.projected_ty(
&self.table.infer_ctxt,
self.table.param_env,
ty,
self.owner.krate(self.db),
);
if ty.is_raw_ptr() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, i + 1);
capture.place.projections.truncate(i + 1);
}
}
place
}
/// Truncate `place` so that an `unsafe` block isn't required to capture it.
/// - No projections are applied to raw pointers, since these require unsafe blocks. We capture
///   them completely.
/// - No projections are applied on top of Union ADTs, since these require unsafe blocks.
fn restrict_precision_for_unsafe(mut place: Place, capture_info: &mut CaptureInfo) -> Place {
    // A raw-pointer or union base cannot be projected into without `unsafe`,
    // so drop every projection and capture the base completely.
    if place.base_ty.as_ref().is_raw_ptr() || place.base_ty.as_ref().is_union() {
        truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, 0);
    }
    // Likewise, stop right after the first projection that yields a raw
    // pointer or a union: nothing beyond it may be captured precisely.
    let cutoff = place
        .projections
        .iter()
        .position(|proj| proj.ty.as_ref().is_raw_ptr() || proj.ty.as_ref().is_union());
    if let Some(i) = cutoff {
        truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, i + 1);
    }
    place
}
/// Truncate projections so that the following rules are obeyed by the captured `place`:
/// - No Index projections are captured, since arrays are captured completely.
/// - No unsafe block is required to capture `place`.
///
/// Returns the truncated place and updated capture mode.
#[instrument(ret, level = "debug")]
fn restrict_capture_precision(place: Place, capture_info: &mut CaptureInfo) -> Place {
    let mut place = restrict_precision_for_unsafe(place, capture_info);
    // Arrays are captured completely, so cut the place off right before the
    // first Index/Subslice projection, if there is one. Deref, Field and
    // UnwrapUnsafeBinder projections are fine to keep.
    let first_indexing = place
        .projections
        .iter()
        .position(|proj| matches!(proj.kind, ProjectionKind::Index | ProjectionKind::Subslice));
    if let Some(i) = first_indexing {
        truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, i);
    }
    place
}
/// Truncate deref of any reference.
///
/// A move closure always captures by value, and cannot capture through a
/// deref (that would move out of the referenced data), so the place is cut
/// off right before its first Deref projection.
#[instrument(ret, level = "debug")]
fn adjust_for_move_closure(mut place: Place, capture_info: &mut CaptureInfo) -> Place {
    match place.projections.iter().position(|proj| proj.kind == ProjectionKind::Deref) {
        Some(idx) => truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, idx),
        None => {}
    }
    capture_info.capture_kind = UpvarCapture::ByValue;
    place
}
/// Adjust closure capture just that if taking ownership of data, only move data
/// from enclosing stack frame.
#[instrument(ret, level = "debug")]
fn adjust_for_non_move_closure(mut place: Place, capture_info: &mut CaptureInfo) -> Place {
let contains_deref =
place.projections.iter().position(|proj| proj.kind == ProjectionKind::Deref);
match capture_info.capture_kind {
UpvarCapture::ByValue | UpvarCapture::ByUse => {
if let Some(idx) = contains_deref {
truncate_place_to_len_and_update_capture_kind(&mut place, capture_info, idx);
}
}
UpvarCapture::ByRef(..) => {}
}
place
}
/// Picks the stronger of two capture infos for the same place.
///
/// At the end, `capture_info_a` will contain the selected info.
fn determine_capture_info(capture_info_a: &mut CaptureInfo, capture_info_b: &mut CaptureInfo) {
    // If the capture kind is equivalent then, we don't need to escalate and can compare the
    // expressions.
    let eq_capture_kind = match (capture_info_a.capture_kind, capture_info_b.capture_kind) {
        (UpvarCapture::ByValue, UpvarCapture::ByValue) => true,
        (UpvarCapture::ByUse, UpvarCapture::ByUse) => true,
        (UpvarCapture::ByRef(ref_a), UpvarCapture::ByRef(ref_b)) => ref_a == ref_b,
        (UpvarCapture::ByValue, _) | (UpvarCapture::ByUse, _) | (UpvarCapture::ByRef(_), _) => {
            false
        }
    };
    // `swap == true` means `b` ranks higher and should end up in `a`'s slot.
    let swap = if eq_capture_kind {
        false
    } else {
        // We select the CaptureKind which ranks higher based the following priority order:
        // (ByUse | ByValue) > MutBorrow > UniqueImmBorrow > ImmBorrow
        match (capture_info_a.capture_kind, capture_info_b.capture_kind) {
            (UpvarCapture::ByUse, UpvarCapture::ByValue)
            | (UpvarCapture::ByValue, UpvarCapture::ByUse) => {
                panic!("Same capture can't be ByUse and ByValue at the same time")
            }
            (UpvarCapture::ByValue, UpvarCapture::ByValue)
            | (UpvarCapture::ByUse, UpvarCapture::ByUse)
            | (UpvarCapture::ByValue | UpvarCapture::ByUse, UpvarCapture::ByRef(_)) => false,
            (UpvarCapture::ByRef(_), UpvarCapture::ByValue | UpvarCapture::ByUse) => true,
            (UpvarCapture::ByRef(ref_a), UpvarCapture::ByRef(ref_b)) => {
                match (ref_a, ref_b) {
                    // Take LHS:
                    (BorrowKind::UniqueImmutable | BorrowKind::Mutable, BorrowKind::Immutable)
                    | (BorrowKind::Mutable, BorrowKind::UniqueImmutable) => false,
                    // Take RHS:
                    (BorrowKind::Immutable, BorrowKind::UniqueImmutable | BorrowKind::Mutable)
                    | (BorrowKind::UniqueImmutable, BorrowKind::Mutable) => true,
                    (BorrowKind::Immutable, BorrowKind::Immutable)
                    | (BorrowKind::UniqueImmutable, BorrowKind::UniqueImmutable)
                    | (BorrowKind::Mutable, BorrowKind::Mutable) => {
                        // Equal kinds were already handled by `eq_capture_kind`.
                        panic!("Expected unequal capture kinds");
                    }
                }
            }
        }
    };
    if swap {
        mem::swap(capture_info_a, capture_info_b);
    }
}
/// Merges the sources of two capture infos, deduplicating them by their final
/// source; `dedup_sources_scratch` is a caller-provided reusable scratch map.
fn determine_capture_sources(
    capture_info_a: &mut CaptureInfo,
    capture_info_b: &mut CaptureInfo,
    dedup_sources_scratch: &mut FxHashMap<ExprOrPatId, CaptureSourceStack>,
) -> SmallVec<[CaptureSourceStack; 2]> {
    dedup_sources_scratch.clear();
    dedup_sources_scratch.extend(
        mem::take(&mut capture_info_a.sources).into_iter().map(|it| (it.final_source(), it)),
    );
    // Entries from `b` overwrite entries from `a` with the same final source.
    dedup_sources_scratch.extend(
        mem::take(&mut capture_info_b.sources).into_iter().map(|it| (it.final_source(), it)),
    );
    // `capture_info_a.sources` is empty after the take above; reuse it for the result.
    let mut result = mem::take(&mut capture_info_a.sources);
    result.clear();
    result.extend(dedup_sources_scratch.values().cloned());
    result
}
/// Truncates `place` to have up to `len` projections.
/// `curr_mode` is the current required capture kind for the place.
/// Returns the truncated `place` and the updated required capture kind.
///
/// Note: Capture kind changes from `MutBorrow` to `UniqueImmBorrow` if the truncated part of the `place`
/// contained `Deref` of `&mut`.
fn truncate_place_to_len_and_update_capture_kind(
place: &mut Place,
info: &mut CaptureInfo,
len: usize,
) {
let is_mut_ref = |ty: Ty<'_>| matches!(ty.kind(), TyKind::Ref(.., Mutability::Mut));
// If the truncated part of the place contains `Deref` of a `&mut` then convert MutBorrow ->
// UniqueImmBorrow
// Note that if the place contained Deref of a raw pointer it would've not been MutBorrow, so
// we don't need to worry about that case here.
match info.capture_kind {
UpvarCapture::ByRef(BorrowKind::Mutable) => {
for i in len..place.projections.len() {
if place.projections[i].kind == ProjectionKind::Deref
&& is_mut_ref(place.ty_before_projection(i))
{
info.capture_kind = UpvarCapture::ByRef(BorrowKind::UniqueImmutable);
break;
}
}
}
self.current_captures = current_captures;
UpvarCapture::ByRef(..) => {}
UpvarCapture::ByValue | UpvarCapture::ByUse => {}
}
fn adjust_for_move_closure(&mut self) {
// FIXME: Borrow checker won't allow without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
if let Some(first_deref) =
capture.place.projections.iter().position(|proj| *proj == HirPlaceProjection::Deref)
{
self.truncate_capture_spans(capture, first_deref);
capture.place.projections.truncate(first_deref);
}
capture.kind = CaptureKind::ByValue;
}
self.current_captures = current_captures;
// Now fix the sources, to point at the smaller place.
for source in &mut info.sources {
// +1 because the first place is the base.
source.truncate(len + 1);
}
fn minimize_captures(&mut self) {
self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
let mut hash_map = FxHashMap::<HirPlace, usize>::default();
let result = mem::take(&mut self.current_captures);
for mut item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
let mut it = item.place.projections.iter();
let prev_index = loop {
if let Some(k) = hash_map.get(&lookup_place) {
break Some(*k);
}
match it.next() {
Some(it) => {
lookup_place.projections.push(*it);
}
None => break None,
}
};
match prev_index {
Some(p) => {
let prev_projections_len = self.current_captures[p].place.projections.len();
self.truncate_capture_spans(&mut item, prev_projections_len);
self.current_captures[p].span_stacks.extend(item.span_stacks);
let len = self.current_captures[p].place.projections.len();
let kind_after_truncate =
item.place.capture_kind_of_truncated_place(item.kind, len);
self.current_captures[p].kind =
cmp::max(kind_after_truncate, self.current_captures[p].kind);
}
None => {
hash_map.insert(item.place.clone(), self.current_captures.len());
self.current_captures.push(item);
}
}
}
place.projections.truncate(len);
}
/// Determines the Ancestry relationship of Place A relative to Place B
///
/// `PlaceAncestryRelation::Ancestor` implies Place A is ancestor of Place B
/// `PlaceAncestryRelation::Descendant` implies Place A is descendant of Place B
/// `PlaceAncestryRelation::Divergent` implies neither of them is the ancestor of the other.
fn determine_place_ancestry_relation(place_a: &Place, place_b: &Place) -> PlaceAncestryRelation {
// If Place A and Place B don't start off from the same root variable, they are divergent.
if place_a.base != place_b.base {
return PlaceAncestryRelation::Divergent;
}
/// Recursively walks the pattern `tgt_pat`, which is being matched against the
/// value located at `place`, and records the consume/borrow/mutate operations
/// each sub-pattern implies.
///
/// `self.current_capture_span_stack` is kept in sync with the projections pushed
/// onto `place`, so that recorded captures carry accurate spans; everything
/// pushed here is undone before returning (the `'reset_span_stack` label exits
/// early while still running the final truncation below).
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
    // Pattern adjustments are implicit derefs (match ergonomics); apply them to
    // the place and mirror them on the span stack.
    let adjustments_count =
        self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
    place.projections.extend((0..adjustments_count).map(|_| HirPlaceProjection::Deref));
    self.current_capture_span_stack
        .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
    'reset_span_stack: {
        match &self.store[tgt_pat] {
            // These read nothing.
            Pat::Missing | Pat::Wild => (),
            Pat::Tuple { args, ellipsis } => {
                // Split the sub-patterns around `..`; the suffix is paired with
                // tuple fields from the end.
                let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
                let field_count = match self.result.pat_ty(tgt_pat).kind() {
                    TyKind::Tuple(s) => s.len(),
                    _ => break 'reset_span_stack,
                };
                let fields = 0..field_count;
                let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
                for (&arg, i) in it {
                    let mut p = place.clone();
                    self.current_capture_span_stack.push(MirSpan::PatId(arg));
                    p.projections.push(HirPlaceProjection::TupleField(i as u32));
                    self.consume_with_pat(p, arg);
                    self.current_capture_span_stack.pop();
                }
            }
            Pat::Or(pats) => {
                // Each alternative is matched against the same place.
                for pat in pats.iter() {
                    self.consume_with_pat(place.clone(), *pat);
                }
            }
            Pat::Record { args, .. } => {
                let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
                    break 'reset_span_stack;
                };
                match variant {
                    // For enum variants and unions the whole value is consumed
                    // rather than individual fields.
                    VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
                        self.consume_place(place)
                    }
                    VariantId::StructId(s) => {
                        let vd = s.fields(self.db);
                        for field_pat in args.iter() {
                            let arg = field_pat.pat;
                            // Unknown field name: skip this sub-pattern.
                            let Some(local_id) = vd.field(&field_pat.name) else {
                                continue;
                            };
                            let mut p = place.clone();
                            self.current_capture_span_stack.push(MirSpan::PatId(arg));
                            p.projections.push(HirPlaceProjection::Field(FieldId {
                                parent: variant,
                                local_id,
                            }));
                            self.consume_with_pat(p, arg);
                            self.current_capture_span_stack.pop();
                        }
                    }
                }
            }
            // These compare against the matched value, which requires reading it.
            Pat::Range { .. } | Pat::Slice { .. } | Pat::ConstBlock(_) | Pat::Lit(_) => {
                self.consume_place(place)
            }
            Pat::Path(path) => {
                if self.inside_assignment {
                    self.mutate_path_pat(path, tgt_pat);
                }
                self.consume_place(place);
            }
            &Pat::Bind { id, subpat: _ } => {
                let mode = self.result.binding_modes[tgt_pat];
                let capture_kind = match mode {
                    // A by-move binding consumes the place outright.
                    BindingMode::Move => {
                        self.consume_place(place);
                        break 'reset_span_stack;
                    }
                    BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
                    BindingMode::Ref(Mutability::Mut) => {
                        BorrowKind::Mut { kind: MutBorrowKind::Default }
                    }
                };
                self.current_capture_span_stack.push(MirSpan::BindingId(id));
                self.add_capture(place, CaptureKind::ByRef(capture_kind));
                self.current_capture_span_stack.pop();
            }
            Pat::TupleStruct { path: _, args, ellipsis } => {
                let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
                    break 'reset_span_stack;
                };
                match variant {
                    VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
                        self.consume_place(place)
                    }
                    VariantId::StructId(s) => {
                        let vd = s.fields(self.db);
                        let (al, ar) =
                            args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
                        let fields = vd.fields().iter();
                        let it = al
                            .iter()
                            .zip(fields.clone())
                            .chain(ar.iter().rev().zip(fields.rev()));
                        for (&arg, (i, _)) in it {
                            let mut p = place.clone();
                            self.current_capture_span_stack.push(MirSpan::PatId(arg));
                            p.projections.push(HirPlaceProjection::Field(FieldId {
                                parent: variant,
                                local_id: i,
                            }));
                            self.consume_with_pat(p, arg);
                            self.current_capture_span_stack.pop();
                        }
                    }
                }
            }
            Pat::Ref { pat, mutability: _ } => {
                // `&p` peels one reference off the matched value.
                self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
                place.projections.push(HirPlaceProjection::Deref);
                self.consume_with_pat(place, *pat);
                self.current_capture_span_stack.pop();
            }
            Pat::Box { .. } => (), // not supported
            &Pat::Expr(expr) => {
                // Consume the matched value, then treat `expr` as an assignment
                // target (`mutate_expr`), walking it with `inside_assignment`
                // cleared and a fresh span stack.
                self.consume_place(place);
                let pat_capture_span_stack = mem::take(&mut self.current_capture_span_stack);
                let old_inside_assignment = mem::replace(&mut self.inside_assignment, false);
                let lhs_place = self.place_of_expr(expr);
                self.mutate_expr(expr, lhs_place);
                self.inside_assignment = old_inside_assignment;
                self.current_capture_span_stack = pat_capture_span_stack;
            }
        }
    }
    // Undo the adjustment spans pushed at entry.
    self.current_capture_span_stack
        .truncate(self.current_capture_span_stack.len() - adjustments_count);
}
/// Consumes each expression yielded by `exprs`, in order.
fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
    exprs.for_each(|expr| self.consume_expr(expr));
}
/// Folds the collected captures into a closure kind via `cmp::min`: mutable
/// borrows map to `FnMut`, by-value captures to `FnOnce`, shared/shallow
/// borrows to `Fn`; with no captures the result is `Fn`.
fn closure_kind(&self) -> FnTrait {
    self.current_captures.iter().fold(FnTrait::Fn, |kind, capture| {
        let required = match &capture.kind {
            CaptureKind::ByRef(BorrowKind::Mut { .. }) => FnTrait::FnMut,
            CaptureKind::ByRef(BorrowKind::Shallow | BorrowKind::Shared) => FnTrait::Fn,
            CaptureKind::ByValue => FnTrait::FnOnce,
        };
        cmp::min(kind, required)
    })
}
/// Performs capture analysis for one closure: walks its body, post-processes the
/// collected captures, and stores `(captures, closure_kind)` into
/// `self.result.closure_info`. Returns the closure's `Fn*` trait kind.
fn analyze_closure(&mut self, closure: InternedClosureId) -> FnTrait {
    let InternedClosure(_, root) = closure.loc(self.db);
    self.current_closure = Some(closure);
    let Expr::Closure { body, capture_by, .. } = &self.store[root] else {
        unreachable!("Closure expression id is always closure");
    };
    // Walking the body populates `self.current_captures`.
    self.consume_expr(*body);
    // Record locals the closure mutably borrows directly (no deref in the
    // projection path); kept for diagnostics — see the FIXME below.
    for item in &self.current_captures {
        if matches!(
            item.kind,
            CaptureKind::ByRef(BorrowKind::Mut {
                kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow
            })
        ) && !item.place.projections.contains(&HirPlaceProjection::Deref)
        {
            // FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in
            // MIR. I didn't do that due duplicate diagnostics.
            self.result.mutated_bindings_in_closure.insert(item.place.local);
        }
    }
    self.restrict_precision_for_unsafe();
    // `closure_kind` should be done before adjust_for_move_closure
    // If there exists pre-deduced kind of a closure, use it instead of one determined by capture, as rustc does.
    // rustc also does diagnostics here if the latter is not a subtype of the former.
    let closure_kind = self
        .result
        .closure_info
        .get(&closure)
        .map_or_else(|| self.closure_kind(), |info| info.1);
    // `move` closures take everything by value (after truncating at derefs).
    match capture_by {
        CaptureBy::Value => self.adjust_for_move_closure(),
        CaptureBy::Ref => (),
    }
    self.minimize_captures();
    self.strip_captures_ref_span();
    let result = mem::take(&mut self.current_captures);
    let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
    self.result.closure_info.insert(closure, (captures, closure_kind));
    closure_kind
}
/// For by-value captures, strips a trailing "ref span" from every span stack.
fn strip_captures_ref_span(&mut self) {
    // FIXME: Borrow checker won't allow without this.
    let mut captures = std::mem::take(&mut self.current_captures);
    for capture in captures.iter_mut().filter(|it| matches!(it.kind, CaptureKind::ByValue)) {
        for span_stack in &mut capture.span_stacks {
            let last = span_stack.len() - 1;
            if span_stack[last].is_ref_span(self.store) {
                span_stack.truncate(last);
            }
        }
    }
    self.current_captures = captures;
}
/// Runs capture analysis for all deferred closures, then patches the recorded
/// `Fn*`-trait call resolution/adjustments for call expressions whose callee is
/// one of those closures.
pub(crate) fn infer_closures(&mut self) {
    let deferred_closures = self.sort_closures();
    // `sort_closures` yields dependents before their dependencies; reverse so
    // nested/called closures are analyzed before the closures that use them.
    for (closure, exprs) in deferred_closures.into_iter().rev() {
        self.current_captures = vec![];
        let kind = self.analyze_closure(closure);
        for (derefed_callee, callee_ty, params, expr) in exprs {
            if let &Expr::Call { callee, .. } = &self.store[expr] {
                // Now that the closure kind is known, record the concrete
                // `Fn`/`FnMut`/`FnOnce` method for the call and update the
                // callee's adjustments in place.
                let mut adjustments =
                    self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
                self.write_fn_trait_method_resolution(
                    kind,
                    derefed_callee,
                    &mut adjustments,
                    callee_ty,
                    &params,
                    expr,
                );
                self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice());
            }
        }
    }
}
/// We want to analyze some closures before others, to have a correct analysis:
/// * We should analyze nested closures before the parent, since the parent should capture some of
///   the things that its children captures.
/// * If a closure calls another closure, we need to analyze the callee, to find out how we should
///   capture it (e.g. by move for FnOnce)
///
/// These dependencies are collected in the main inference. We do a topological sort in this function. It
/// will consume the `deferred_closures` field and return its content in a sorted vector.
fn sort_closures(
    &mut self,
) -> Vec<(InternedClosureId, Vec<(Ty<'db>, Ty<'db>, Vec<Ty<'db>>, ExprId)>)> {
    let mut deferred_closures = mem::take(&mut self.deferred_closures);
    // Count, for each closure, how many other closures depend on it
    // (incoming edges in the dependency graph).
    let mut dependents_count: FxHashMap<InternedClosureId, usize> =
        deferred_closures.keys().map(|it| (*it, 0)).collect();
    for deps in self.closure_dependencies.values() {
        for dep in deps {
            *dependents_count.entry(*dep).or_default() += 1;
        }
    }
    // Kahn's algorithm: start from closures nothing depends on, releasing a
    // dependency into the queue once its dependent count drops to zero.
    let mut queue: Vec<_> =
        deferred_closures.keys().copied().filter(|&it| dependents_count[&it] == 0).collect();
    let mut result = vec![];
    while let Some(it) = queue.pop() {
        if let Some(d) = deferred_closures.remove(&it) {
            result.push((it, d));
        }
        for &dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
            let cnt = dependents_count.get_mut(&dep).unwrap();
            *cnt -= 1;
            if *cnt == 0 {
                queue.push(dep);
            }
        }
    }
    // Cycles are rejected when dependencies are recorded
    // (see `add_current_closure_dependency`), so the sort always drains.
    assert!(deferred_closures.is_empty(), "we should have analyzed all closures");
    result
}
pub(crate) fn add_current_closure_dependency(&mut self, dep: InternedClosureId) {
if let Some(c) = self.current_closure
&& !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep)
{
self.closure_dependencies.entry(c).or_default().push(dep);
}
fn dep_creates_cycle(
closure_dependencies: &FxHashMap<InternedClosureId, Vec<InternedClosureId>>,
visited: &mut FxHashSet<InternedClosureId>,
from: InternedClosureId,
to: InternedClosureId,
) -> bool {
if !visited.insert(from) {
return false;
}
if from == to {
return true;
}
if let Some(deps) = closure_dependencies.get(&to) {
for dep in deps {
if dep_creates_cycle(closure_dependencies, visited, from, *dep) {
return true;
}
}
}
false
// Assume of length of projections_a = n
let projections_a = &place_a.projections;
// Assume of length of projections_b = m
let projections_b = &place_b.projections;
let same_initial_projections =
iter::zip(projections_a, projections_b).all(|(proj_a, proj_b)| proj_a.kind == proj_b.kind);
if same_initial_projections {
use std::cmp::Ordering;
// First min(n, m) projections are the same
// Select Ancestor/Descendant
match projections_b.len().cmp(&projections_a.len()) {
Ordering::Greater => PlaceAncestryRelation::Ancestor,
Ordering::Equal => PlaceAncestryRelation::SamePlace,
Ordering::Less => PlaceAncestryRelation::Descendant,
}
} else {
PlaceAncestryRelation::Divergent
}
}
/// Call this only when the last span in the stack isn't a split.
fn apply_adjusts_to_place(
current_capture_span_stack: &mut Vec<MirSpan>,
mut r: HirPlace,
adjustments: &[Adjustment],
) -> Option<HirPlace> {
let span = *current_capture_span_stack.last().expect("empty capture span stack");
for adj in adjustments {
match &adj.kind {
Adjust::Deref(None) => {
current_capture_span_stack.push(span);
r.projections.push(HirPlaceProjection::Deref);
}
_ => return None,
/// Reduces the precision of the captured place when the precision doesn't yield any benefit from
/// borrow checking perspective, allowing us to save us on the size of the capture.
///
///
/// Fields that are read through a shared reference will always be read via a shared ref or a copy,
/// and therefore capturing precise paths yields no benefit. This optimization truncates the
/// rightmost deref of the capture if the deref is applied to a shared ref.
///
/// Reason we only drop the last deref is because of the following edge case:
///
/// ```
/// # struct A { field_of_a: Box<i32> }
/// # struct B {}
/// # struct C<'a>(&'a i32);
/// struct MyStruct<'a> {
/// a: &'static A,
/// b: B,
/// c: C<'a>,
/// }
///
/// fn foo<'a, 'b>(m: &'a MyStruct<'b>) -> impl FnMut() + 'static {
/// || drop(&*m.a.field_of_a)
/// // Here we really do want to capture `*m.a` because that outlives `'static`
///
/// // If we capture `m`, then the closure no longer outlives `'static`
/// // it is constrained to `'a`
/// }
/// ```
#[instrument(ret, level = "debug")]
fn truncate_capture_for_optimization(mut place: Place, info: &mut CaptureInfo) -> Place {
let is_shared_ref = |ty: Ty<'_>| matches!(ty.kind(), TyKind::Ref(.., Mutability::Not));
// Find the rightmost deref (if any). All the projections that come after this
// are fields or other "in-place pointer adjustments"; these refer therefore to
// data owned by whatever pointer is being dereferenced here.
let idx = place.projections.iter().rposition(|proj| ProjectionKind::Deref == proj.kind);
match idx {
// If that pointer is a shared reference, then we don't need those fields.
Some(idx) if is_shared_ref(place.ty_before_projection(idx)) => {
truncate_place_to_len_and_update_capture_kind(&mut place, info, idx + 1)
}
None | Some(_) => {}
}
Some(r)
place
}
/// Returns whether edition-2021 "precise capture" (capturing disjoint place
/// paths rather than whole variables) is enabled: Rust 2021 or later.
fn enable_precise_capture(edition: Edition) -> bool {
    // FIXME: We should use the edition from the closure expr.
    edition.at_least_2021()
}
/// Pairs every capture of a parent (coroutine-)closure with the child coroutine
/// captures it was refined into, invoking `for_each` on each `(parent, child)`
/// pair (with their field indices) and returning the collected results.
///
/// Both iterators must be ordered so that each parent capture's children form a
/// consecutive run; every parent must match at least one child and every child
/// must match some parent, otherwise this panics.
fn analyze_coroutine_closure_captures<'a, T>(
    parent_captures: impl IntoIterator<Item = &'a CapturedPlace>,
    child_captures: impl IntoIterator<Item = &'a CapturedPlace>,
    mut for_each: impl FnMut((usize, &'a CapturedPlace), (usize, &'a CapturedPlace)) -> T,
) -> impl Iterator<Item = T> {
    let mut result = SmallVec::<[_; 10]>::new();
    let mut child_captures = child_captures.into_iter().enumerate().peekable();

    // One parent capture may correspond to several child captures if we end up
    // refining the set of captures via edition-2021 precise captures. We want to
    // match up any number of child captures with one parent capture, so we keep
    // peeking off this `Peekable` until the child doesn't match anymore.
    for (parent_field_idx, parent_capture) in parent_captures.into_iter().enumerate() {
        // Make sure we use every field at least once, b/c why are we capturing something
        // if it's not used in the inner coroutine.
        let mut field_used_at_least_once = false;

        // A parent matches a child if they share the same prefix of projections.
        // The child may have more, if it is capturing sub-fields out of
        // something that is captured by-move in the parent closure.
        while child_captures.peek().is_some_and(|(_, child_capture)| {
            child_prefix_matches_parent_projections(parent_capture, child_capture)
        }) {
            let (child_field_idx, child_capture) = child_captures.next().unwrap();
            // This analysis only makes sense if the parent capture is a
            // prefix of the child capture.
            assert!(
                child_capture.place.projections.len() >= parent_capture.place.projections.len(),
                "parent capture ({parent_capture:#?}) expected to be prefix of \
                child capture ({child_capture:#?})"
            );
            result.push(for_each(
                (parent_field_idx, parent_capture),
                (child_field_idx, child_capture),
            ));
            field_used_at_least_once = true;
        }

        // Make sure the field was used at least once.
        assert!(
            field_used_at_least_once,
            "we captured {parent_capture:#?} but it was not used in the child coroutine?"
        );
    }
    assert_eq!(child_captures.next(), None, "leftover child captures?");
    result.into_iter()
}
/// Returns whether both captures are rooted at the same upvar and the parent's
/// projections are a prefix of the child's (compared by projection kind).
fn child_prefix_matches_parent_projections(
    parent_capture: &CapturedPlace,
    child_capture: &CapturedPlace,
) -> bool {
    // Both captures must be rooted at an upvar; anything else is a caller bug.
    let upvar_base = |capture: &CapturedPlace| match capture.place.base {
        PlaceBase::Upvar { var_id, .. } => var_id,
        _ => panic!("expected capture to be an upvar"),
    };
    let parent_base = upvar_base(parent_capture);
    let child_base = upvar_base(child_capture);
    parent_base == child_base
        && std::iter::zip(&child_capture.place.projections, &parent_capture.place.projections)
            .all(|(child, parent)| child.kind == parent.kind)
}
@@ -0,0 +1,1705 @@
//! A different sort of visitor for walking fn bodies. Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.
//!
//! This is only used for upvar inference.
use either::Either;
use hir_def::{
AdtId, HasModule, VariantId,
attrs::AttrFlags,
hir::{
Array, AsmOperand, BindingId, Expr, ExprId, ExprOrPatId, MatchArm, Pat, PatId,
RecordLitField, RecordSpread, Statement,
},
resolver::ValueNs,
};
use rustc_ast_ir::{try_visit, visit::VisitorResult};
use rustc_type_ir::{
FallibleTypeFolder, TypeFoldable, TypeFolder, TypeVisitable, TypeVisitor,
inherent::{AdtDef, IntoKind, Ty as _},
};
use smallvec::{SmallVec, smallvec};
use syntax::ast::{BinaryOp, UnaryOp};
use tracing::{debug, instrument};
use crate::{
Adjust, Adjustment, AutoBorrow, BindingMode,
infer::{CaptureSourceStack, InferenceContext, UpvarCapture, closure::analysis::BorrowKind},
method_resolution::CandidateId,
next_solver::{DbInterner, ErrorGuaranteed, StoredTy, Ty, TyKind},
upvars::UpvarsRef,
utils::EnumerateAndAdjustIterator,
};
/// Module-local `Result` alias, defaulting to `()` for success and
/// `ErrorGuaranteed` for failure.
type Result<T = (), E = ErrorGuaranteed> = std::result::Result<T, E>;
/// A single projection step describing how a value is reached from a place's
/// base (see [`Place`]).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ProjectionKind {
    /// A dereference of a pointer, reference or `Box<T>` of the given type.
    Deref,
    /// `B.F` where `B` is the base expression and `F` is
    /// the field. The field is identified by which variant
    /// it appears in along with a field index. The variant
    /// is used for enums.
    Field { field_idx: u32, variant_idx: u32 },
    /// Some index like `B[x]`, where `B` is the base
    /// expression. We don't preserve the index `x` because
    /// we won't need it.
    Index,
    /// A subslice covering a range of values like `B[x..y]`.
    Subslice,
    /// `unwrap_binder!(expr)`
    UnwrapUnsafeBinder,
}
/// The root from which a [`Place`] is projected.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum PlaceBase {
    /// A temporary variable.
    Rvalue,
    /// A named `static` item.
    StaticItem,
    /// A named local variable.
    Local(BindingId),
    /// An upvar referenced by closure env.
    Upvar { closure: ExprId, var_id: BindingId },
}
/// One projection step of a [`Place`], paired with the type it produces.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Projection {
    /// Type after the projection is applied.
    pub ty: StoredTy,
    /// Defines the kind of access made by the projection.
    pub kind: ProjectionKind,
}
/// A `Place` represents how a value is located in memory. This does not
/// always correspond to a syntactic place expression. For example, when
/// processing a pattern, a `Place` can be used to refer to the sub-value
/// currently being inspected.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Place {
    /// The type of the `PlaceBase`
    pub base_ty: StoredTy,
    /// The "outermost" place that holds this value.
    pub base: PlaceBase,
    /// How this place is derived from the base place, in application order
    /// (projection `i` applies to the type produced by projection `i - 1`).
    pub projections: Vec<Projection>,
}
impl<'db> TypeVisitable<DbInterner<'db>> for Place {
    /// Visits the base type, then every projection's type, in order.
    fn visit_with<V: TypeVisitor<DbInterner<'db>>>(&self, visitor: &mut V) -> V::Result {
        // `base` carries no types, so only `base_ty` and the projection types
        // need visiting.
        try_visit!(self.base_ty.as_ref().visit_with(visitor));
        for Projection { ty, kind: _ } in &self.projections {
            try_visit!(ty.as_ref().visit_with(visitor));
        }
        V::Result::output()
    }
}
impl<'db> TypeFoldable<DbInterner<'db>> for Place {
    /// Folds the base type, then each projection's type in order, short-circuiting
    /// on the first folder error.
    fn try_fold_with<F: FallibleTypeFolder<DbInterner<'db>>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        let base_ty = self.base_ty.as_ref().try_fold_with(folder)?.store();
        let mut projections = Vec::with_capacity(self.projections.len());
        for Projection { ty, kind } in self.projections {
            projections.push(Projection { ty: ty.as_ref().try_fold_with(folder)?.store(), kind });
        }
        Ok(Self { base_ty, base: self.base, projections })
    }

    /// Infallible variant of [`Self::try_fold_with`].
    fn fold_with<F: TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
        let base_ty = self.base_ty.as_ref().fold_with(folder).store();
        let mut projections = Vec::with_capacity(self.projections.len());
        for Projection { ty, kind } in self.projections {
            projections.push(Projection { ty: ty.as_ref().fold_with(folder).store(), kind });
        }
        Self { base_ty, base: self.base, projections }
    }
}
impl Place {
    /// Returns an iterator of the types that have to be dereferenced to access
    /// the `Place`.
    ///
    /// The types are in the reverse order that they are applied. So if
    /// `x: &*const u32` and the `Place` is `**x`, then the types returned are
    /// `*const u32` then `&*const u32`.
    pub fn deref_tys<'db>(&self) -> impl Iterator<Item = Ty<'db>> {
        self.projections
            .iter()
            .enumerate()
            .rev()
            .filter(|(_, proj)| proj.kind == ProjectionKind::Deref)
            .map(move |(index, _)| self.ty_before_projection(index))
    }

    /// Returns the type of this `Place` after all projections have been applied.
    pub fn ty<'db>(&self) -> Ty<'db> {
        match self.projections.last() {
            Some(last) => last.ty.as_ref(),
            None => self.base_ty.as_ref(),
        }
    }

    /// Returns the type of this `Place` immediately before the
    /// `projection_index`th projection is applied.
    pub fn ty_before_projection<'db>(&self, projection_index: usize) -> Ty<'db> {
        assert!(projection_index < self.projections.len());
        match projection_index.checked_sub(1) {
            Some(prev) => self.projections[prev].ty.as_ref(),
            None => self.base_ty.as_ref(),
        }
    }
}
/// A [`Place`] bundled with the expressions/patterns it originated from.
///
/// Like `Place`, this does not always correspond to a syntactic place
/// expression; when processing a pattern it can refer to the sub-value
/// currently being inspected.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct PlaceWithOrigin {
    /// `ExprId`s or `PatId`s of the expressions or patterns producing this value.
    pub origins: SmallVec<[CaptureSourceStack; 2]>,
    /// Information about the `Place`.
    pub place: Place,
}
impl PlaceWithOrigin {
    /// Creates a place rooted at `base` with no projections, originating from
    /// the single expression/pattern `origin`.
    fn new_no_projections<'db>(
        origin: impl Into<ExprOrPatId>,
        base_ty: Ty<'db>,
        base: PlaceBase,
    ) -> PlaceWithOrigin {
        let origins = smallvec![CaptureSourceStack::from_single(origin.into())];
        Self::new(origins, base_ty, base, Vec::new())
    }

    /// Creates a place from its parts, checking the origin-stack invariant.
    fn new<'db>(
        origins: SmallVec<[CaptureSourceStack; 2]>,
        base_ty: Ty<'db>,
        base: PlaceBase,
        projections: Vec<Projection>,
    ) -> PlaceWithOrigin {
        // Every origin stack has one entry per projection, plus one for the base.
        debug_assert!(origins.iter().all(|origin| origin.len() == projections.len() + 1));
        PlaceWithOrigin { origins, place: Place { base_ty: base_ty.store(), base, projections } }
    }

    /// Appends `projection` to the place, recording `origin` on every origin
    /// stack to keep the length invariant intact.
    fn push_projection(&mut self, projection: Projection, origin: ExprOrPatId) {
        for origin_stack in self.origins.iter_mut() {
            origin_stack.push(origin);
        }
        self.place.projections.push(projection);
    }
}
/// The `FakeReadCause` describes the type of pattern why a FakeRead statement exists.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum FakeReadCause {
    /// A fake read injected into a match guard to ensure that the discriminants
    /// that are being matched on aren't modified while the match guard is being
    /// evaluated.
    ///
    /// At the beginning of each match guard, a fake borrow is
    /// inserted for each discriminant accessed in the entire `match` statement.
    ///
    /// Then, at the end of the match guard, a `FakeRead(ForMatchGuard)` is
    /// inserted to keep the fake borrows alive until that point.
    ///
    /// This should ensure that you cannot change the variant for an enum while
    /// you are in the midst of matching on it.
    ForMatchGuard,

    /// Fake read of the scrutinee of a `match` or destructuring `let`
    /// (i.e. `let` with non-trivial pattern).
    ///
    /// In `match x { ... }`, we generate a `FakeRead(ForMatchedPlace, x)`
    /// and insert it into the `otherwise_block` (which is supposed to be
    /// unreachable for irrefutable pattern-matches like `match` or `let`).
    ///
    /// This is necessary because `let x: !; match x {}` doesn't generate any
    /// actual read of x, so we need to generate a `FakeRead` to check that it
    /// is initialized.
    ///
    /// If the `FakeRead(ForMatchedPlace)` is being performed with a closure
    /// that doesn't capture the required upvars, the `FakeRead` within the
    /// closure is omitted entirely.
    ///
    /// To make sure that this is still sound, if a closure matches against
    /// a Place starting with an Upvar, we hoist the `FakeRead` to the
    /// definition point of the closure.
    ///
    /// If the `FakeRead` comes from being hoisted out of a closure like this,
    /// we record the `ExprId` of the closure. Otherwise, the `Option` will be `None`.
    //
    // We can use `ExprId` here since fake read statements are removed
    // before codegen in the `CleanupNonCodegenStatements` pass.
    ForMatchedPlace(Option<ExprId>),

    /// A fake read injected into a match guard to ensure that the places
    /// bound by the pattern are immutable for the duration of the match guard.
    ///
    /// Within a match guard, references are created for each place that the
    /// pattern creates a binding for — this is known as the `RefWithinGuard`
    /// version of the variables. To make sure that the references stay
    /// alive until the end of the match guard, and properly prevent the
    /// places in question from being modified, a `FakeRead(ForGuardBinding)`
    /// is inserted at the end of the match guard.
    ///
    /// For details on how these references are created, see the extensive
    /// documentation on `bind_matched_candidate_for_guard` in
    /// `rustc_mir_build`.
    ForGuardBinding,

    /// Officially, the semantics of
    ///
    /// `let pattern = <expr>;`
    ///
    /// is that `<expr>` is evaluated into a temporary and then this temporary is
    /// bound into the pattern.
    ///
    /// However, if we see the simple pattern `let var = <expr>`, we optimize this to
    /// evaluate `<expr>` directly into the variable `var`. This is mostly unobservable,
    /// but in some cases it can affect the borrow checker, as in #53695.
    ///
    /// Therefore, we insert a `FakeRead(ForLet)` immediately after each `let`
    /// with a trivial pattern.
    ///
    /// FIXME: `ExprUseVisitor` has an entirely different opinion on what `FakeRead(ForLet)`
    /// is supposed to mean. If it was accurate to what MIR lowering does,
    /// would it even make sense to hoist these out of closures like
    /// `ForMatchedPlace`?
    ForLet(Option<ExprId>),

    /// Currently, index expressions overloaded through the `Index` trait
    /// get lowered differently than index expressions with builtin semantics
    /// for arrays and slices — the latter will emit code to perform
    /// bound checks, and then return a MIR place that will only perform the
    /// indexing "for real" when it gets incorporated into an instruction.
    ///
    /// This is observable in the fact that the following compiles:
    ///
    /// ```
    /// fn f(x: &mut [&mut [u32]], i: usize) {
    ///     x[i][x[i].len() - 1] += 1;
    /// }
    /// ```
    ///
    /// However, we need to be careful to not let the user invalidate the
    /// bound check with an expression like
    ///
    /// `(*x)[1][{ x = y; 4}]`
    ///
    /// Here, the first bounds check would be invalidated when we evaluate the
    /// second index expression. To make sure that this doesn't happen, we
    /// create a fake borrow of `x` and hold it while we evaluate the second
    /// index.
    ///
    /// This borrow is kept alive by a `FakeRead(ForIndex)` at the end of its
    /// scope.
    ForIndex,
}
/// This trait defines the callbacks you can expect to receive when
/// employing the ExprUseVisitor.
pub(crate) trait Delegate<'db> {
/// The value found at `place` is moved, depending
/// on `mode`. Where `diag_expr_id` is the id used for diagnostics for `place`.
///
/// If the value is `Copy`, [`copy`][Self::copy] is called instead, which
/// by default falls back to [`borrow`][Self::borrow].
///
/// The parameter `diag_expr_id` indicates the HIR id that ought to be used for
/// diagnostics. Around pattern matching such as `let pat = expr`, the diagnostic
/// id will be the id of the expression `expr` but the place itself will have
/// the id of the binding in the pattern `pat`.
fn consume(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>);
/// The value found at `place` is used, depending
/// on `mode`. Where `diag_expr_id` is the id used for diagnostics for `place`.
///
/// Use of a `Copy` type in a ByUse context is considered a use
/// by `ImmBorrow` and `borrow` is called instead. This is because
/// a shared borrow is the "minimum access" that would be needed
/// to perform a copy.
///
///
/// The parameter `diag_expr_id` indicates the HIR id that ought to be used for
/// diagnostics. Around pattern matching such as `let pat = expr`, the diagnostic
/// id will be the id of the expression `expr` but the place itself will have
/// the id of the binding in the pattern `pat`.
fn use_cloned(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>);
/// The value found at `place` is being borrowed with kind `bk`.
/// `diag_expr_id` is the id used for diagnostics (see `consume` for more details).
fn borrow(
&mut self,
place_with_id: PlaceWithOrigin,
bk: BorrowKind,
ctx: &mut InferenceContext<'_, 'db>,
);
/// The value found at `place` is being copied.
/// The diagnostic id travels with `place_with_id` (see [`Self::consume`] for more details).
///
/// If an implementation is not provided, use of a `Copy` type in a ByValue context is instead
/// considered a use by `ImmBorrow` and `borrow` is called instead. This is because a shared
/// borrow is the "minimum access" that would be needed to perform a copy.
fn copy(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
    // In most cases, copying data from `x` is equivalent to doing `*&x`, so by default
    // we treat a copy of `x` as a borrow of `x`.
    self.borrow(place_with_id, BorrowKind::Immutable, ctx)
}
/// The path at `assignee_place` is being assigned to.
/// The diagnostic id travels with `assignee_place` (see [`Self::consume`] for more details).
fn mutate(&mut self, assignee_place: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>);
/// The path at `binding_place` is a binding that is being initialized.
///
/// This covers cases such as `let x = 42;`
fn bind(&mut self, binding_place: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
    // Bindings can normally be treated as a regular assignment, so by default we
    // forward this to the mutate callback.
    self.mutate(binding_place, ctx)
}
/// The `place` should be a fake read because of specified `cause`.
/// Fake reads (no runtime effect) are emitted e.g. for match scrutinees and
/// `let` initializers so closures capture everything borrowck needs to check.
fn fake_read(
    &mut self,
    place_with_id: PlaceWithOrigin,
    cause: FakeReadCause,
    ctx: &mut InferenceContext<'_, 'db>,
);
}
/// Blanket forwarding impl: a `&mut D` is itself a [`Delegate`], with every
/// callback simply forwarded to the underlying delegate via `**self`.
impl<'db, D: Delegate<'db>> Delegate<'db> for &mut D {
    fn consume(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        (**self).consume(place_with_id, ctx)
    }
    fn use_cloned(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        (**self).use_cloned(place_with_id, ctx)
    }
    fn borrow(
        &mut self,
        place_with_id: PlaceWithOrigin,
        bk: BorrowKind,
        ctx: &mut InferenceContext<'_, 'db>,
    ) {
        (**self).borrow(place_with_id, bk, ctx)
    }
    fn copy(&mut self, place_with_id: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        (**self).copy(place_with_id, ctx)
    }
    fn mutate(&mut self, assignee_place: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        (**self).mutate(assignee_place, ctx)
    }
    fn bind(&mut self, binding_place: PlaceWithOrigin, ctx: &mut InferenceContext<'_, 'db>) {
        (**self).bind(binding_place, ctx)
    }
    fn fake_read(
        &mut self,
        place_with_id: PlaceWithOrigin,
        cause: FakeReadCause,
        ctx: &mut InferenceContext<'_, 'db>,
    ) {
        (**self).fake_read(place_with_id, cause, ctx)
    }
}
/// A visitor that reports how each expression is being used.
///
/// See [module-level docs][self] and [`Delegate`] for details.
pub(crate) struct ExprUseVisitor<'a, 'b, 'db, D: Delegate<'db>> {
    /// The inference context the analysis runs within.
    cx: &'a mut InferenceContext<'b, 'db>,
    /// Receives the consume/borrow/mutate/fake-read callbacks.
    delegate: D,
    /// The closure whose body is being walked; used as the `closure` of
    /// `PlaceBase::Upvar` places reported to the delegate.
    closure_expr: ExprId,
    /// Variables captured from enclosing scopes (anything not in here is a
    /// local of the body being walked).
    upvars: UpvarsRef<'db>,
}
/// Distinguishes how `walk_pat` should interpret the pattern it is walking.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PatWalkMode {
    /// `let`, `match`.
    Declaration,
    /// Destructuring assignment.
    Assignment,
}
impl<'a, 'b, 'db, D: Delegate<'db>> ExprUseVisitor<'a, 'b, 'db, D> {
/// Creates the ExprUseVisitor, configuring it with the various options provided:
///
/// - `delegate` -- who receives the callbacks
/// - `param_env` --- parameter environment for trait lookups (esp. pertaining to `Copy`)
/// - `typeck_results` --- typeck results for the code being analyzed
pub(crate) fn new(
cx: &'a mut InferenceContext<'b, 'db>,
closure_expr: ExprId,
upvars: UpvarsRef<'db>,
delegate: D,
) -> Self {
ExprUseVisitor { delegate, closure_expr, upvars, cx }
}
/// Walks a closure body: each parameter pattern is matched against an rvalue
/// place (with a fake read, mirroring `let`), then the body expression is
/// consumed.
pub(crate) fn consume_closure_body(&mut self, params: &[PatId], body: ExprId) -> Result {
    for &param in params {
        // Parameter place uses the adjusted pattern type (implicit `&` wrappers).
        let param_ty = self.pat_ty_adjusted(param)?;
        debug!("consume_body: param_ty = {:?}", param_ty);
        let param_place = self.cat_rvalue(param.into(), param_ty);
        // `false`: a parameter binding is irrefutable, like a plain `let`.
        self.fake_read_scrutinee(param_place.clone(), false);
        self.walk_pat(param_place, param, false, PatWalkMode::Declaration)?;
    }
    self.consume_expr(body)?;
    Ok(())
}
/// Reports a by-value use of `place_with_id`: a copy if its type is `Copy`,
/// otherwise a move (consume).
#[instrument(skip(self), level = "debug")]
fn consume_or_copy(&mut self, place_with_id: PlaceWithOrigin) {
    if self.cx.table.type_is_copy_modulo_regions(place_with_id.place.ty()) {
        self.delegate.copy(place_with_id, self.cx);
    } else {
        self.delegate.consume(place_with_id, self.cx);
    }
}
/// Reports an `x.use` use of `place_with_id` (copy, clone-via-`UseCloned`, or
/// move — in that order of preference).
#[instrument(skip(self), level = "debug")]
pub(crate) fn consume_clone_or_copy(&mut self, place_with_id: PlaceWithOrigin) {
    // `x.use` will do one of the following
    // * if it implements `Copy`, it will be a copy
    // * if it implements `UseCloned`, it will be a call to `clone`
    // * otherwise, it is a move
    //
    // we do a conservative approximation of this, treating it as a move unless we know that it implements copy or `UseCloned`
    if self.cx.table.type_is_copy_modulo_regions(place_with_id.place.ty()) {
        self.delegate.copy(place_with_id, self.cx);
    } else if self.cx.table.type_is_use_cloned_modulo_regions(place_with_id.place.ty()) {
        self.delegate.use_cloned(place_with_id, self.cx);
    } else {
        self.delegate.consume(place_with_id, self.cx);
    }
}
/// Consumes each expression in `exprs` in order, stopping at the first error.
fn consume_exprs(&mut self, exprs: &[ExprId]) -> Result {
    exprs.iter().try_for_each(|&expr| self.consume_expr(expr))
}
// FIXME: It's suspicious that this is public; clippy should probably use `walk_expr`.
/// Reports a by-value use of `expr` (as a place) and then walks its subtree.
#[instrument(skip(self), level = "debug")]
pub(crate) fn consume_expr(&mut self, expr: ExprId) -> Result {
    let place_with_id = self.cat_expr(expr)?;
    self.consume_or_copy(place_with_id);
    self.walk_expr(expr)?;
    Ok(())
}
/// Reports a mutation of `expr` (as a place) and then walks its subtree.
fn mutate_expr(&mut self, expr: ExprId) -> Result {
    let place_with_id = self.cat_expr(expr)?;
    self.delegate.mutate(place_with_id, self.cx);
    self.walk_expr(expr)?;
    Ok(())
}
/// Reports a borrow of `expr` (as a place) with kind `bk`, then walks its subtree.
#[instrument(skip(self), level = "debug")]
fn borrow_expr(&mut self, expr: ExprId, bk: BorrowKind) -> Result {
    let place_with_id = self.cat_expr(expr)?;
    self.delegate.borrow(place_with_id, bk, self.cx);
    self.walk_expr(expr)?;
    Ok(())
}
/// Core expression walk: first reports any uses introduced by the expression's
/// adjustments, then dispatches on the expression kind and reports the uses
/// its evaluation performs on its subexpressions.
#[instrument(skip(self), level = "debug")]
pub(crate) fn walk_expr(&mut self, expr: ExprId) -> Result {
    self.walk_adjustment(expr)?;
    match self.cx.store[expr] {
        // A bare path is a place; whether it is read is decided by the parent
        // (via consume/borrow/mutate on this expr), not here.
        Expr::Path(_) => {}
        Expr::UnaryOp { op: UnaryOp::Deref, expr: base } => {
            // *base
            self.walk_expr(base)?;
        }
        Expr::Field { expr: base, .. } => {
            // base.f
            self.walk_expr(base)?;
        }
        Expr::Index { base: lhs, index: rhs } => {
            // lhs[rhs]
            self.walk_expr(lhs)?;
            self.consume_expr(rhs)?;
        }
        Expr::Call { callee, ref args } => {
            // callee(args)
            self.consume_expr(callee)?;
            self.consume_exprs(args)?;
        }
        Expr::MethodCall { receiver, ref args, .. } => {
            // callee.m(args)
            self.consume_expr(receiver)?;
            self.consume_exprs(args)?;
        }
        Expr::RecordLit { ref fields, spread, .. } => {
            self.walk_struct_expr(fields, spread)?;
        }
        Expr::Tuple { ref exprs } => {
            self.consume_exprs(exprs)?;
        }
        Expr::If {
            condition: cond_expr,
            then_branch: then_expr,
            else_branch: opt_else_expr,
        } => {
            self.consume_expr(cond_expr)?;
            self.consume_expr(then_expr)?;
            if let Some(else_expr) = opt_else_expr {
                self.consume_expr(else_expr)?;
            }
        }
        Expr::Let { pat, expr: init } => {
            // `let pat = init` in expression position: the scrutinee is
            // immutably borrowed (the callback runs before the pattern walk).
            self.walk_local(init, pat, None, |this| {
                this.borrow_expr(init, BorrowKind::Immutable)
            })?;
        }
        Expr::Match { expr: discr, ref arms } => {
            // The scrutinee gets a fake read (refutable: `true`) so closures
            // capture enough for exhaustiveness/borrow checking.
            let discr_place = self.cat_expr(discr)?;
            self.fake_read_scrutinee(discr_place.clone(), true);
            self.walk_expr(discr)?;
            for arm in arms {
                self.walk_arm(discr_place.clone(), arm)?;
            }
        }
        Expr::Array(Array::ElementList { elements: ref exprs }) => {
            self.consume_exprs(exprs)?;
        }
        Expr::Ref { expr: base, mutability: m, .. } => {
            // &base
            // make sure that the thing we are pointing out stays valid
            // for the lifetime `scope_r` of the resulting ptr:
            let bk = BorrowKind::from_hir_mutbl(m);
            self.borrow_expr(base, bk)?;
        }
        Expr::InlineAsm(ref asm) => {
            // Inputs are consumed, outputs mutated, in/outs both.
            for (_, op) in &asm.operands {
                match *op {
                    AsmOperand::In { expr, .. } => {
                        self.consume_expr(expr)?;
                    }
                    AsmOperand::Out { expr: Some(expr), .. }
                    | AsmOperand::InOut { expr, .. } => {
                        self.mutate_expr(expr)?;
                    }
                    AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
                        self.consume_expr(in_expr)?;
                        if let Some(out_expr) = out_expr {
                            self.mutate_expr(out_expr)?;
                        }
                    }
                    AsmOperand::Out { expr: None, .. }
                    | AsmOperand::Const { .. }
                    | AsmOperand::Sym { .. } => {}
                    AsmOperand::Label(block) => {
                        self.walk_expr(block)?;
                    }
                }
            }
        }
        // Leaf expressions: nothing to use.
        Expr::Continue { .. }
        | Expr::Literal(..)
        | Expr::Const(..)
        | Expr::OffsetOf(..)
        | Expr::Missing
        | Expr::Underscore => {}
        Expr::Loop { body: blk, .. } => {
            self.walk_expr(blk)?;
        }
        Expr::UnaryOp { expr: lhs, .. } => {
            self.consume_expr(lhs)?;
        }
        Expr::BinaryOp {
            lhs,
            rhs,
            op: Some(BinaryOp::ArithOp(..) | BinaryOp::CmpOp(..) | BinaryOp::LogicOp(..)),
        } => {
            self.consume_expr(lhs)?;
            self.consume_expr(rhs)?;
        }
        Expr::Block { ref statements, tail, .. }
        | Expr::Unsafe { ref statements, tail, .. } => {
            for stmt in statements {
                self.walk_stmt(stmt)?;
            }
            if let Some(tail_expr) = tail {
                self.consume_expr(tail_expr)?;
            }
        }
        Expr::Break { expr: opt_expr, .. } | Expr::Return { expr: opt_expr } => {
            if let Some(expr) = opt_expr {
                self.consume_expr(expr)?;
            }
        }
        Expr::Become { expr } | Expr::Await { expr } | Expr::Box { expr } => {
            self.consume_expr(expr)?;
        }
        Expr::Assignment { target, value } => {
            // Destructuring assignment: walk the RHS first, then walk the
            // target pattern in `Assignment` mode (which reports mutations).
            self.walk_expr(value)?;
            let expr_place = self.cat_expr(value)?;
            let update_guard =
                self.cx.resolver.update_to_inner_scope(self.cx.db, self.cx.owner, expr);
            self.walk_pat(expr_place, target, false, PatWalkMode::Assignment)?;
            self.cx.resolver.reset_to_guard(update_guard);
        }
        Expr::Cast { expr: base, .. } => {
            self.consume_expr(base)?;
        }
        Expr::BinaryOp { lhs, rhs, op: None | Some(BinaryOp::Assignment { .. }) } => {
            self.consume_expr(lhs)?;
            self.consume_expr(rhs)?;
        }
        Expr::Array(Array::Repeat { initializer: base, .. }) => {
            self.consume_expr(base)?;
        }
        Expr::Closure { .. } => {
            // Nested closure: propagate its captures/fake reads upward.
            self.walk_captures(expr);
        }
        Expr::Yield { expr: value } | Expr::Yeet { expr: value } => {
            if let Some(value) = value {
                self.consume_expr(value)?;
            }
        }
        Expr::Range { lhs, rhs, .. } => {
            if let Some(lhs) = lhs {
                self.consume_expr(lhs)?;
            }
            if let Some(rhs) = rhs {
                self.consume_expr(rhs)?;
            }
        }
    }
    Ok(())
}
/// Walks a single statement inside a block.
fn walk_stmt(&mut self, stmt: &Statement) -> Result {
    match *stmt {
        Statement::Let { pat, initializer: Some(expr), else_branch: els, .. } => {
            // `let pat = expr [else { .. }]`; no extra callback between
            // walking the initializer and the pattern.
            self.walk_local(expr, pat, els, |_| Ok(()))?;
        }
        // A `let` with no initializer performs no uses.
        Statement::Let { .. } => {}
        Statement::Item(_) => {
            // We don't visit nested items in this visitor,
            // only the fn body we were given.
        }
        Statement::Expr { expr, .. } => {
            self.consume_expr(expr)?;
        }
    }
    Ok(())
}
/// Emits a fake read of a scrutinee place so that closures capture what
/// borrowck needs; `refutable` selects the match-vs-let cause.
#[instrument(skip(self), level = "debug")]
fn fake_read_scrutinee(&mut self, discr_place: PlaceWithOrigin, refutable: bool) {
    // If the scrutinee is an upvar, remember which closure it belongs to so
    // the cause can reference it.
    let closure_def_id = if let PlaceBase::Upvar { closure, var_id: _ } = discr_place.place.base {
        Some(closure)
    } else {
        None
    };
    let cause = match refutable {
        true => FakeReadCause::ForMatchedPlace(closure_def_id),
        false => FakeReadCause::ForLet(closure_def_id),
    };
    self.delegate.fake_read(discr_place, cause, self.cx);
}
/// Walks `pat = expr [else els]`: the initializer first, then the caller's
/// extra callback `f`, a fake read of the initializer place (refutable iff an
/// else-branch exists), the pattern, and finally the else-branch.
fn walk_local<F>(&mut self, expr: ExprId, pat: PatId, els: Option<ExprId>, mut f: F) -> Result
where
    F: FnMut(&mut Self) -> Result,
{
    self.walk_expr(expr)?;
    let expr_place = self.cat_expr(expr)?;
    f(self)?;
    self.fake_read_scrutinee(expr_place.clone(), els.is_some());
    self.walk_pat(expr_place, pat, false, PatWalkMode::Declaration)?;
    if let Some(els) = els {
        self.walk_expr(els)?;
    }
    Ok(())
}
/// Walks a record literal `S { f: e, ..spread }`: consumes each explicit field
/// value, and for a spread consumes only the fields that are *not* explicitly
/// mentioned (that is what functional update actually reads).
fn walk_struct_expr(&mut self, fields: &[RecordLitField], spread: RecordSpread) -> Result {
    // Consume the expressions supplying values for each field.
    for field in fields {
        self.consume_expr(field.expr)?;
    }
    let RecordSpread::Expr(with_expr) = spread else { return Ok(()) };
    let with_place = self.cat_expr(with_expr)?;
    // Select just those fields of the `with`
    // expression that will actually be used
    match self.cx.table.structurally_resolve_type(with_place.place.ty()).kind() {
        TyKind::Adt(adt, args) if adt.is_struct() => {
            let AdtId::StructId(adt) = adt.def_id().0 else { unreachable!() };
            let adt_fields = VariantId::from(adt).fields(self.cx.db).fields();
            let adt_field_types = self.cx.db.field_types(adt.into());
            // Consume those fields of the with expression that are needed.
            for (f_index, with_field) in adt_fields.iter() {
                let is_mentioned = fields.iter().any(|f| f.name == with_field.name);
                if !is_mentioned {
                    let field_place = self.cat_projection(
                        with_expr.into(),
                        with_place.clone(),
                        adt_field_types[f_index].get().instantiate(self.cx.interner(), args),
                        ProjectionKind::Field {
                            field_idx: f_index.into_raw().into_u32(),
                            // Structs have a single variant.
                            variant_idx: 0,
                        },
                    );
                    self.consume_or_copy(field_place);
                }
            }
        }
        // Non-struct (or error) types: nothing field-wise to consume.
        _ => {}
    }
    // walk the with expression so that complex expressions
    // are properly handled.
    self.walk_expr(with_expr)?;
    Ok(())
}
/// Returns the (possibly empty) adjustment list recorded for `expr`, cloned
/// into a `SmallVec`.
fn expr_adjustments(&self, expr: ExprId) -> SmallVec<[Adjustment; 5]> {
    // Due to borrowck problems, we cannot borrow the adjustments, unfortunately.
    self.cx.result.expr_adjustment(expr).unwrap_or_default().into()
}
/// Invoke the appropriate delegate calls for anything that gets
/// consumed or borrowed as part of the automatic adjustment
/// process.
fn walk_adjustment(&mut self, expr: ExprId) -> Result {
    let adjustments = self.expr_adjustments(expr);
    // Start from the unadjusted place and re-categorize after each step.
    let mut place_with_id = self.cat_expr_unadjusted(expr)?;
    for adjustment in &adjustments {
        debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
        match adjustment.kind {
            Adjust::NeverToAny | Adjust::Pointer(_) => {
                // Creating a closure/fn-pointer or unsizing consumes
                // the input and stores it into the resulting rvalue.
                self.consume_or_copy(place_with_id.clone());
            }
            // Built-in deref: reads through the pointer, no delegate call.
            Adjust::Deref(None) => {}
            // Autoderefs for overloaded Deref calls in fact reference
            // their receiver. That is, if we have `(*x)` where `x`
            // is of type `Rc<T>`, then this in fact is equivalent to
            // `x.deref()`. Since `deref()` is declared with `&self`,
            // this is an autoref of `x`.
            Adjust::Deref(Some(ref deref)) => {
                let bk = BorrowKind::from_mutbl(deref.0);
                self.delegate.borrow(place_with_id.clone(), bk, self.cx);
            }
            Adjust::Borrow(ref autoref) => {
                self.walk_autoref(expr, place_with_id.clone(), autoref);
            }
        }
        place_with_id = self.cat_expr_adjusted(expr, place_with_id, adjustment)?;
    }
    Ok(())
}
/// Walks the autoref `autoref` applied to the autoderef'd
/// `expr`. `base_place` is `expr` represented as a place,
/// after all relevant autoderefs have occurred.
fn walk_autoref(&mut self, expr: ExprId, base_place: PlaceWithOrigin, autoref: &AutoBorrow) {
    debug!("walk_autoref(expr={:?} base_place={:?} autoref={:?})", expr, base_place, autoref);
    match *autoref {
        AutoBorrow::Ref(m) => {
            self.delegate.borrow(base_place, BorrowKind::from_mutbl(m.into()), self.cx);
        }
        AutoBorrow::RawPtr(m) => {
            // `&raw` autorefs are reported as borrows too.
            debug!("walk_autoref: expr={:?} base_place={:?}", expr, base_place);
            self.delegate.borrow(base_place, BorrowKind::from_mutbl(m), self.cx);
        }
    }
}
/// Walks one match arm against the scrutinee place: the pattern (noting
/// whether a guard forces extra borrows of the bindings), then the guard
/// expression, then the arm body.
fn walk_arm(&mut self, discr_place: PlaceWithOrigin, arm: &MatchArm) -> Result {
    self.walk_pat(discr_place, arm.pat, arm.guard.is_some(), PatWalkMode::Declaration)?;
    if let Some(e) = arm.guard {
        self.consume_expr(e)?;
    }
    self.consume_expr(arm.expr)
}
/// The core driver for walking a pattern
///
/// This should mirror how pattern-matching gets lowered to MIR, as
/// otherwise lowering will ICE when trying to resolve the upvars.
///
/// However, it is okay to approximate it here by doing *more* accesses than
/// the actual MIR builder will, which is useful when some checks are too
/// cumbersome to perform here. For example, if after typeck it becomes
/// clear that only one variant of an enum is inhabited, and therefore a
/// read of the discriminant is not necessary, `walk_pat` will have
/// over-approximated the necessary upvar capture granularity.
///
/// Do note that discrepancies like these do still create obscure corners
/// in the semantics of the language, and should be avoided if possible.
#[instrument(skip(self), level = "debug")]
fn walk_pat(
    &mut self,
    discr_place: PlaceWithOrigin,
    pat: PatId,
    has_guard: bool,
    mode: PatWalkMode,
) -> Result {
    self.cat_pattern(discr_place.clone(), pat, &mut |this, place, pat| {
        debug!("walk_pat: pat.kind={:?}", this.cx.store[pat]);
        // Helper: report an immutable borrow of the matched place (a
        // "discriminant read") — shared by several arms below.
        let read_discriminant = {
            let place = place.clone();
            |this: &mut Self| {
                this.delegate.borrow(place, BorrowKind::Immutable, this.cx);
            }
        };
        match this.cx.store[pat] {
            Pat::Bind { id, .. } => {
                debug!("walk_pat: binding place={:?} pat={:?}", place, pat);
                let bm = this.cx.result.binding_modes[pat];
                debug!("walk_pat: pat.hir_id={:?} bm={:?}", pat, bm);
                // pat_ty: the type of the binding being produced.
                let pat_ty = this.node_ty(pat.into())?;
                debug!("walk_pat: pat_ty={:?}", pat_ty);
                if let Ok(binding_place) = this.cat_local(pat.into(), pat_ty, id) {
                    this.delegate.bind(binding_place, this.cx);
                }
                // Subtle: MIR desugaring introduces immutable borrows for each pattern
                // binding when lowering pattern guards to ensure that the guard does not
                // modify the scrutinee.
                if has_guard {
                    read_discriminant(this);
                }
                // It is also a borrow or copy/move of the value being matched.
                // In a cases of pattern like `let pat = upvar`, don't use the span
                // of the pattern, as this just looks confusing, instead use the span
                // of the discriminant.
                match this.cx.result.binding_mode(pat) {
                    Some(BindingMode::Ref(m)) => {
                        let bk = BorrowKind::from_mutbl(m);
                        this.delegate.borrow(place, bk, this.cx);
                    }
                    None | Some(BindingMode::Move) => {
                        debug!("walk_pat binding consuming pat");
                        this.consume_or_copy(place);
                    }
                }
            }
            Pat::Path(ref path) => {
                // A `Path` pattern is just a name like `Foo`. This is either a
                // named constant or else it refers to an ADT variant
                let is_assoc_const = this
                    .cx
                    .result
                    .assoc_resolutions_for_pat(pat)
                    .is_some_and(|it| matches!(it.0, CandidateId::ConstId(_)));
                let resolution = this.cx.resolver.resolve_path_in_value_ns_fully(
                    this.cx.db,
                    path,
                    this.cx.store.pat_path_hygiene(pat),
                );
                let is_normal_const = matches!(resolution, Some(ValueNs::ConstId(_)));
                if mode == PatWalkMode::Assignment
                    && let Some(ValueNs::LocalBinding(local)) = resolution
                {
                    // In destructuring assignment, a path naming a local is an
                    // assignment target.
                    let pat_ty = this.pat_ty(pat)?;
                    let place = this.cat_local(pat.into(), pat_ty, local)?;
                    this.delegate.mutate(place, this.cx);
                } else if is_assoc_const || is_normal_const {
                    // Named constants have to be equated with the value
                    // being matched, so that's a read of the value being matched.
                    //
                    // FIXME: Does the MIR code skip this read when matching on a ZST?
                    // If so, we can also skip it here.
                    read_discriminant(this);
                } else if this.is_multivariant_adt(place.place.ty()) {
                    // Otherwise, this is a struct/enum variant, and so it's
                    // only a read if we need to read the discriminant.
                    read_discriminant(this);
                }
            }
            Pat::Lit(_) | Pat::ConstBlock(_) | Pat::Range { .. } => {
                // When matching against a literal or range, we need to
                // borrow the place to compare it against the pattern.
                //
                // Note that we do this read even if the range matches all
                // possible values, such as 0..=u8::MAX. This is because
                // we don't want to depend on consteval here.
                //
                // FIXME: What if the type being matched only has one
                // possible value?
                read_discriminant(this);
            }
            Pat::Record { .. } | Pat::TupleStruct { .. } => {
                if this.is_multivariant_adt(place.place.ty()) {
                    read_discriminant(this);
                }
            }
            Pat::Slice { prefix: ref lhs, slice: wild, suffix: ref rhs } => {
                // We don't need to test the length if the pattern is `[..]`
                if matches!((&**lhs, wild, &**rhs), (&[], Some(_), &[]))
                    // Arrays have a statically known size, so
                    // there is no need to read their length
                    || place.place.ty().strip_references().is_array()
                {
                    // No read necessary
                } else {
                    read_discriminant(this);
                }
            }
            Pat::Expr(expr) if mode == PatWalkMode::Assignment => {
                // Destructuring assignment.
                this.mutate_expr(expr)?;
            }
            Pat::Or(_)
            | Pat::Box { .. }
            | Pat::Ref { .. }
            | Pat::Tuple { .. }
            | Pat::Wild
            | Pat::Missing => {
                // If the PatKind is Or, Box, Ref, Guard, or Tuple, the relevant accesses
                // are made later as these patterns contains subpatterns.
                // If the PatKind is Missing, Wild or Err, any relevant accesses are made when processing
                // the other patterns that are part of the match
            }
            // Expr patterns outside assignment mode need no access here.
            Pat::Expr(_) => {}
        }
        Ok(())
    })
}
/// Handle the case where the current body contains a closure.
///
/// When the current body being handled is a closure, then we must make sure that
/// - The parent closure only captures Places from the nested closure that are not local to it.
///
/// In the following example the closures `c` only captures `p.x` even though `incr`
/// is a capture of the nested closure
///
/// ```
/// struct P { x: i32 }
/// let mut p = P { x: 4 };
/// let c = || {
///     let incr = 10;
///     let nested = || p.x += incr;
/// };
/// ```
///
/// - When reporting the Place back to the Delegate, ensure that the UpvarId uses the enclosing
///   closure as the DefId.
#[instrument(skip(self), level = "debug")]
fn walk_captures(&mut self, closure_expr: ExprId) {
    // True iff `var_id` is visible (as an upvar) to the body we are walking.
    fn upvar_is_local_variable(upvars: UpvarsRef<'_>, var_id: BindingId) -> bool {
        upvars.contains(var_id)
    }
    // If we have a nested closure, we want to include the fake reads present in the nested
    // closure.
    // `remove()` then re-insert and not `get()` due to borrowck errors.
    if let Some(closure_data) = self.cx.result.closures_data.remove(&closure_expr) {
        for (fake_read, cause, origins) in closure_data.fake_reads.iter() {
            match fake_read.base {
                PlaceBase::Upvar { var_id, closure: _ } => {
                    if upvar_is_local_variable(self.upvars, var_id) {
                        // The nested closure might be fake reading the current (enclosing) closure's local variables.
                        // The only places we want to fake read before creating the parent closure are the ones that
                        // are not local to it/ defined by it.
                        //
                        // ```rust,ignore(cannot-test-this-because-pseudo-code)
                        // let v1 = (0, 1);
                        // let c = || { // fake reads: v1
                        //     let v2 = (0, 1);
                        //     let e = || { // fake reads: v1, v2
                        //        let (_, t1) = v1;
                        //        let (_, t2) = v2;
                        //     }
                        // }
                        // ```
                        // This check is performed when visiting the body of the outermost closure (`c`) and ensures
                        // that we don't add a fake read of v2 in c.
                        continue;
                    }
                }
                _ => {
                    panic!(
                        "Do not know how to get ExprId out of Rvalue and StaticItem {:?}",
                        fake_read.base
                    );
                }
            };
            // Propagate the nested closure's fake read to our own delegate.
            self.delegate.fake_read(
                PlaceWithOrigin { place: fake_read.clone(), origins: origins.clone() },
                *cause,
                self.cx,
            );
        }
        for (var_id, min_list) in closure_data.min_captures.iter() {
            if !self.upvars.contains(*var_id) {
                // The nested closure might be capturing the current (enclosing) closure's local variables.
                // We check if the root variable is ever mentioned within the enclosing closure, if not
                // then for the current body (if it's a closure) these aren't captures, we will ignore them.
                continue;
            }
            for captured_place in min_list {
                let place = &captured_place.place;
                let capture_info = &captured_place.info;
                // Mark the place to be captured by the enclosing closure
                let place_base =
                    PlaceBase::Upvar { var_id: *var_id, closure: self.closure_expr };
                let place_with_id = PlaceWithOrigin::new(
                    capture_info.sources.clone(),
                    place.base_ty.as_ref(),
                    place_base,
                    place.projections.clone(),
                );
                // Re-report the nested closure's capture with the matching kind.
                match capture_info.capture_kind {
                    UpvarCapture::ByValue => {
                        self.consume_or_copy(place_with_id);
                    }
                    UpvarCapture::ByUse => {
                        self.consume_clone_or_copy(place_with_id);
                    }
                    UpvarCapture::ByRef(upvar_borrow) => {
                        self.delegate.borrow(place_with_id, upvar_borrow, self.cx);
                    }
                }
            }
        }
        // Restore the data removed at the top (see comment there).
        self.cx.result.closures_data.insert(closure_expr, closure_data);
    }
}
/// Fails (propagating `ErrorGuaranteed`) when `ty` contains an error type;
/// succeeds otherwise.
fn error_reported_in_ty(&self, ty: Ty<'db>) -> Result {
    match ty.is_ty_error() {
        true => Err(ErrorGuaranteed),
        false => Ok(()),
    }
}
}
/// The job of the methods whose name starts with `cat_` is to analyze
/// expressions and construct the corresponding [`Place`]s. The `cat`
/// stands for "categorize", this is a leftover from long ago when
/// places were called "categorizations".
///
/// Note that a [`Place`] differs somewhat from the expression itself. For
/// example, auto-derefs are explicit. Also, an index `a[b]` is decomposed into
/// two operations: a dereference to reach the array data and then an index to
/// jump forward to the relevant item.
impl<'db, D: Delegate<'db>> ExprUseVisitor<'_, '_, 'db, D> {
/// Resolves inference variables in `ty` and errors out when the type is
/// missing or contains an error type (there is nothing meaningful to
/// categorize in that case).
fn expect_and_resolve_type(&mut self, ty: Option<Ty<'db>>) -> Result<Ty<'db>> {
    match ty {
        Some(ty) => {
            let ty = self.cx.infcx().resolve_vars_if_possible(ty);
            self.error_reported_in_ty(ty)?;
            Ok(ty)
        }
        None => Err(ErrorGuaranteed),
    }
}
/// Resolved type of an expression-or-pattern node.
fn node_ty(&mut self, id: ExprOrPatId) -> Result<Ty<'db>> {
    self.expect_and_resolve_type(self.cx.result.type_of_expr_or_pat(id))
}
/// Resolved type of `expr` (without adjustments; cf. `expr_ty_adjusted`).
fn expr_ty(&mut self, expr: ExprId) -> Result<Ty<'db>> {
    self.node_ty(expr.into())
}
/// Resolved type of `pat`.
fn pat_ty(&mut self, pat: PatId) -> Result<Ty<'db>> {
    self.node_ty(pat.into())
}
/// Resolved type of `expr` after its adjustments have been applied.
fn expr_ty_adjusted(&mut self, expr: ExprId) -> Result<Ty<'db>> {
    self.expect_and_resolve_type(self.cx.result.type_of_expr_with_adjust(expr))
}
/// Returns the type of value that this pattern matches against.
/// Some non-obvious cases:
///
/// - a `ref x` binding matches against a value of type `T` and gives
///   `x` the type `&T`; we return `T`.
/// - a pattern with implicit derefs (thanks to default binding
///   modes #42640) may look like `Some(x)` but in fact have
///   implicit deref patterns attached (e.g., it is really
///   `&Some(x)`). In that case, we return the "outermost" type
///   (e.g., `&Option<T>`).
fn pat_ty_adjusted(&mut self, pat: PatId) -> Result<Ty<'db>> {
    // Check for implicit `&` types wrapping the pattern; note
    // that these are never attached to binding patterns, so
    // actually this is somewhat "disjoint" from the code below
    // that aims to account for `ref x`.
    if let Some(vec) = self.cx.result.pat_adjustments.get(&pat)
        && let Some(first_adjust) = vec.first()
    {
        debug!("pat_ty(pat={:?}) found adjustment `{:?}`", pat, first_adjust);
        // The first (outermost) adjustment carries the pre-deref type.
        return Ok(first_adjust.as_ref());
    }
    self.pat_ty_unadjusted(pat)
}
/// Like [`Self::pat_ty_adjusted`], but ignores implicit `&` patterns.
fn pat_ty_unadjusted(&mut self, pat: PatId) -> Result<Ty<'db>> {
    Ok(self.cx.result.pat_ty(pat))
}
/// Categorizes `expr` into a place, applying all of its recorded adjustments.
fn cat_expr(&mut self, expr: ExprId) -> Result<PlaceWithOrigin> {
    self.cat_expr_(expr, &self.expr_adjustments(expr))
}
/// This recursion helper avoids going through *too many*
/// adjustments, since *only* non-overloaded deref recurses.
fn cat_expr_(&mut self, expr: ExprId, adjustments: &[Adjustment]) -> Result<PlaceWithOrigin> {
    // Peel adjustments from the outside in: the last adjustment wraps the
    // place produced by all the earlier ones.
    match adjustments.split_last() {
        None => self.cat_expr_unadjusted(expr),
        Some((adjustment, previous)) => {
            self.cat_expr_adjusted_with(expr, |this| this.cat_expr_(expr, previous), adjustment)
        }
    }
}
/// Applies a single `adjustment` on top of an already-categorized `previous`
/// place for `expr`.
fn cat_expr_adjusted(
    &mut self,
    expr: ExprId,
    previous: PlaceWithOrigin,
    adjustment: &Adjustment,
) -> Result<PlaceWithOrigin> {
    self.cat_expr_adjusted_with(expr, |_this| Ok(previous), adjustment)
}
/// Shared worker for applying one adjustment: `previous` lazily produces the
/// place the adjustment applies to (only needed for built-in derefs).
fn cat_expr_adjusted_with<F>(
    &mut self,
    expr: ExprId,
    previous: F,
    adjustment: &Adjustment,
) -> Result<PlaceWithOrigin>
where
    F: FnOnce(&mut Self) -> Result<PlaceWithOrigin>,
{
    let target = self.cx.infcx().resolve_vars_if_possible(adjustment.target.as_ref());
    match adjustment.kind {
        Adjust::Deref(overloaded) => {
            // Equivalent to *expr or something similar.
            let base = if let Some(deref) = overloaded {
                // Overloaded deref: model the `deref()` call's `&self`
                // receiver as a reference rvalue (region erased).
                let ref_ty = Ty::new_ref(
                    self.cx.interner(),
                    self.cx.types.regions.erased,
                    target,
                    deref.0,
                );
                self.cat_rvalue(expr.into(), ref_ty)
            } else {
                previous(self)?
            };
            self.cat_deref(expr.into(), base)
        }
        Adjust::NeverToAny | Adjust::Pointer(_) | Adjust::Borrow(_) => {
            // Result is an rvalue.
            Ok(self.cat_rvalue(expr.into(), target))
        }
    }
}
/// Categorizes `expr` into a place, ignoring any adjustments. Only deref,
/// field access, indexing and paths produce non-rvalue places; everything
/// else is an rvalue.
fn cat_expr_unadjusted(&mut self, expr: ExprId) -> Result<PlaceWithOrigin> {
    let expr_ty = self.expr_ty(expr)?;
    match self.cx.store[expr] {
        Expr::UnaryOp { expr: e_base, op: UnaryOp::Deref } => {
            // A method resolution here means an overloaded `Deref::deref` call.
            if self.cx.result.method_resolutions.contains_key(&expr) {
                self.cat_overloaded_place(expr, e_base)
            } else {
                let base = self.cat_expr(e_base)?;
                self.cat_deref(expr.into(), base)
            }
        }
        Expr::Field { expr: base, .. } => {
            let base = self.cat_expr(base)?;
            debug!(?base);
            let field_idx = self
                .cx
                .result
                .field_resolutions
                .get(&expr)
                .map(|field| match *field {
                    Either::Left(field) => field.local_id.into_raw().into_u32(),
                    Either::Right(tuple_field) => tuple_field.index,
                })
                .ok_or(ErrorGuaranteed)?;
            Ok(self.cat_projection(
                expr.into(),
                base,
                expr_ty,
                ProjectionKind::Field { field_idx, variant_idx: 0 },
            ))
        }
        Expr::Index { base, index: _ } => {
            // rustc checks if this is an overloaded index, but the check is buggy and treats any indexing
            // as overloaded, see https://rust-lang.zulipchat.com/#narrow/channel/144729-t-types/topic/.E2.9C.94.20Is.20builtin.20indexing.20any.20special.20in.20typeck.3F/near/565881390.
            // So that's what we do here.
            self.cat_overloaded_place(expr, base)
        }
        Expr::Path(ref path) => {
            // Resolve the path in the scope of `expr` (guard restores the
            // resolver afterwards).
            let resolver_guard =
                self.cx.resolver.update_to_inner_scope(self.cx.db, self.cx.owner, expr);
            let resolution = self.cx.resolver.resolve_path_in_value_ns_fully(
                self.cx.db,
                path,
                self.cx.store.expr_path_hygiene(expr),
            );
            self.cx.resolver.reset_to_guard(resolver_guard);
            match (resolution, self.cx.result.assoc_resolutions_for_expr(expr)) {
                // Constants, functions, ctors etc. evaluate to rvalues.
                (_, Some((CandidateId::FunctionId(_) | CandidateId::ConstId(_), _)))
                | (
                    Some(
                        ValueNs::ConstId(_)
                        | ValueNs::GenericParam(_)
                        | ValueNs::FunctionId(_)
                        | ValueNs::ImplSelf(_)
                        | ValueNs::EnumVariantId(_)
                        | ValueNs::StructId(_),
                    ),
                    None,
                ) => Ok(self.cat_rvalue(expr.into(), expr_ty)),
                (Some(ValueNs::StaticId(_)), None) => Ok(PlaceWithOrigin::new_no_projections(
                    expr,
                    expr_ty,
                    PlaceBase::StaticItem,
                )),
                (Some(ValueNs::LocalBinding(var_id)), None) => {
                    self.cat_local(expr.into(), expr_ty, var_id)
                }
                (None, None) => Err(ErrorGuaranteed),
            }
        }
        _ => Ok(self.cat_rvalue(expr.into(), expr_ty)),
    }
}
/// Categorizes a reference to the local binding `var_id`: an upvar place when
/// the binding is captured from an enclosing scope, otherwise a plain local.
fn cat_local(
    &mut self,
    id: ExprOrPatId,
    expr_ty: Ty<'db>,
    var_id: BindingId,
) -> Result<PlaceWithOrigin> {
    if self.upvars.contains(var_id) {
        self.cat_upvar(id, var_id)
    } else {
        Ok(PlaceWithOrigin::new_no_projections(id, expr_ty, PlaceBase::Local(var_id)))
    }
}
/// Categorize an upvar.
///
/// Note: the actual upvar access contains invisible derefs of closure
/// environment and upvar reference as appropriate. Only regionck cares
/// about these dereferences, so we let it compute them as needed.
fn cat_upvar(&mut self, hir_id: ExprOrPatId, var_id: BindingId) -> Result<PlaceWithOrigin> {
    // Use the binding's own type, not the (possibly adjusted) use-site type.
    let var_ty = self.expect_and_resolve_type(
        self.cx.result.type_of_binding.get(var_id).map(|it| it.as_ref()),
    )?;
    Ok(PlaceWithOrigin::new_no_projections(
        hir_id,
        var_ty,
        PlaceBase::Upvar { closure: self.closure_expr, var_id },
    ))
}
/// Builds a place for a temporary (rvalue) of type `expr_ty`.
fn cat_rvalue(&self, hir_id: ExprOrPatId, expr_ty: Ty<'db>) -> PlaceWithOrigin {
    PlaceWithOrigin::new_no_projections(hir_id, expr_ty, PlaceBase::Rvalue)
}
/// Extends `base_place` with one projection `kind` (field, deref, index, ...)
/// of type `ty`, recording `node` as the origin of the projection.
fn cat_projection(
    &self,
    node: ExprOrPatId,
    mut base_place: PlaceWithOrigin,
    ty: Ty<'db>,
    kind: ProjectionKind,
) -> PlaceWithOrigin {
    base_place.push_projection(Projection { kind, ty: ty.store() }, node);
    base_place
}
/// Categorizes an overloaded deref/index (`*x` via `Deref`, `x[i]` via
/// `Index`) as a deref of a synthesized reference rvalue.
fn cat_overloaded_place(&mut self, expr: ExprId, base: ExprId) -> Result<PlaceWithOrigin> {
    // Reconstruct the output assuming it's a reference with the
    // same region and mutability as the receiver. This holds for
    // `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`.
    let place_ty = self.expr_ty(expr)?;
    let base_ty = self.expr_ty_adjusted(base)?;
    let TyKind::Ref(region, _, mutbl) = self.cx.table.structurally_resolve_type(base_ty).kind()
    else {
        return Err(ErrorGuaranteed);
    };
    let ref_ty = Ty::new_ref(self.cx.interner(), region, place_ty, mutbl);
    let base = self.cat_rvalue(expr.into(), ref_ty);
    self.cat_deref(expr.into(), base)
}
/// Pushes a deref projection onto `base_place`; fails when the base's type is
/// not (builtin-)dereferenceable.
fn cat_deref(
    &mut self,
    node: ExprOrPatId,
    mut base_place: PlaceWithOrigin,
) -> Result<PlaceWithOrigin> {
    let base_curr_ty = base_place.place.ty();
    let Some(deref_ty) =
        self.cx.table.structurally_resolve_type(base_curr_ty).builtin_deref(true)
    else {
        debug!("explicit deref of non-derefable type: {:?}", base_curr_ty);
        return Err(ErrorGuaranteed);
    };
    base_place.push_projection(
        Projection { kind: ProjectionKind::Deref, ty: deref_ty.store() },
        node,
    );
    Ok(base_place)
}
/// Returns the variant index (and the resolved variant) for an ADT used
/// within a Struct or TupleStruct pattern. Here `pat_id` is the `PatId` of
/// the pattern itself. Structs and unions always have index 0.
fn variant_index_for_adt(&self, pat_id: PatId) -> Result<(u32, VariantId)> {
    let variant = self.cx.result.variant_resolution_for_pat(pat_id).ok_or(ErrorGuaranteed)?;
    let variant_idx = match variant {
        VariantId::EnumVariantId(variant) => variant.loc(self.cx.db).index,
        VariantId::StructId(_) | VariantId::UnionId(_) => 0,
    };
    Ok((variant_idx, variant))
}
/// Returns the total number of fields in a tuple used within a Tuple pattern.
/// Here `pat_id` is the `PatId` of the pattern itself.
///
/// Panics if the pattern's type does not resolve to a tuple.
fn total_fields_in_tuple(&mut self, pat_id: PatId) -> usize {
    let ty = self.cx.result.pat_ty(pat_id);
    match self.cx.table.structurally_resolve_type(ty).kind() {
        TyKind::Tuple(args) => args.len(),
        _ => panic!("tuple pattern not applied to a tuple"),
    }
}
/// Here, `place_with_id` is the `PlaceWithId` being matched and `pat` is the
/// pattern it is being matched against.
///
/// In general, the way that this works is that we walk down the pattern,
/// constructing a `PlaceWithId` that represents the path that will be taken
/// to reach the value being matched. `op` is invoked for every (place,
/// sub-pattern) pair encountered along the way.
fn cat_pattern<F>(
    &mut self,
    mut place_with_id: PlaceWithOrigin,
    pat: PatId,
    op: &mut F,
) -> Result
where
    F: FnMut(&mut Self, PlaceWithOrigin, PatId) -> Result,
{
    // If (pattern) adjustments are active for this pattern, adjust the `PlaceWithId` correspondingly.
    // `PlaceWithId`s are constructed differently from patterns. For example, in
    //
    // ```
    // match foo {
    //     &&Some(x, ) => { ... },
    //     _ => { ... },
    // }
    // ```
    //
    // the pattern `&&Some(x,)` is represented as `Ref { Ref { TupleStruct }}`. To build the
    // corresponding `PlaceWithId` we start with the `PlaceWithId` for `foo`, and then, by traversing the
    // pattern, try to answer the question: given the address of `foo`, how is `x` reached?
    //
    // `&&Some(x,)` `place_foo`
    //  `&Some(x,)` `deref { place_foo}`
    //   `Some(x,)` `deref { deref { place_foo }}`
    //        `(x,)` `field0 { deref { deref { place_foo }}}` <- resulting place
    //
    // The above example has no adjustments. If the code were instead the (after adjustments,
    // equivalent) version
    //
    // ```
    // match foo {
    //     Some(x, ) => { ... },
    //     _ => { ... },
    // }
    // ```
    //
    // Then we see that to get the same result, we must start with
    // `deref { deref { place_foo }}` instead of `place_foo` since the pattern is now `Some(x,)`
    // and not `&&Some(x,)`, even though its assigned type is that of `&&Some(x,)`.
    let adjustments_len = self.cx.result.pat_adjustment(pat).map_or(0, |it| it.len());
    // Each recorded adjustment corresponds to one implicit deref of the scrutinee place.
    for _ in 0..adjustments_len {
        debug!("applying adjustment to place_with_id={:?}", place_with_id);
        // FIXME: We need to adjust this once we implement deref patterns (or pin ergonomics, for that matter).
        place_with_id = self.cat_deref(pat.into(), place_with_id)?;
    }
    let place_with_id = place_with_id; // lose mutability
    debug!("applied adjustment derefs to get place_with_id={:?}", place_with_id);
    // Invoke the callback, but only now, after the `place_with_id` has adjusted.
    //
    // To see that this makes sense, consider `match &Some(3) { Some(x) => { ... }}`. In that
    // case, the initial `place_with_id` will be that for `&Some(3)` and the pattern is `Some(x)`. We
    // don't want to call `op` with these incompatible values. As written, what happens instead
    // is that `op` is called with the adjusted place (that for `*&Some(3)`) and the pattern
    // `Some(x)` (which matches). Recursing once more, `*&Some(3)` and the pattern `Some(x)`
    // result in the place `Downcast<Some>(*&Some(3)).0` associated to `x` and invoke `op` with
    // that (where the `ref` on `x` is implied).
    op(self, place_with_id.clone(), pat)?;
    match self.cx.store[pat] {
        Pat::Tuple { args: ref subpats, ellipsis: dots_pos } => {
            // (p1, ..., pN)
            let total_fields = self.total_fields_in_tuple(pat);
            for (i, &subpat) in subpats.iter().enumerate_and_adjust(total_fields, dots_pos) {
                let subpat_ty = self.pat_ty_adjusted(subpat)?;
                // Tuples have a single implicit variant, hence `variant_idx: 0`.
                let projection_kind =
                    ProjectionKind::Field { field_idx: i as u32, variant_idx: 0 };
                let sub_place = self.cat_projection(
                    pat.into(),
                    place_with_id.clone(),
                    subpat_ty,
                    projection_kind,
                );
                self.cat_pattern(sub_place, subpat, op)?;
            }
        }
        Pat::TupleStruct { args: ref subpats, ellipsis: dots_pos, .. } => {
            // S(p1, ..., pN)
            let (variant_index, variant) = self.variant_index_for_adt(pat)?;
            let total_fields = variant.fields(self.cx.db).len();
            for (i, &subpat) in subpats.iter().enumerate_and_adjust(total_fields, dots_pos) {
                let subpat_ty = self.pat_ty_adjusted(subpat)?;
                let projection_kind =
                    ProjectionKind::Field { variant_idx: variant_index, field_idx: i as u32 };
                let sub_place = self.cat_projection(
                    pat.into(),
                    place_with_id.clone(),
                    subpat_ty,
                    projection_kind,
                );
                self.cat_pattern(sub_place, subpat, op)?;
            }
        }
        Pat::Record { args: ref field_pats, .. } => {
            // S { f1: p1, ..., fN: pN }
            let (variant_index, variant) = self.variant_index_for_adt(pat)?;
            let fields = variant.fields(self.cx.db);
            for fp in field_pats {
                let field_ty = self.pat_ty_adjusted(fp.pat)?;
                // Unknown field name → resolution error; bail out.
                let field_index = fields.field(&fp.name).ok_or(ErrorGuaranteed)?;
                let field_place = self.cat_projection(
                    pat.into(),
                    place_with_id.clone(),
                    field_ty,
                    ProjectionKind::Field {
                        variant_idx: variant_index,
                        field_idx: field_index.into_raw().into_u32(),
                    },
                );
                self.cat_pattern(field_place, fp.pat, op)?;
            }
        }
        Pat::Or(ref pats) => {
            // Each alternative matches against the same place.
            for &pat in pats {
                self.cat_pattern(place_with_id.clone(), pat, op)?;
            }
        }
        Pat::Bind { subpat: Some(subpat), .. } => {
            // `x @ subpat`: the binding itself was handled by `op` above;
            // recurse into the sub-pattern against the same place.
            self.cat_pattern(place_with_id, subpat, op)?;
        }
        Pat::Box { inner: subpat } | Pat::Ref { pat: subpat, .. } => {
            // box p1, &p1, &mut p1. we can ignore the mutability of
            // PatKind::Ref since that information is already contained
            // in the type.
            let subplace = self.cat_deref(pat.into(), place_with_id)?;
            self.cat_pattern(subplace, subpat, op)?;
        }
        Pat::Slice { prefix: ref before, slice, suffix: ref after } => {
            let Some(element_ty) = self
                .cx
                .table
                .structurally_resolve_type(place_with_id.place.ty())
                .builtin_index()
            else {
                debug!("explicit index of non-indexable type {:?}", place_with_id);
                panic!("explicit index of non-indexable type");
            };
            // All prefix/suffix sub-patterns share one representative
            // indexed-element place.
            let elt_place = self.cat_projection(
                pat.into(),
                place_with_id.clone(),
                element_ty,
                ProjectionKind::Index,
            );
            for &before_pat in before {
                self.cat_pattern(elt_place.clone(), before_pat, op)?;
            }
            if let Some(slice_pat) = slice {
                // The `..rest` sub-pattern matches a subslice, not an element.
                let slice_pat_ty = self.pat_ty_adjusted(slice_pat)?;
                let slice_place = self.cat_projection(
                    pat.into(),
                    place_with_id,
                    slice_pat_ty,
                    ProjectionKind::Subslice,
                );
                self.cat_pattern(slice_place, slice_pat, op)?;
            }
            for &after_pat in after {
                self.cat_pattern(elt_place.clone(), after_pat, op)?;
            }
        }
        Pat::Bind { subpat: None, .. }
        | Pat::Expr(..)
        | Pat::Path(_)
        | Pat::Lit(..)
        | Pat::ConstBlock(..)
        | Pat::Range { .. }
        | Pat::Missing
        | Pat::Wild => {
            // always ok
        }
    }
    Ok(())
}
/// Checks whether a type has multiple variants, and therefore, whether a
/// read of the discriminant might be necessary. Note that the actual MIR
/// builder code does a more specific check, filtering out variants that
/// happen to be uninhabited.
///
/// Here, it is not practical to perform such a check, because inhabitedness
/// queries require typeck results, and typeck requires closure capture analysis.
///
/// Moreover, the language is moving towards uninhabited variants still semantically
/// causing a discriminant read, so we *shouldn't* perform any such check.
///
/// FIXME(never_patterns): update this comment once the aforementioned MIR builder
/// code is changed to be insensitive to inhabitedness.
#[instrument(skip(self), level = "debug")]
fn is_multivariant_adt(&mut self, ty: Ty<'db>) -> bool {
    let TyKind::Adt(def, _) = self.cx.table.structurally_resolve_type(ty).kind() else {
        return false;
    };
    match def.def_id().0 {
        // Structs and unions always have exactly one variant.
        AdtId::StructId(_) | AdtId::UnionId(_) => false,
        AdtId::EnumId(enum_id) => {
            // A `#[non_exhaustive]` enum defined in another crate must be
            // treated as if it could grow more variants, i.e. like a
            // multi-variant enum, even if it currently has a single variant.
            // (The attribute check is only done lazily, when the enum has at
            // most one variant.)
            enum_id.enum_variants(self.cx.db).variants.len() > 1
                || (AttrFlags::query(self.cx.db, enum_id.into())
                    .contains(AttrFlags::NON_EXHAUSTIVE)
                    && enum_id.krate(self.cx.db) != self.cx.krate())
        }
    }
}
}
@@ -11,7 +11,7 @@
InlineAsmKind, LabelId, Literal, Pat, PatId, RecordSpread, Statement, UnaryOp,
},
resolver::ValueNs,
signatures::{FunctionSignature, VariantFields},
signatures::VariantFields,
};
use hir_def::{FunctionId, hir::ClosureKind};
use hir_expand::name::Name;
@@ -24,9 +24,7 @@
use tracing::debug;
use crate::{
Adjust, Adjustment, CallableDefId, DeclContext, DeclOrigin, Rawness,
autoderef::InferenceContextAutoderef,
consteval,
Adjust, Adjustment, CallableDefId, DeclContext, DeclOrigin, Rawness, consteval,
generics::generics,
infer::{
AllowTwoPhase, BreakableKind, coerce::CoerceMany, find_continuable,
@@ -35,8 +33,7 @@
lower::{GenericPredicates, lower_mutability},
method_resolution::{self, CandidateId, MethodCallee, MethodError},
next_solver::{
ClauseKind, ErrorGuaranteed, FnSig, GenericArg, GenericArgs, TraitRef, Ty, TyKind,
TypeError,
ClauseKind, FnSig, GenericArg, GenericArgs, TraitRef, Ty, TyKind, TypeError,
infer::{
BoundRegionConversionTime, InferOk,
traits::{Obligation, ObligationCause},
@@ -44,7 +41,6 @@
obligation_ctxt::ObligationCtxt,
util::clauses_as_obligations,
},
traits::FnTrait,
};
use super::{
@@ -1180,68 +1176,6 @@ fn infer_unop_expr(
}
oprnd_t
}
/// Records the resolution of a call that goes through one of the `Fn*` /
/// `AsyncFn*` traits, and pushes the borrow adjustments the chosen trait's
/// calling convention requires onto `adjustments` (which already contains the
/// autoderef steps for the callee).
pub(crate) fn write_fn_trait_method_resolution(
    &mut self,
    fn_x: FnTrait,
    derefed_callee: Ty<'db>,
    adjustments: &mut Vec<Adjustment>,
    callee_ty: Ty<'db>,
    params: &[Ty<'db>],
    tgt_expr: ExprId,
) {
    match fn_x {
        // `FnOnce`/`AsyncFnOnce` take the callee by value: no borrow needed.
        FnTrait::FnOnce | FnTrait::AsyncFnOnce => (),
        FnTrait::FnMut | FnTrait::AsyncFnMut => {
            if let TyKind::Ref(lt, inner, Mutability::Mut) = derefed_callee.kind() {
                if adjustments
                    .last()
                    .map(|it| matches!(it.kind, Adjust::Borrow(_)))
                    .unwrap_or(true)
                {
                    // prefer reborrow to move
                    adjustments
                        .push(Adjustment { kind: Adjust::Deref(None), target: inner.store() });
                    adjustments.push(Adjustment::borrow(
                        self.interner(),
                        Mutability::Mut,
                        inner,
                        lt,
                    ))
                }
            } else {
                // Not already a `&mut`: take a fresh mutable borrow of the callee.
                adjustments.push(Adjustment::borrow(
                    self.interner(),
                    Mutability::Mut,
                    derefed_callee,
                    self.table.next_region_var(),
                ));
            }
        }
        FnTrait::Fn | FnTrait::AsyncFn => {
            // A shared borrow is needed unless the callee is already `&T`.
            if !matches!(derefed_callee.kind(), TyKind::Ref(_, _, Mutability::Not)) {
                adjustments.push(Adjustment::borrow(
                    self.interner(),
                    Mutability::Not,
                    derefed_callee,
                    self.table.next_region_var(),
                ));
            }
        }
    }
    // Resolve the trait's call method and record it for the call expression,
    // if the corresponding lang item and method exist.
    let Some(trait_) = fn_x.get_id(self.lang_items) else {
        return;
    };
    let trait_data = trait_.trait_items(self.db);
    if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
        // Substitution is `<callee_ty as FnTrait<(params,...)>>`: the argument
        // types are packed into a single tuple, as the `Fn*` traits expect.
        let subst = GenericArgs::new_from_slice(&[
            callee_ty.into(),
            Ty::new_tup(self.interner(), params).into(),
        ]);
        self.write_method_resolution(tgt_expr, func, subst);
    }
}
fn infer_expr_array(&mut self, array: &Array, expected: &Expectation<'db>) -> Ty<'db> {
let elem_ty = match expected
.to_option(&mut self.table)
@@ -1658,76 +1592,6 @@ fn instantiate_erroneous_method(&mut self, def_id: FunctionId) -> MethodCallee<'
MethodCallee { def_id, args, sig }
}
/// Infers the type of a call expression `callee(args...)`.
///
/// The callee is autoderefed until a callable signature is found (either an
/// intrinsic one, or via the `Fn*` traits — see `callable_sig`); the deref
/// adjustments are written back, the arguments are checked against the
/// parameter types, and the signature's return type is produced.
fn infer_call(
    &mut self,
    tgt_expr: ExprId,
    callee: ExprId,
    args: &[ExprId],
    expected: &Expectation<'db>,
) -> Ty<'db> {
    let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes);
    let callee_ty = self.table.try_structurally_resolve_type(callee_ty);
    let interner = self.interner();
    // Autoderef the callee until some deref step yields a callable signature;
    // if none does, fall back to the original callee type with no signature.
    let mut derefs = InferenceContextAutoderef::new_from_inference_context(self, callee_ty);
    let (res, derefed_callee) = loop {
        let Some((callee_deref_ty, _)) = derefs.next() else {
            break (None, callee_ty);
        };
        if let Some(res) = derefs.ctx().table.callable_sig(callee_deref_ty, args.len()) {
            break (Some(res), callee_deref_ty);
        }
    };
    // if the function is unresolved, we use is_varargs=true to
    // suppress the arg count diagnostic here
    let is_varargs = derefed_callee.callable_sig(interner).is_some_and(|sig| sig.c_variadic())
        || res.is_none();
    let (param_tys, ret_ty) = match res {
        Some((func, params, ret_ty)) => {
            // Commit the autoderef steps as adjustments on the callee expr.
            let infer_ok = derefs.adjust_steps_as_infer_ok();
            let mut adjustments = self.table.register_infer_ok(infer_ok);
            if let Some(fn_x) = func {
                // The call goes through an `Fn*` trait: record the trait
                // method resolution and any required borrow adjustments.
                self.write_fn_trait_method_resolution(
                    fn_x,
                    derefed_callee,
                    &mut adjustments,
                    callee_ty,
                    &params,
                    tgt_expr,
                );
            }
            if let TyKind::Closure(c, _) = self.table.resolve_completely(callee_ty).kind() {
                // Calling a closure constrains its kind/captures; defer that
                // processing until closure analysis runs.
                self.add_current_closure_dependency(c.into());
                self.deferred_closures.entry(c.into()).or_default().push((
                    derefed_callee,
                    callee_ty,
                    params.clone(),
                    tgt_expr,
                ));
            }
            self.write_expr_adj(callee, adjustments.into_boxed_slice());
            (params, ret_ty)
        }
        None => {
            // Nothing callable found: diagnose and recover with an error type.
            self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
                call_expr: tgt_expr,
                found: callee_ty.store(),
            });
            (Vec::new(), Ty::new_error(interner, ErrorGuaranteed))
        }
    };
    let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
    self.check_call(
        tgt_expr,
        args,
        callee_ty,
        &param_tys,
        ret_ty,
        &indices_to_skip,
        is_varargs,
        expected,
    )
}
fn check_call(
&mut self,
tgt_expr: ExprId,
@@ -1749,6 +1613,7 @@ fn check_call(
args,
indices_to_skip,
is_varargs,
TupleArgumentsFlag::DontTupleArguments,
);
ret_ty
}
@@ -1879,13 +1744,22 @@ fn check_method_call(
};
let ret_ty = sig.output();
self.check_call_arguments(tgt_expr, param_tys, ret_ty, expected, args, &[], sig.c_variadic);
self.check_call_arguments(
tgt_expr,
param_tys,
ret_ty,
expected,
args,
&[],
sig.c_variadic,
TupleArgumentsFlag::DontTupleArguments,
);
ret_ty
}
/// Generic function that factors out common logic from function calls,
/// method calls and overloaded operators.
pub(in super::super) fn check_call_arguments(
pub(super) fn check_call_arguments(
&mut self,
call_expr: ExprId,
// Types (as defined in the *signature* of the target function)
@@ -1898,6 +1772,8 @@ pub(in super::super) fn check_call_arguments(
skip_indices: &[u32],
// Whether the function is variadic, for example when imported from C
c_variadic: bool,
// Whether the arguments have been bundled in a tuple (ex: closures)
tuple_arguments: TupleArgumentsFlag,
) {
let formal_input_tys: Vec<_> = formal_input_tys
.iter()
@@ -1949,12 +1825,46 @@ pub(in super::super) fn check_call_arguments(
})
.unwrap_or_default();
// If the arguments should be wrapped in a tuple (ex: closures), unwrap them here
let (formal_input_tys, expected_input_tys) =
if tuple_arguments == TupleArgumentsFlag::TupleArguments {
let tuple_type = self.table.structurally_resolve_type(formal_input_tys[0]);
match tuple_type.kind() {
// We expected a tuple and got a tuple
TyKind::Tuple(arg_types) => {
// Argument length differs
if arg_types.len() != provided_args.len() {
// FIXME: Emit an error.
}
let expected_input_tys = match expected_input_tys {
Some(expected_input_tys) => match expected_input_tys.first() {
Some(ty) => match ty.kind() {
TyKind::Tuple(tys) => Some(tys.iter().collect()),
_ => None,
},
None => None,
},
None => None,
};
(arg_types.iter().collect(), expected_input_tys)
}
_ => {
// Otherwise, there's a mismatch, so clear out what we're expecting, and set
// our input types to err_args so we don't blow up the error messages
// FIXME: Emit an error.
(vec![self.types.types.error; provided_args.len()], None)
}
}
} else {
(formal_input_tys.to_vec(), expected_input_tys)
};
// If there are no external expectations at the call site, just use the types from the function defn
let expected_input_tys = if let Some(expected_input_tys) = &expected_input_tys {
let expected_input_tys = if let Some(expected_input_tys) = expected_input_tys {
assert_eq!(expected_input_tys.len(), formal_input_tys.len());
expected_input_tys
} else {
&formal_input_tys
formal_input_tys.clone()
};
let minimum_input_count = expected_input_tys.len();
@@ -2127,51 +2037,6 @@ fn register_obligations_for_call(&mut self, callable_ty: Ty<'db>) {
}
}
/// Returns the argument indices to skip.
///
/// NOTE(review): this appears to support functions whose signature declares
/// "legacy const generics" (const parameters passed positionally as value
/// arguments) — confirm against `FunctionSignature::legacy_const_generics_indices`.
/// The returned (sorted) indices are the argument positions that should not be
/// type-checked as ordinary value arguments.
fn check_legacy_const_generics(&mut self, callee: Ty<'db>, args: &[ExprId]) -> Box<[u32]> {
    // Only plain `fn` items can carry legacy const generics indices.
    let (func, _subst) = match callee.kind() {
        TyKind::FnDef(callable, subst) => {
            let func = match callable.0 {
                CallableDefId::FunctionId(f) => f,
                _ => return Default::default(),
            };
            (func, subst)
        }
        _ => return Default::default(),
    };
    let data = FunctionSignature::of(self.db, func);
    let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
    else {
        return Default::default();
    };
    let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);
    // only use legacy const generics if the param count matches with them
    if data.params.len() + legacy_const_generics_indices.len() != args.len() {
        if args.len() <= data.params.len() {
            return Default::default();
        } else {
            // there are more parameters than there should be without legacy
            // const params; use them
            legacy_const_generics_indices.sort_unstable();
            return legacy_const_generics_indices;
        }
    }
    // check legacy const parameters
    for arg_idx in legacy_const_generics_indices.iter().copied() {
        if arg_idx >= args.len() as u32 {
            // Out-of-range index: nothing to infer for it.
            continue;
        }
        let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
        self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
        // FIXME: evaluate and unify with the const
    }
    legacy_const_generics_indices.sort_unstable();
    legacy_const_generics_indices
}
pub(super) fn with_breakable_ctx<T>(
&mut self,
kind: BreakableKind,
@@ -2187,3 +2052,28 @@ pub(super) fn with_breakable_ctx<T>(
(if ctx.may_break { ctx.coerce.map(|ctx| ctx.complete(self)) } else { None }, res)
}
}
/// Controls whether the arguments are tupled. This is used for the call
/// operator.
///
/// Tupling means that all call-side arguments are packed into a tuple and
/// passed as a single parameter. For example, if tupling is enabled, this
/// function:
/// ```
/// fn f(x: (isize, isize)) {}
/// ```
/// Can be called as:
/// ```ignore UNSOLVED (can this be done in user code?)
/// # fn f(x: (isize, isize)) {}
/// f(1, 2);
/// ```
/// Instead of:
/// ```
/// # fn f(x: (isize, isize)) {}
/// f((1, 2));
/// ```
#[derive(Copy, Clone, Eq, PartialEq)]
pub(super) enum TupleArgumentsFlag {
    /// Arguments are checked positionally, one formal parameter per argument.
    DontTupleArguments,
    /// All call-site arguments are packed into a single tuple parameter.
    TupleArguments,
}
@@ -38,7 +38,7 @@ fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
) {
self.table.register_predicates(infer_ok.obligations);
}
*d = OverloadedDeref(Some(mutability));
*d = OverloadedDeref(mutability);
}
}
Adjust::Borrow(b) => match b {
@@ -3,9 +3,7 @@
use std::fmt;
use base_db::Crate;
use hir_def::{AdtId, ExpressionStoreOwnerId, GenericParamId};
use hir_expand::name::Name;
use intern::sym;
use hir_def::{AdtId, ExpressionStoreOwnerId, GenericParamId, TraitId};
use rustc_hash::FxHashSet;
use rustc_type_ir::{
TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom,
@@ -17,9 +15,9 @@
use crate::{
db::HirDatabase,
next_solver::{
AliasTy, Canonical, ClauseKind, Const, DbInterner, ErrorGuaranteed, GenericArg,
GenericArgs, Goal, ParamEnv, Predicate, PredicateKind, Region, SolverDefId, Term, TraitRef,
Ty, TyKind, TypingMode,
Canonical, ClauseKind, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Goal,
ParamEnv, Predicate, PredicateKind, Region, SolverDefId, Term, TraitRef, Ty, TyKind,
TypingMode,
fulfill::{FulfillmentCtxt, NextSolverError},
infer::{
DbInternerInferExt, InferCtxt, InferOk, InferResult,
@@ -31,7 +29,7 @@
obligation_ctxt::ObligationCtxt,
},
traits::{
FnTrait, NextTraitSolveResult, ParamEnvAndCrate, next_trait_solve_canonical_in_ctxt,
NextTraitSolveResult, ParamEnvAndCrate, next_trait_solve_canonical_in_ctxt,
next_trait_solve_in_ctxt,
},
};
@@ -174,6 +172,10 @@ pub(crate) fn type_is_copy_modulo_regions(&self, ty: Ty<'db>) -> bool {
self.infer_ctxt.type_is_copy_modulo_regions(self.param_env, ty)
}
/// Delegates to the inference context's `type_is_use_cloned_modulo_regions`
/// check under the current parameter environment (region constraints ignored).
pub(crate) fn type_is_use_cloned_modulo_regions(&self, ty: Ty<'db>) -> bool {
    self.infer_ctxt.type_is_use_cloned_modulo_regions(self.param_env, ty)
}
pub(crate) fn type_var_is_sized(&self, self_ty: TyVid) -> bool {
let Some(sized_did) = self.interner().lang_items().Sized else {
return true;
@@ -360,9 +362,6 @@ pub(crate) fn fresh_args_for_item(&self, def: SolverDefId) -> GenericArgs<'db> {
/// in this case.
pub(crate) fn try_structurally_resolve_type(&mut self, ty: Ty<'db>) -> Ty<'db> {
if let TyKind::Alias(..) = ty.kind() {
// We need to use a separate variable here as otherwise the temporary for
// `self.fulfillment_cx.borrow_mut()` is alive in the `Err` branch, resulting
// in a reentrant borrow, causing an ICE.
let result = self
.infer_ctxt
.at(&ObligationCause::misc(), self.param_env)
@@ -445,6 +444,18 @@ fn register_obligation_in_env(&mut self, goal: Goal<'db, Predicate<'db>>) {
}
}
/// Registers the obligation that `ty` implements the trait `def_id`.
///
/// Skipped when `ty` already contains a (non-lifetime) error, to avoid
/// cascading diagnostics from an already-reported failure.
pub(crate) fn register_bound(&mut self, ty: Ty<'db>, def_id: TraitId, cause: ObligationCause) {
    if !ty.references_non_lt_error() {
        let trait_ref = TraitRef::new(self.interner(), def_id.into(), [ty]);
        self.register_predicate(Obligation::new(
            self.interner(),
            cause,
            self.param_env,
            trait_ref,
        ));
    }
}
pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<'db, T>) -> T {
let InferOk { value, obligations } = infer_ok;
self.register_predicates(obligations);
@@ -489,81 +500,6 @@ pub(crate) fn add_wf_bounds(&mut self, args: GenericArgs<'db>) {
}
}
/// Returns the parameter types and return type `ty` can be called with, if any.
///
/// If `ty` has an intrinsic callable signature, that signature is used and the
/// `FnTrait` component is `None`. Otherwise the `Fn*`/`AsyncFn*` traits are
/// consulted (see `callable_sig_from_fn_trait`) and the matched trait is
/// returned alongside `num_args` fresh argument types and the output type.
pub(crate) fn callable_sig(
    &mut self,
    ty: Ty<'db>,
    num_args: usize,
) -> Option<(Option<FnTrait>, Vec<Ty<'db>>, Ty<'db>)> {
    match ty.callable_sig(self.interner()) {
        Some(sig) => {
            let sig = sig.skip_binder();
            Some((None, sig.inputs_and_output.inputs().to_vec(), sig.output()))
        }
        None => {
            let (f, args_ty, return_ty) = self.callable_sig_from_fn_trait(ty, num_args)?;
            Some((Some(f), args_ty, return_ty))
        }
    }
}
/// Tries to derive a callable signature for `ty` from the `Fn*`/`AsyncFn*`
/// traits: for each candidate trait, a `ty: Trait<(v1, ..., vN)>` obligation
/// (with fresh inference variables for the arguments) is test-solved, and on
/// success the most specific matching subtrait, the argument type variables,
/// and the normalized output type are returned.
///
/// NOTE(review): `?` on `get_id` means a missing lang item for an *earlier*
/// candidate (e.g. `FnOnce`) aborts the whole search, including the async
/// candidates — confirm this is the intended behavior.
fn callable_sig_from_fn_trait(
    &mut self,
    ty: Ty<'db>,
    num_args: usize,
) -> Option<(FnTrait, Vec<Ty<'db>>, Ty<'db>)> {
    let lang_items = self.interner().lang_items();
    // Each entry: the base trait to test, the associated type holding the
    // call's output, and the more specific subtraits to prefer if they hold.
    for (fn_trait_name, output_assoc_name, subtraits) in [
        (FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]),
        (FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]),
        (FnTrait::AsyncFnOnce, sym::CallOnceFuture, &[]),
    ] {
        let fn_trait = fn_trait_name.get_id(lang_items)?;
        let trait_data = fn_trait.trait_items(self.db);
        let output_assoc_type =
            trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
        // Build a tuple of `num_args` fresh type variables as the argument
        // tuple, remembering each variable so it can be returned.
        let mut arg_tys = Vec::with_capacity(num_args);
        let arg_ty = Ty::new_tup_from_iter(
            self.interner(),
            std::iter::repeat_with(|| {
                let ty = self.next_ty_var();
                arg_tys.push(ty);
                ty
            })
            .take(num_args),
        );
        let args = GenericArgs::new_from_slice(&[ty.into(), arg_ty.into()]);
        let trait_ref = TraitRef::new_from_args(self.interner(), fn_trait.into(), args);
        let proj_args = self.infer_ctxt.fill_rest_fresh_args(output_assoc_type.into(), args);
        // Projection for the output associated type; normalized below once the
        // trait obligation is known to be solvable.
        let projection = Ty::new_alias(
            self.interner(),
            AliasTy::new_from_args(
                self.interner(),
                rustc_type_ir::Projection { def_id: output_assoc_type.into() },
                proj_args,
            ),
        );
        let pred = Predicate::upcast_from(trait_ref, self.interner());
        if !self.try_obligation(pred).no_solution() {
            self.register_obligation(pred);
            let return_ty = self.normalize_alias_ty(projection);
            // Prefer the most specific subtrait that also holds.
            for &fn_x in subtraits {
                let fn_x_trait = fn_x.get_id(lang_items)?;
                let trait_ref =
                    TraitRef::new_from_args(self.interner(), fn_x_trait.into(), args);
                let pred = Predicate::upcast_from(trait_ref, self.interner());
                if !self.try_obligation(pred).no_solution() {
                    return Some((fn_x, arg_tys, return_ty));
                }
            }
            return Some((fn_trait_name, arg_tys, return_ty));
        }
    }
    None
}
pub(super) fn insert_type_vars<T>(&mut self, ty: T) -> T
where
T: TypeFoldable<DbInterner<'db>>,
@@ -21,7 +21,7 @@
use triomphe::Arc;
use crate::{
InferenceResult, ParamEnvAndCrate,
ParamEnvAndCrate,
consteval::try_const_usize,
db::HirDatabase,
next_solver::{
@@ -331,25 +331,18 @@ pub fn layout_of_ty_query(
ptr.valid_range_mut().start = 1;
Layout::scalar(dl, ptr)
}
TyKind::Closure(id, args) => {
let def = id.0.loc(db);
let infer = InferenceResult::of(db, def.0);
let (captures, _) = infer.closure_info(id.0);
let fields = captures
.iter()
.map(|it| {
let ty = it.ty.get().instantiate(interner, args.as_closure().parent_args());
db.layout_of_ty(ty.store(), trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.calc.univariant(&fields, &ReprOptions::default(), StructKind::AlwaysSized)?
TyKind::Closure(_, args) => {
return db.layout_of_ty(args.as_closure().tupled_upvars_ty().store(), trait_env);
}
TyKind::Coroutine(_, args) => {
return db.layout_of_ty(args.as_coroutine().tupled_upvars_ty().store(), trait_env);
}
TyKind::CoroutineClosure(_, args) => {
return db
.layout_of_ty(args.as_coroutine_closure().tupled_upvars_ty().store(), trait_env);
}
TyKind::Coroutine(_, _)
| TyKind::CoroutineWitness(_, _)
| TyKind::CoroutineClosure(_, _) => {
TyKind::CoroutineWitness(_, _) => {
return Err(LayoutError::NotImplemented);
}
@@ -125,6 +125,8 @@ fn capture_specific_fields2() {
#[test]
fn capture_specific_fields() {
size_and_align_expr! {
minicore: fn;
stmts: []
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
@@ -132,6 +134,8 @@ fn capture_specific_fields() {
}
}
size_and_align_expr! {
minicore: fn;
stmts: []
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
@@ -140,7 +144,7 @@ fn capture_specific_fields() {
}
}
size_and_align_expr! {
minicore: copy;
minicore: fn, copy;
stmts: [
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
@@ -151,6 +155,8 @@ fn capture_specific_fields() {
}
}
size_and_align_expr! {
minicore: fn;
stmts: []
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
@@ -159,6 +165,8 @@ fn capture_specific_fields() {
}
}
size_and_align_expr! {
minicore: fn;
stmts: []
struct X(i64, i32, (u8, i128));
let y = &&X(2, 5, (7, 3));
move |x: i64| {
@@ -92,10 +92,8 @@
pub use autoderef::autoderef;
pub use infer::{
Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult,
InferenceTyDiagnosticSource, OverloadedDeref, PointerCast,
cast::CastError,
closure::analysis::{CaptureKind, CapturedItem},
could_coerce, could_unify, could_unify_deeply, infer_query_with_inspect,
InferenceTyDiagnosticSource, OverloadedDeref, PointerCast, cast::CastError, could_coerce,
could_unify, could_unify_deeply, infer_query_with_inspect,
};
pub use lower::{
GenericPredicates, ImplTraits, LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId,
@@ -109,6 +107,16 @@
is_fn_unsafe_to_call, target_feature_is_safe_in_target,
};
pub mod closure_analysis {
pub use crate::infer::{
CaptureInfo, CaptureSourceStack, CapturedPlace, ClosureData, UpvarCapture,
closure::analysis::{
BorrowKind,
expr_use_visitor::{FakeReadCause, Place, PlaceBase, Projection, ProjectionKind},
},
};
}
/// A constant can have reference to other things. Memory map job is holding
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
@@ -197,7 +205,7 @@ pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize>
generics::generics(db, id.parent).type_or_const_param_idx(id)
}
#[derive(Debug, Copy, Clone, Eq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum FnAbi {
Aapcs,
AapcsUnwind,
@@ -239,21 +247,6 @@ pub enum FnAbi {
Unknown,
}
impl PartialEq for FnAbi {
fn eq(&self, _other: &Self) -> bool {
// FIXME: Proper equality breaks `coercion::two_closures_lub` test
true
}
}
impl Hash for FnAbi {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
// Required because of the FIXME above and due to us implementing `Eq`, without this
// we would break the `Hash` + `Eq` contract
core::mem::discriminant(&Self::Unknown).hash(state);
}
}
impl FnAbi {
#[rustfmt::skip]
pub fn from_symbol(s: &Symbol) -> FnAbi {
@@ -219,7 +219,6 @@ pub(crate) fn with_method_resolution<R>(
/// between multiple candidates. We otherwise treat them as ordinary inference
/// variable to avoid rejecting otherwise correct code.
#[derive(Debug)]
#[expect(dead_code)]
pub(super) enum TreatNotYetDefinedOpaques {
AsInfer,
AsRigid,
@@ -6,8 +6,7 @@
use either::Either;
use hir_def::{
DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
expr_store::ExpressionStore,
hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
hir::{BindingId, Expr, ExprId, Ordering, PatId},
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_ast_ir::Mutability;
@@ -168,7 +167,6 @@ pub fn projected_ty<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
mut base: Ty<'db>,
closure_field: impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db>,
krate: Crate,
) -> Ty<'db> {
let interner = infcx.interner;
@@ -223,7 +221,7 @@ pub fn projected_ty<'db>(
}
},
ProjectionElem::ClosureField(f) => match base.kind() {
TyKind::Closure(id, subst) => closure_field(id.0, subst, *f),
TyKind::Closure(_, args) => args.as_closure().tupled_upvars_ty().tuple_fields()[*f],
_ => {
never!("Only closure has closure field");
Ty::new_error(interner, ErrorGuaranteed)
@@ -711,19 +709,31 @@ pub enum MutBorrowKind {
}
impl BorrowKind {
fn from_hir(m: hir_def::type_ref::Mutability) -> Self {
fn from_hir_mutability(m: hir_def::type_ref::Mutability) -> Self {
match m {
hir_def::type_ref::Mutability::Shared => BorrowKind::Shared,
hir_def::type_ref::Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
}
}
fn from_rustc(m: rustc_ast_ir::Mutability) -> Self {
fn from_rustc_mutability(m: rustc_ast_ir::Mutability) -> Self {
match m {
rustc_ast_ir::Mutability::Not => BorrowKind::Shared,
rustc_ast_ir::Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
}
}
fn from_hir(bk: crate::infer::closure::analysis::BorrowKind) -> Self {
match bk {
crate::closure_analysis::BorrowKind::Immutable => Self::Shared,
crate::closure_analysis::BorrowKind::UniqueImmutable => {
Self::Mut { kind: MutBorrowKind::ClosureCapture }
}
crate::closure_analysis::BorrowKind::Mutable => {
Self::Mut { kind: MutBorrowKind::Default }
}
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -1079,6 +1089,7 @@ pub struct MirBody {
pub start_block: BasicBlockId,
pub owner: DefWithBodyId,
pub binding_locals: ArenaMap<BindingId, LocalId>,
pub upvar_locals: FxHashMap<BindingId, Vec<(LocalId, crate::closure_analysis::Place)>>,
pub param_locals: Vec<LocalId>,
/// This field stores the closures directly owned by this body. It is used
/// in traversing every mir body.
@@ -1190,6 +1201,7 @@ fn shrink_to_fit(&mut self) {
start_block: _,
owner: _,
binding_locals,
upvar_locals,
param_locals,
closures,
projection_store,
@@ -1198,6 +1210,7 @@ fn shrink_to_fit(&mut self) {
basic_blocks.shrink_to_fit();
locals.shrink_to_fit();
binding_locals.shrink_to_fit();
upvar_locals.shrink_to_fit();
param_locals.shrink_to_fit();
closures.shrink_to_fit();
for (_, b) in basic_blocks.iter_mut() {
@@ -1215,20 +1228,6 @@ pub enum MirSpan {
SelfParam,
Unknown,
}
impl MirSpan {
pub fn is_ref_span(&self, store: &ExpressionStore) -> bool {
match *self {
MirSpan::ExprId(expr) => matches!(store[expr], Expr::Ref { .. }),
// FIXME: Figure out if this is correct wrt. match ergonomics.
MirSpan::BindingId(binding) => {
matches!(store[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
}
MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
}
}
}
impl_from!(ExprId, PatId for MirSpan);
impl From<&ExprId> for MirSpan {
@@ -8,17 +8,16 @@
use hir_def::{DefWithBodyId, ExpressionStoreOwnerId, HasModule};
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::GenericArgs as _;
use stdx::never;
use triomphe::Arc;
use crate::{
InferenceResult,
db::{HirDatabase, InternedClosure, InternedClosureId},
closure_analysis::ProjectionKind as HirProjectionKind,
db::{HirDatabase, InternedClosureId},
display::DisplayTarget,
mir::OperandKind,
next_solver::{
DbInterner, GenericArgs, ParamEnv, StoredTy, Ty, TypingMode,
DbInterner, ParamEnv, StoredTy, Ty, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
};
@@ -68,25 +67,49 @@ pub struct BorrowckResult {
fn all_mir_bodies(
db: &dyn HirDatabase,
def: DefWithBodyId,
mut cb: impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
mut cb: impl FnMut(Arc<MirBody>) -> BorrowckResult,
mut merge_from_closures: impl FnMut(&mut BorrowckResult, &BorrowckResult),
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
fn for_closure(
db: &dyn HirDatabase,
c: InternedClosureId,
cb: &mut impl FnMut(Arc<MirBody>),
results: &mut Vec<BorrowckResult>,
cb: &mut impl FnMut(Arc<MirBody>) -> BorrowckResult,
merge_from_closures: &mut impl FnMut(&mut BorrowckResult, &BorrowckResult),
) -> Result<(), MirLowerError> {
match db.mir_body_for_closure(c) {
Ok(body) => {
cb(body.clone());
body.closures.iter().try_for_each(|&it| for_closure(db, it, cb))
let parent_index = results.len();
results.push(cb(body.clone()));
body.closures
.iter()
.try_for_each(|&it| for_closure(db, it, results, cb, merge_from_closures))?;
merge(results, merge_from_closures, parent_index);
Ok(())
}
Err(e) => Err(e),
}
}
fn merge(
results: &mut [BorrowckResult],
merge: &mut impl FnMut(&mut BorrowckResult, &BorrowckResult),
parent_index: usize,
) {
let (parent_and_before, children) = results.split_at_mut(parent_index + 1);
let parent = &mut parent_and_before[parent_and_before.len() - 1];
children.iter().for_each(|child| merge(parent, child));
}
let mut results = Vec::new();
match db.mir_body(def) {
Ok(body) => {
cb(body.clone());
body.closures.iter().try_for_each(|&it| for_closure(db, it, &mut cb))
results.push(cb(body.clone()));
body.closures.iter().try_for_each(|&it| {
for_closure(db, it, &mut results, &mut cb, &mut merge_from_closures)
})?;
merge(&mut results, &mut merge_from_closures, 0);
Ok(results.into())
}
Err(e) => Err(e),
}
@@ -100,34 +123,50 @@ pub fn borrowck_query(
let module = def.module(db);
let interner = DbInterner::new_with(db, module.krate(db));
let env = db.trait_environment(ExpressionStoreOwnerId::from(def));
let mut res = vec![];
// This calculates opaques defining scope which is a bit costly therefore is put outside `all_mir_bodies()`.
let typing_mode = TypingMode::borrowck(interner, def.into());
all_mir_bodies(db, def, |body| {
// FIXME(next-solver): Opaques.
let infcx = interner.infer_ctxt().build(typing_mode);
res.push(BorrowckResult {
mutability_of_locals: mutability_of_locals(&infcx, env, &body),
moved_out_of_ref: moved_out_of_ref(&infcx, env, &body),
partially_moved: partially_moved(&infcx, env, &body),
borrow_regions: borrow_regions(db, &body),
mir_body: body,
});
})?;
Ok(res.into())
}
fn make_fetch_closure_field<'db>(
db: &'db dyn HirDatabase,
) -> impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db> + use<'db> {
|c: InternedClosureId, subst: GenericArgs<'db>, f: usize| {
let InternedClosure(owner, _) = c.loc(db);
let interner = DbInterner::new_no_crate(db);
let infer = InferenceResult::of(db, owner);
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.as_closure().parent_args();
captures.get(f).expect("broken closure field").ty.get().instantiate(interner, parent_subst)
}
let res = all_mir_bodies(
db,
def,
|body| {
// FIXME(next-solver): Opaques.
let infcx = interner.infer_ctxt().build(typing_mode);
BorrowckResult {
mutability_of_locals: mutability_of_locals(&infcx, env, &body),
moved_out_of_ref: moved_out_of_ref(&infcx, env, &body),
partially_moved: partially_moved(&infcx, env, &body),
borrow_regions: borrow_regions(db, &body),
mir_body: body,
}
},
|parent, child| {
for (upvar, child_locals) in &child.mir_body.upvar_locals {
let Some(&parent_local) = parent.mir_body.binding_locals.get(*upvar) else {
continue;
};
for (child_local, capture_place) in child_locals {
if !capture_place
.projections
.iter()
.any(|proj| matches!(proj.kind, HirProjectionKind::Deref))
{
let parent_mol = &mut parent.mutability_of_locals[parent_local];
match (&*parent_mol, &child.mutability_of_locals[*child_local]) {
(MutabilityReason::Mut { .. }, _) => {}
(_, MutabilityReason::Mut { .. }) => {
// FIXME: Fix the child spans.
*parent_mol = MutabilityReason::Mut { spans: Vec::new() }
}
(MutabilityReason::Not, _) => {}
(_, MutabilityReason::Not) => *parent_mol = MutabilityReason::Not,
(MutabilityReason::Unused, MutabilityReason::Unused) => {}
}
}
}
}
},
)?;
Ok(res)
}
fn moved_out_of_ref<'db>(
@@ -145,13 +184,7 @@ fn moved_out_of_ref<'db>(
if *proj == ProjectionElem::Deref && ty.as_reference().is_some() {
is_dereference_of_ref = true;
}
ty = proj.projected_ty(
infcx,
env,
ty,
make_fetch_closure_field(db),
body.owner.module(db).krate(db),
);
ty = proj.projected_ty(infcx, env, ty, body.owner.module(db).krate(db));
}
if is_dereference_of_ref
&& !infcx.type_is_copy_modulo_regions(env, ty)
@@ -242,13 +275,7 @@ fn partially_moved<'db>(
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref();
for proj in p.projection.lookup(&body.projection_store) {
ty = proj.projected_ty(
infcx,
env,
ty,
make_fetch_closure_field(db),
body.owner.module(db).krate(db),
);
ty = proj.projected_ty(infcx, env, ty, body.owner.module(db).krate(db));
}
if !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() {
result.push(PartiallyMoved { span, ty: ty.store(), local: p.local });
@@ -397,13 +424,7 @@ fn place_case<'db>(
}
ProjectionElem::OpaqueCast(_) => (),
}
ty = proj.projected_ty(
infcx,
env,
ty,
make_fetch_closure_field(db),
body.owner.module(db).krate(db),
);
ty = proj.projected_ty(infcx, env, ty, body.owner.module(db).krate(db));
}
if is_part_of { ProjectionCase::DirectPart } else { ProjectionCase::Direct }
}
@@ -40,7 +40,7 @@
use crate::{
CallableDefId, ComplexMemoryMap, InferenceResult, MemoryMap, ParamEnvAndCrate,
consteval::{self, ConstEvalError, try_const_usize},
db::{HirDatabase, InternedClosure, InternedClosureId},
db::{HirDatabase, InternedClosureId},
display::{ClosureStyle, DisplayTarget, HirDisplay},
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
@@ -731,24 +731,7 @@ fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem) -> Ty<'db> {
return *r;
}
let (ty, proj) = pair;
let r = proj.projected_ty(
&self.infcx,
self.param_env.param_env,
ty,
|c, subst, f| {
let InternedClosure(owner, _) = c.loc(self.db);
let infer = InferenceResult::of(self.db, owner);
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.as_closure().parent_args();
captures
.get(f)
.expect("broken closure field")
.ty
.get()
.instantiate(self.interner(), parent_subst)
},
self.crate_id,
);
let r = proj.projected_ty(&self.infcx, self.param_env.param_env, ty, self.crate_id);
self.projected_ty_cache.borrow_mut().insert((ty, proj), r);
r
}
@@ -6,18 +6,16 @@
use hir_def::{attrs::AttrFlags, signatures::FunctionSignature};
use hir_expand::name::Name;
use intern::sym;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _};
use stdx::never;
use crate::{
InferenceResult,
display::DisplayTarget,
drop::{DropGlue, has_drop_glue},
mir::eval::{
Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay,
InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, Layout, Locals,
Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, from_bytes, not_supported,
pad16,
Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay, Interval,
IntervalAndTy, IntervalOrOwned, ItemContainerId, Layout, Locals, Lookup, MirEvalError,
MirSpan, Mutability, Result, Ty, TyKind, from_bytes, not_supported, pad16,
},
next_solver::Region,
};
@@ -147,19 +145,14 @@ fn exec_clone(
return destination
.write_from_interval(self, Interval { addr, size: destination.size });
}
TyKind::Closure(id, subst) => {
let [arg] = args else {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let InternedClosure(owner, _) = id.0.loc(self.db);
let infer = InferenceResult::of(self.db, owner);
let (captures, _) = infer.closure_info(id.0);
let layout = self.layout(self_ty)?;
let db = self.db;
let ty_iter = captures.iter().map(|c| c.ty(db, subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
TyKind::Closure(_, closure_args) => self.exec_clone(
def,
args,
closure_args.as_closure().tupled_upvars_ty(),
locals,
destination,
span,
)?,
TyKind::Tuple(subst) => {
let [arg] = args else {
not_supported!("wrong arg count for clone");
@@ -8,8 +8,8 @@
HasModule, ItemContainerId, LocalFieldId, Lookup, TraitId, TupleId,
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
hir::{
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ClosureKind, ExprId, LabelId,
Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, RecordSpread,
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ClosureKind, ExprId, ExprOrPatId,
LabelId, Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, RecordSpread,
generics::GenericParams,
},
item_tree::FieldsShape,
@@ -18,10 +18,11 @@
signatures::{ConstSignature, EnumSignature, FunctionSignature, StaticSignature},
};
use hir_expand::name::Name;
use itertools::{EitherOrBoth, Itertools};
use la_arena::ArenaMap;
use rustc_apfloat::Float;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{Const as _, GenericArgs as _, IntoKind, Ty as _};
use rustc_type_ir::inherent::{AdtDef, Const as _, GenericArgs as _, IntoKind, Ty as _};
use span::{Edition, FileId};
use syntax::TextRange;
use triomphe::Arc;
@@ -33,8 +34,11 @@
display::{DisplayTarget, HirDisplay, hir_display_with_store},
generics::generics,
infer::{
CaptureKind, CapturedItem, TypeMismatch, cast::CastTy,
closure::analysis::HirPlaceProjection,
CaptureSourceStack, CapturedPlace, TypeMismatch, UpvarCapture,
cast::CastTy,
closure::analysis::expr_use_visitor::{
Place as HirPlace, PlaceBase as HirPlaceBase, ProjectionKind as HirProjectionKind,
},
},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
@@ -52,7 +56,6 @@
abi::Safety,
infer::{DbInternerInferExt, InferCtxt},
},
traits::FnTrait,
};
use super::OperandKind;
@@ -304,6 +307,7 @@ fn new(
locals,
start_block,
binding_locals,
upvar_locals: FxHashMap::default(),
param_locals: vec![],
owner,
closures: vec![],
@@ -440,7 +444,7 @@ fn lower_expr_to_place_with_borrow_adjust(
else {
return Ok(None);
};
let bk = BorrowKind::from_rustc(m);
let bk = BorrowKind::from_rustc_mutability(m);
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
}
@@ -996,7 +1000,7 @@ fn lower_expr_to_place_without_adjust(
let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
return Ok(None);
};
let bk = BorrowKind::from_hir(*mutability);
let bk = BorrowKind::from_hir_mutability(*mutability);
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
}
@@ -1251,49 +1255,58 @@ fn lower_expr_to_place_without_adjust(
not_supported!("closure with non closure type");
};
self.result.closures.push(id.0);
let (captures, _) = self.infer.closure_info(id.0);
let mut operands = vec![];
for capture in captures.iter() {
let p = Place {
local: self.binding_local(capture.place.local)?,
projection: self.result.projection_store.intern(
capture
.place
.projections
.clone()
.into_iter()
.map(|it| match it {
HirPlaceProjection::Deref => ProjectionElem::Deref,
HirPlaceProjection::Field(field_id) => {
ProjectionElem::Field(Either::Left(field_id))
}
HirPlaceProjection::TupleField(idx) => {
ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // Dummy as it's unused
index: idx,
}))
}
})
.collect(),
),
let closure_data = &self.infer.closures_data[&id.0.loc(self.db).1];
let span = |sources: &[CaptureSourceStack]| match sources
.first()
.map(|it| it.final_source())
{
Some(ExprOrPatId::ExprId(it)) => it.into(),
Some(ExprOrPatId::PatId(it)) => it.into(),
None => MirSpan::Unknown,
};
let convert_place = |this: &mut Self, place: &HirPlace| {
let (HirPlaceBase::Local(local) | HirPlaceBase::Upvar { var_id: local, .. }) =
place.base
else {
not_supported!("non-local capture");
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let tmp_ty = capture.ty.get().instantiate_identity();
Ok(Place {
local: this.binding_local(local)?,
projection: this
.result
.projection_store
.intern(convert_closure_capture_projections(self.db, place).collect()),
})
};
for (place, _, sources) in &closure_data.fake_reads {
let p = convert_place(self, place)?;
self.push_fake_read(current, p, span(sources));
}
let captures = closure_data.min_captures.values().flatten();
let mut operands = vec![];
for capture in captures {
let p = convert_place(self, &capture.place)?;
match capture.info.capture_kind {
UpvarCapture::ByRef(bk) => {
let tmp_ty = capture.captured_ty(self.db);
// FIXME: Handle more than one span.
let capture_spans = capture.spans();
let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into();
let capture_span = span(&capture.info.sources);
let tmp: Place = self.temp(tmp_ty, current, capture_span)?.into();
self.push_assignment(
current,
tmp,
Rvalue::Ref(*bk, p),
capture_spans[0],
Rvalue::Ref(BorrowKind::from_hir(bk), p),
capture_span,
);
operands.push(Operand { kind: OperandKind::Move(tmp), span: None });
}
CaptureKind::ByValue => {
UpvarCapture::ByValue => {
operands.push(Operand { kind: OperandKind::Move(p), span: None })
}
UpvarCapture::ByUse => not_supported!("capture by use"),
}
}
self.push_assignment(
@@ -2068,6 +2081,44 @@ fn emit_drop_and_storage_dead_for_scope(
}
}
fn convert_closure_capture_projections(
db: &dyn HirDatabase,
place: &HirPlace,
) -> impl Iterator<Item = PlaceElem> {
place.projections.iter().enumerate().map(|(i, proj)| match proj.kind {
HirProjectionKind::Deref => ProjectionElem::Deref,
HirProjectionKind::Field { field_idx, variant_idx } => {
let ty = place.ty_before_projection(i);
match ty.kind() {
TyKind::Tuple(_) => {
ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // Dummy as it's unused
index: field_idx,
}))
}
TyKind::Adt(adt_def, _) => {
let local_field_id = LocalFieldId::from_raw(RawIdx::from_u32(field_idx));
let field = match adt_def.def_id().0 {
AdtId::StructId(id) => {
FieldId { parent: id.into(), local_id: local_field_id }
}
AdtId::UnionId(id) => {
FieldId { parent: id.into(), local_id: local_field_id }
}
AdtId::EnumId(id) => {
let variant = id.enum_variants(db).variants[variant_idx as usize].0;
FieldId { parent: variant.into(), local_id: local_field_id }
}
};
ProjectionElem::Field(Either::Left(field))
}
_ => panic!("unexpected type"),
}
}
_ => panic!("unexpected projection"),
})
}
fn cast_kind<'db>(
db: &'db dyn HirDatabase,
source_ty: Ty<'db>,
@@ -2105,20 +2156,22 @@ pub fn mir_body_for_closure_query<'db>(
let crate::next_solver::TyKind::Closure(_, substs) = infer.expr_ty(expr).kind() else {
implementation_error!("closure expression is not closure");
};
let (captures, kind) = infer.closure_info(closure);
let kind = substs.as_closure().kind();
let captures = infer.closures_data[&expr].min_captures.values().flatten();
let mut ctx = MirLowerCtx::new(db, body_owner, &body.store, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer.expr_ty(*root).store() });
let closure_local = ctx.result.locals.alloc(Local {
ty: match kind {
FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer.expr_ty(expr),
FnTrait::FnMut | FnTrait::AsyncFnMut => Ty::new_ref(
rustc_type_ir::ClosureKind::FnOnce => infer.expr_ty(expr),
rustc_type_ir::ClosureKind::FnMut => Ty::new_ref(
ctx.interner(),
Region::error(ctx.interner()),
infer.expr_ty(expr),
Mutability::Mut,
),
FnTrait::Fn | FnTrait::AsyncFn => Ty::new_ref(
rustc_type_ir::ClosureKind::Fn => Ty::new_ref(
ctx.interner(),
Region::error(ctx.interner()),
infer.expr_ty(expr),
@@ -2128,6 +2181,7 @@ pub fn mir_body_for_closure_query<'db>(
.store(),
});
ctx.result.param_locals.push(closure_local);
let sig = ctx.interner().signature_unclosure(substs.as_closure().sig(), Safety::Safe);
let resolver_guard = ctx.resolver.update_to_inner_scope(db, body_owner, expr);
let current = ctx.lower_params_and_bindings(
@@ -2135,60 +2189,101 @@ pub fn mir_body_for_closure_query<'db>(
None,
|_| true,
)?;
// Push local for every upvar in the closure. rustc doesn't do that, but we have to so we have locals
// to associate with upvars for borrowck.
let is_by_ref_closure = match kind {
rustc_type_ir::ClosureKind::Fn | rustc_type_ir::ClosureKind::FnMut => true,
rustc_type_ir::ClosureKind::FnOnce => false,
};
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedPlace, LocalId)>> = FxHashMap::default();
for (capture_idx, capture) in captures.enumerate() {
let capture_local = ctx.result.locals.alloc(Local { ty: capture.captured_ty(db).store() });
ctx.push_storage_live_for_local(capture_local, current, MirSpan::Unknown)?;
let mut projections = Vec::with_capacity(usize::from(is_by_ref_closure) + 1);
if is_by_ref_closure {
projections.push(ProjectionElem::Deref);
}
projections.push(ProjectionElem::ClosureField(capture_idx));
let capture_param_place = Place {
local: closure_local,
projection: ctx.result.projection_store.intern(projections.into_boxed_slice()),
};
let capture_local_place = Place {
local: capture_local,
projection: ctx.result.projection_store.intern(Box::new([])),
};
let capture_local_rvalue =
Rvalue::Use(Operand { kind: OperandKind::Move(capture_param_place), span: None });
ctx.push_assignment(current, capture_local_place, capture_local_rvalue, MirSpan::Unknown);
let local = capture.captured_local();
let local = ctx.binding_local(local)?;
upvar_map.entry(local).or_default().push((capture, capture_local));
ctx.result
.upvar_locals
.entry(capture.captured_local())
.or_default()
.push((capture_local, capture.place.clone()));
}
ctx.resolver.reset_to_guard(resolver_guard);
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
let current = ctx.pop_drop_scope_assert_finished(current, root.into())?;
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
for (i, capture) in captures.iter().enumerate() {
let local = ctx.binding_local(capture.place.local)?;
upvar_map.entry(local).or_default().push((capture, i));
}
let mut err = None;
let closure_local = ctx.result.locals.iter().nth(1).unwrap().0;
let closure_projection = match kind {
FnTrait::FnOnce | FnTrait::AsyncFnOnce => vec![],
FnTrait::FnMut | FnTrait::Fn | FnTrait::AsyncFnMut | FnTrait::AsyncFn => {
vec![ProjectionElem::Deref]
}
};
ctx.result.walk_places(|p, store| {
if let Some(it) = upvar_map.get(&p.local) {
let r = it.iter().find(|it| {
if p.projection.lookup(store).len() < it.0.place.projections.len() {
return false;
}
for (it, y) in p.projection.lookup(store).iter().zip(it.0.place.projections.iter())
{
match (it, y) {
(ProjectionElem::Deref, HirPlaceProjection::Deref) => (),
(ProjectionElem::Field(Either::Left(it)), HirPlaceProjection::Field(y))
if it == y => {}
(
ProjectionElem::Field(Either::Right(it)),
HirPlaceProjection::TupleField(y),
) if it.index == *y => (),
_ => return false,
ctx.result.walk_places(|mir_place, store| {
let mir_projections = mir_place.projection.lookup(store);
if let Some(hir_places) = upvar_map.get(&mir_place.local) {
let projections = hir_places.iter().find_map(|hir_place| {
let iter = mir_projections
.iter()
.cloned()
.zip_longest(convert_closure_capture_projections(db, &hir_place.0.place))
.enumerate();
for (idx, item) in iter {
match item {
EitherOrBoth::Both(mir, hir) => {
if mir != hir {
// Not this place.
return None;
}
}
EitherOrBoth::Right(_) => {
// FIXME: This can happen in fake reads. I believe this is a bug. So we change the fake read's meaning.
// never!(
// "mir upvar place shorter than hir upvar place; this should not happen, \
// capture analysis should have picked the shorter place"
// );
// return None;
return Some((mir_projections.len(), hir_place));
}
// This place, but truncated.
EitherOrBoth::Left(_) => return Some((idx, hir_place)),
}
}
true
// Exactly this place.
Some((hir_place.0.place.projections.len(), hir_place))
});
match r {
Some(it) => {
p.local = closure_local;
let mut next_projs = closure_projection.clone();
next_projs.push(PlaceElem::ClosureField(it.1));
let prev_projs = p.projection;
if it.0.kind != CaptureKind::ByValue {
next_projs.push(ProjectionElem::Deref);
}
next_projs.extend(
prev_projs.lookup(store).iter().skip(it.0.place.projections.len()).cloned(),
match projections {
Some((skip_projections_up_to, (hir_place, upvar_local))) => {
mir_place.local = *upvar_local;
let mut result_projections = Vec::with_capacity(
usize::from(hir_place.is_by_ref())
+ (mir_projections.len() - skip_projections_up_to),
);
p.projection = store.intern(next_projs.into());
if hir_place.is_by_ref() {
result_projections.push(ProjectionElem::Deref);
}
result_projections
.extend(mir_projections[skip_projections_up_to..].iter().cloned());
mir_place.projection = store.intern(result_projections.into());
}
None => err = Some(*p),
None => err = Some(*mir_place),
}
}
});
@@ -10,12 +10,6 @@
next_solver::Region,
};
macro_rules! not_supported {
($it: expr) => {
return Err(MirLowerError::NotSupported(format!($it)))
};
}
impl<'db> MirLowerCtx<'_, 'db> {
fn lower_expr_to_some_place_without_adjust(
&mut self,
@@ -98,11 +92,8 @@ pub(super) fn lower_expr_as_place_with_adjust(
last.target.as_ref(),
expr_id.into(),
match od.0 {
Some(Mutability::Mut) => true,
Some(Mutability::Not) => false,
None => {
not_supported!("implicit overloaded deref with unknown mutability")
}
Mutability::Mut => true,
Mutability::Not => false,
},
)
}
@@ -561,6 +561,16 @@ pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>)
traits::type_known_to_meet_bound_modulo_regions(self, param_env, ty, copy_def_id)
}
pub fn type_is_use_cloned_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool {
let ty = self.resolve_vars_if_possible(ty);
let Some(use_cloned_def_id) = self.interner.lang_items().UseCloned else {
return false;
};
traits::type_known_to_meet_bound_modulo_regions(self, param_env, ty, use_cloned_def_id)
}
pub fn unresolved_variables(&self) -> Vec<Ty<'db>> {
let mut inner = self.inner.borrow_mut();
let mut vars: Vec<Ty<'db>> = inner
@@ -33,7 +33,7 @@
fast_reject,
inherent::{self, Const as _, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _},
lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem},
solve::SizedTraitKind,
solve::{AdtDestructorKind, SizedTraitKind},
};
use crate::{
@@ -725,12 +725,8 @@ fn sizedness_constraint(
.transpose()
}
fn destructor(
self,
_interner: DbInterner<'db>,
) -> Option<rustc_type_ir::solve::AdtDestructorKind> {
// FIXME(next-solver)
None
fn destructor(self, interner: DbInterner<'db>) -> Option<AdtDestructorKind> {
crate::drop::destructor(interner.db, self.def_id().0).map(|_| AdtDestructorKind::NotConst)
}
fn is_manually_drop(self) -> bool {
@@ -1492,7 +1488,7 @@ fn has_target_features(self, _def_id: Self::FunctionId) -> bool {
fn require_lang_item(self, lang_item: SolverLangItem) -> Self::DefId {
let lang_items = self.lang_items();
let lang_item = match lang_item {
SolverLangItem::AsyncFnKindUpvars => unimplemented!(),
SolverLangItem::AsyncFnKindUpvars => lang_items.AsyncFnKindUpvars,
SolverLangItem::AsyncFnOnceOutput => lang_items.AsyncFnOnceOutput,
SolverLangItem::CallOnceFuture => lang_items.CallOnceFuture,
SolverLangItem::CallRefFuture => lang_items.CallRefFuture,
@@ -1503,8 +1499,8 @@ fn require_lang_item(self, lang_item: SolverLangItem) -> Self::DefId {
SolverLangItem::DynMetadata => {
return lang_items.DynMetadata.expect("Lang item required but not found.").into();
}
SolverLangItem::FieldBase => unimplemented!(),
SolverLangItem::FieldType => unimplemented!(),
SolverLangItem::FieldBase => lang_items.FieldBase,
SolverLangItem::FieldType => lang_items.FieldType,
};
lang_item.expect("Lang item required but not found.").into()
}
@@ -1513,13 +1509,13 @@ fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> TraitIdWrapp
let lang_items = self.lang_items();
let lang_item = match lang_item {
SolverTraitLangItem::AsyncFn => lang_items.AsyncFn,
SolverTraitLangItem::AsyncFnKindHelper => unimplemented!(),
SolverTraitLangItem::AsyncFnKindHelper => lang_items.AsyncFnKindHelper,
SolverTraitLangItem::AsyncFnMut => lang_items.AsyncFnMut,
SolverTraitLangItem::AsyncFnOnce => lang_items.AsyncFnOnce,
SolverTraitLangItem::AsyncFnOnceOutput => unimplemented!(
"This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver."
),
SolverTraitLangItem::AsyncIterator => unimplemented!(),
SolverTraitLangItem::AsyncIterator => lang_items.AsyncIterator,
SolverTraitLangItem::Clone => lang_items.Clone,
SolverTraitLangItem::Copy => lang_items.Copy,
SolverTraitLangItem::Coroutine => lang_items.Coroutine,
@@ -1530,7 +1526,7 @@ fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> TraitIdWrapp
SolverTraitLangItem::FnMut => lang_items.FnMut,
SolverTraitLangItem::FnOnce => lang_items.FnOnce,
SolverTraitLangItem::FnPtrTrait => lang_items.FnPtrTrait,
SolverTraitLangItem::FusedIterator => unimplemented!(),
SolverTraitLangItem::FusedIterator => lang_items.FusedIterator,
SolverTraitLangItem::Future => lang_items.Future,
SolverTraitLangItem::Iterator => lang_items.Iterator,
SolverTraitLangItem::PointeeTrait => lang_items.PointeeTrait,
@@ -1541,11 +1537,9 @@ fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> TraitIdWrapp
SolverTraitLangItem::Tuple => lang_items.Tuple,
SolverTraitLangItem::Unpin => lang_items.Unpin,
SolverTraitLangItem::Unsize => lang_items.Unsize,
SolverTraitLangItem::BikeshedGuaranteedNoDrop => {
unimplemented!()
}
SolverTraitLangItem::BikeshedGuaranteedNoDrop => lang_items.BikeshedGuaranteedNoDrop,
SolverTraitLangItem::TrivialClone => lang_items.TrivialClone,
SolverTraitLangItem::Field => unimplemented!(),
SolverTraitLangItem::Field => lang_items.Field,
};
lang_item.expect("Lang item required but not found.").into()
}
@@ -392,6 +392,11 @@ pub fn is_char(self) -> bool {
matches!(self.kind(), TyKind::Char)
}
#[inline]
pub fn is_coroutine_closure(self) -> bool {
matches!(self.kind(), TyKind::CoroutineClosure(..))
}
/// A scalar type is one that denotes an atomic datum, with no sub-components.
/// (A RawPtr is scalar because it represents a non-managed pointer, so its
/// contents are abstract to rustc.)
@@ -441,6 +446,11 @@ pub fn is_raw_ptr(self) -> bool {
matches!(self.kind(), TyKind::RawPtr(..))
}
#[inline]
pub fn is_ref(self) -> bool {
matches!(self.kind(), TyKind::Ref(..))
}
#[inline]
pub fn is_array(self) -> bool {
matches!(self.kind(), TyKind::Array(..))
@@ -507,6 +517,14 @@ pub fn builtin_deref(self, explicit: bool) -> Option<Ty<'db>> {
}
}
/// Returns the type of `ty[i]`.
pub fn builtin_index(self) -> Option<Ty<'db>> {
match self.kind() {
TyKind::Array(ty, _) | TyKind::Slice(ty) => Some(ty),
_ => None,
}
}
/// Whether the type contains some non-lifetime, aka. type or const, error type.
pub fn references_non_lt_error(self) -> bool {
references_non_lt_error(&self)
@@ -1,23 +1,68 @@
use expect_test::{Expect, expect};
use hir_def::{
DefWithBodyId,
AdtId, DefWithBodyId, LocalFieldId, VariantId,
expr_store::{Body, ExpressionStore},
hir::{BindingId, ExprOrPatId},
};
use hir_expand::{HirFileId, files::InFileWrapper};
use itertools::Itertools;
use span::TextRange;
use rustc_type_ir::inherent::{AdtDef as _, IntoKind};
use span::{Edition, TextRange};
use stdx::{format_to, never};
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;
use crate::{
InferenceResult,
closure_analysis::Place,
display::{DisplayTarget, HirDisplay},
mir::MirSpan,
next_solver::TyKind,
test_db::TestDB,
};
use super::{setup_tracing, visit_module};
fn display_place(db: &TestDB, store: &ExpressionStore, place: &Place, local: BindingId) -> String {
let mut result = store[local].name.display(db, Edition::LATEST).to_string();
let mut last_was_deref = false;
for (i, proj) in place.projections.iter().enumerate() {
match proj.kind {
hir_ty::closure_analysis::ProjectionKind::Deref => {
result.insert(0, '*');
last_was_deref = true;
}
hir_ty::closure_analysis::ProjectionKind::Field { field_idx, variant_idx } => {
if last_was_deref {
result.insert(0, '(');
result.push(')');
last_was_deref = false;
}
let ty = place.ty_before_projection(i);
match ty.kind() {
TyKind::Tuple(_) => format_to!(result, ".{field_idx}"),
TyKind::Adt(adt_def, _) => {
let variant = match adt_def.def_id().0 {
AdtId::StructId(id) => VariantId::from(id),
AdtId::UnionId(id) => id.into(),
AdtId::EnumId(id) => {
// Can't really do that for an enum, unfortunately, so try to do something alike.
id.enum_variants(db).variants[variant_idx as usize].0.into()
}
};
let field = &variant.fields(db).fields()
[LocalFieldId::from_raw(la_arena::RawIdx::from_u32(field_idx))];
format_to!(result, ".{}", field.name.display(db, Edition::LATEST));
}
_ => never!("mismatching projection type"),
}
}
_ => never!("unexpected projection kind"),
}
}
result
}
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(ra_fixture);
@@ -39,84 +84,69 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
};
let infer = InferenceResult::of(&db, def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(
|(closure_id, (captures, _))| {
let closure = closure_id.loc(db);
let body_owner = closure.0;
let source_map = ExpressionStore::with_source_map(db, body_owner).1;
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
captures.iter().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
captures_info.extend(infer.closures_data.iter().flat_map(|(closure, closure_data)| {
let (body, source_map) = Body::with_source_map(db, def);
let closure_text_range = source_map
.expr_syntax(*closure)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
closure_data.min_captures.values().flatten().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
}
// FIXME: Deduplicate this with hir::Local::sources().
let captured_local = capture.captured_local();
let local_text_range = match body.self_param.zip(source_map.self_param_syntax())
{
Some((param, source)) if param == captured_local => {
format!("{:?}", text_range(db, source))
}
// FIXME: Deduplicate this with hir::Local::sources().
let (body, source_map) =
Body::with_source_map(db, body_owner.as_def_with_body().unwrap());
let local_text_range =
match body.self_param.zip(source_map.self_param_syntax()) {
Some((param, source)) if param == capture.local() => {
format!("{:?}", text_range(db, source))
}
_ => source_map
.patterns_for_binding(capture.local())
.iter()
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.map(|it| format!("{it:?}"))
.join(", "),
};
let place = capture.display_place(body_owner, db);
let capture_ty = capture
.ty
.get()
.skip_binder()
.display_test(db, DisplayTarget::from_crate(db, module.krate(db)))
.to_string();
let spans = capture
.spans()
_ => source_map
.patterns_for_binding(captured_local)
.iter()
.flat_map(|span| match *span {
MirSpan::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
}
MirSpan::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
MirSpan::BindingId(binding) => source_map
.patterns_for_binding(binding)
.iter()
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
.collect(),
MirSpan::SelfParam => {
vec![text_range(db, source_map.self_param_syntax().unwrap())]
}
MirSpan::Unknown => Vec::new(),
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
.join(", "),
};
let place = display_place(db, body, &capture.place, captured_local);
let capture_ty = capture
.captured_ty(db)
.display_test(db, DisplayTarget::from_crate(db, module.krate(db)))
.to_string();
let spans = capture
.info
.sources
.iter()
.flat_map(|span| match span.final_source() {
ExprOrPatId::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
}
ExprOrPatId::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
(
closure_text_range,
local_text_range,
spans,
place,
capture_ty,
capture.kind(),
)
})
},
));
(
closure_text_range,
local_text_range,
spans,
place,
capture_ty,
capture.info.capture_kind,
)
})
}));
}
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
(closure_text_range.start(), local_text_range.clone())
@@ -145,7 +175,7 @@ fn main() {
let closure = || { let b = *a; };
}
"#,
expect!["53..71;20..21;66..68 ByRef(Shared) *a &'? bool"],
expect!["53..71;20..21;66..68 ByRef(Immutable) *a &'<erased> bool"],
);
}
@@ -159,7 +189,7 @@ fn main() {
let closure = || { let &mut ref b = a; };
}
"#,
expect!["53..79;20..21;67..72 ByRef(Shared) *a &'? bool"],
expect!["53..79;20..21;62..72 ByRef(Immutable) *a &'<erased> bool"],
);
check_closure_captures(
r#"
@@ -169,7 +199,7 @@ fn main() {
let closure = || { let &mut ref mut b = a; };
}
"#,
expect!["53..83;20..21;67..76 ByRef(Mut { kind: Default }) *a &'? mut bool"],
expect!["53..83;20..21;62..76 ByRef(Mutable) *a &'<erased> mut bool"],
);
}
@@ -183,7 +213,7 @@ fn main() {
let closure = || { *a = false; };
}
"#,
expect!["53..71;20..21;58..60 ByRef(Mut { kind: Default }) *a &'? mut bool"],
expect!["53..71;20..21;58..60 ByRef(Mutable) *a &'<erased> mut bool"],
);
}
@@ -197,7 +227,7 @@ fn main() {
let closure = || { let ref mut b = *a; };
}
"#,
expect!["53..79;20..21;62..71 ByRef(Mut { kind: Default }) *a &'? mut bool"],
expect!["53..79;20..21;74..76 ByRef(Mutable) *a &'<erased> mut bool"],
);
}
@@ -211,7 +241,7 @@ fn main() {
let closure = || { let _ = *a else { return; }; };
}
"#,
expect!["53..88;20..21;66..68 ByRef(Shared) *a &'? bool"],
expect![""],
);
}
@@ -243,8 +273,8 @@ fn main() {
}
"#,
expect![[r#"
71..89;36..41;84..86 ByRef(Shared) a &'? NonCopy
109..131;36..41;122..128 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
71..89;36..41;85..86 ByRef(Immutable) a &'<erased> NonCopy
109..131;36..41;127..128 ByRef(Mutable) a &'<erased> mut NonCopy"#]],
);
}
@@ -259,7 +289,7 @@ fn main() {
let closure = || { let b = a.a; };
}
"#,
expect!["92..111;50..51;105..108 ByRef(Shared) a.a &'? i32"],
expect!["92..111;50..51;105..108 ByRef(Immutable) a.a &'<erased> i32"],
);
}
@@ -280,8 +310,8 @@ fn main() {
}
"#,
expect![[r#"
133..212;87..92;154..158 ByRef(Shared) a.a &'? i32
133..212;87..92;176..184 ByRef(Mut { kind: Default }) a.b &'? mut i32
133..212;87..92;155..158 ByRef(Immutable) a.a &'<erased> i32
133..212;87..92;181..184 ByRef(Mutable) a.b &'<erased> mut i32
133..212;87..92;202..205 ByValue a.c NonCopy"#]],
);
}
@@ -303,8 +333,8 @@ fn main() {
}
"#,
expect![[r#"
123..133;92..97;126..127 ByRef(Shared) a &'? Foo
153..164;92..97;156..157 ByRef(Mut { kind: Default }) a &'? mut Foo"#]],
123..133;92..97;126..127 ByRef(Immutable) a &'<erased> Foo
153..164;92..97;156..157 ByRef(Mutable) a &'<erased> mut Foo"#]],
);
}
@@ -331,7 +361,7 @@ fn main() {
}
"#,
expect![[r#"
113..167;36..41;127..128,154..160 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool
113..167;36..41;127..128,159..160 ByRef(Mutable) a &'<erased> mut &'? mut bool
231..304;196..201;252..253,276..277,296..297 ByValue a NonCopy"#]],
);
}
@@ -370,8 +400,8 @@ fn main() {
}
"#,
expect![[r#"
125..163;36..41;134..135 ByRef(Shared) a &'? NonCopy
183..225;36..41;192..193 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
125..163;36..41;134..135 ByRef(Immutable) a &'<erased> NonCopy
183..225;36..41;192..193 ByRef(Mutable) a &'<erased> mut NonCopy"#]],
);
}
@@ -385,7 +415,7 @@ fn main() {
let mut closure = || { let (b | b) = a; };
}
"#,
expect!["57..80;20..25;76..77,76..77 ByRef(Shared) a &'? bool"],
expect!["57..80;20..25;76..77 ByRef(Immutable) a &'<erased> bool"],
);
}
@@ -405,7 +435,7 @@ fn main() {
}
"#,
expect![
"57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"
"57..149;20..25;79..80,99..100,123..124,134..135 ByRef(Mutable) a &'<erased> mut bool"
],
);
}
@@ -420,7 +450,7 @@ fn main() {
let mut closure = || { let b = *&mut a; };
}
"#,
expect!["57..80;20..25;71..77 ByRef(Mut { kind: Default }) a &'? mut bool"],
expect!["57..80;20..25;76..77 ByRef(Mutable) a &'<erased> mut bool"],
);
}
@@ -439,10 +469,10 @@ fn main() {
}
"#,
expect![[r#"
54..72;20..25;67..69 ByRef(Shared) a &'? &'? bool
92..114;20..25;105..111 ByRef(Mut { kind: Default }) a &'? mut &'? bool
158..176;124..125;171..173 ByRef(Shared) a &'? &'? mut bool
196..218;124..125;209..215 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool"#]],
54..72;20..25;68..69 ByRef(Immutable) a &'<erased> &'? bool
92..114;20..25;110..111 ByRef(Mutable) a &'<erased> mut &'? bool
158..176;124..125;172..173 ByRef(Immutable) a &'<erased> &'? mut bool
196..218;124..125;214..215 ByRef(Mutable) a &'<erased> mut &'? mut bool"#]],
);
}
@@ -450,7 +480,7 @@ fn main() {
fn multiple_capture_usages() {
check_closure_captures(
r#"
//- minicore:copy, fn
//- minicore: copy, fn
struct A { a: i32, b: bool }
fn main() {
let mut a = A { a: 123, b: false };
@@ -461,7 +491,7 @@ fn main() {
closure();
}
"#,
expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"],
expect!["99..165;49..54;120..121,133..134 ByRef(Mutable) a &'<erased> mut A"],
);
}
@@ -484,8 +514,8 @@ fn main() {
}
"#,
expect![[r#"
129..225;49..54;149..155 ByRef(Shared) s_ref &'? &'? mut S
129..225;93..99;188..198 ByRef(Mut { kind: Default }) s_ref2 &'? mut &'? mut S"#]],
129..225;49..54;158..163 ByRef(Immutable) s_ref &'<erased> &'? mut S
129..225;93..99;201..207 ByRef(Mutable) s_ref2 &'<erased> mut &'? mut S"#]],
);
}
@@ -529,7 +559,7 @@ fn main() {
};
}
"#,
expect!["220..257;174..175;245..250 ByRef(Shared) c.b.x &'? i32"],
expect!["220..257;174..175;245..250 ByRef(Immutable) c.b.x &'<erased> i32"],
);
}
@@ -548,8 +578,8 @@ fn f() {
}
"#,
expect![[r#"
44..113;17..18;92..93 ByRef(Shared) a &'? i32
73..106;17..18;92..93 ByRef(Shared) a &'? i32"#]],
44..113;17..18;92..93 ByRef(Immutable) a &'<erased> i32
73..106;17..18;92..93 ByRef(Immutable) a &'<erased> i32"#]],
);
}
@@ -567,6 +597,6 @@ fn f() {
};
}
"#,
expect!["77..110;46..47;96..97 ByRef(Shared) b &'? i32"],
expect!["77..110;46..47;96..97 ByRef(Immutable) b &'<erased> i32"],
);
}
@@ -309,7 +309,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
}
"#,
);
@@ -49,6 +49,7 @@ fn foo() -> i32 {
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"ExprScopes::body_expr_scopes_",
"body_upvars_mentioned",
]
"#]],
);
@@ -137,6 +138,7 @@ fn baz() -> i32 {
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"ExprScopes::body_expr_scopes_",
"body_upvars_mentioned",
"InferenceResult::for_body_",
"FunctionSignature::of_",
"FunctionSignature::with_source_map_",
@@ -147,6 +149,7 @@ fn baz() -> i32 {
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"ExprScopes::body_expr_scopes_",
"body_upvars_mentioned",
"InferenceResult::for_body_",
"FunctionSignature::of_",
"FunctionSignature::with_source_map_",
@@ -157,6 +160,7 @@ fn baz() -> i32 {
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"ExprScopes::body_expr_scopes_",
"body_upvars_mentioned",
]
"#]],
);
@@ -205,6 +209,7 @@ fn baz() -> i32 {
"Body::of_",
"InferenceResult::for_body_",
"ExprScopes::body_expr_scopes_",
"body_upvars_mentioned",
"AttrFlags::query_",
"FunctionSignature::with_source_map_",
"FunctionSignature::of_",
@@ -594,6 +599,7 @@ fn main() {
"GenericPredicates::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"body_upvars_mentioned",
"InferenceResult::for_body_",
"FunctionSignature::of_",
"FunctionSignature::with_source_map_",
@@ -616,6 +622,7 @@ fn main() {
"impl_self_ty_with_diagnostics_query",
"AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
"body_upvars_mentioned",
]
"#]],
);
@@ -686,6 +693,7 @@ fn main() {
"GenericPredicates::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"body_upvars_mentioned",
"InferenceResult::for_body_",
"FunctionSignature::with_source_map_",
"GenericPredicates::query_with_diagnostics_",
@@ -703,6 +711,7 @@ fn main() {
"impl_self_ty_with_diagnostics_query",
"AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
"body_upvars_mentioned",
]
"#]],
);
@@ -1367,7 +1367,7 @@ fn deref(&self) -> &Foo {
mod b {
fn foo() {
let x = super::a::Bar::new().0;
// ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not))))
// ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
// ^^^^^^^^^^^^^^^^^^^^^^ type: char
}
}
@@ -2129,7 +2129,7 @@ fn foo(&self) {}
use core::mem::ManuallyDrop;
fn test() {
ManuallyDrop::new(Foo).foo();
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
}
"#,
);
@@ -294,6 +294,7 @@ fn test() {
fn ref_pat_with_inference_variable() {
check_no_mismatches(
r#"
//- minicore: fn
enum E { A }
fn test() {
let f = |e| match e {
@@ -680,9 +680,9 @@ fn do_something(&self) -> impl Future<Output = usize> {
expect![[r#"
43..47 'self': &'? Self
168..172 'self': &'? F
205..227 '{ ... }': <F as AsyncFnMut<()>>::CallRefFuture<'<erased>>
205..227 '{ ... }': <F as AsyncFnMut<()>>::CallRefFuture<'?>
215..219 'self': &'? F
215..221 'self()': <F as AsyncFnMut<()>>::CallRefFuture<'<erased>>
215..221 'self()': <F as AsyncFnMut<()>>::CallRefFuture<'?>
"#]],
);
}
@@ -87,7 +87,7 @@ async fn test() {
fn infer_async_closure() {
check_types(
r#"
//- minicore: future, option
//- minicore: future, option, async_fn
async fn test() {
let f = async move |x: i32| x + 42;
f;
@@ -3149,6 +3149,7 @@ impl<A: Step> core::iter::Iterator for core::ops::Range<A> {
fn infer_closure_arg() {
check_infer(
r#"
//- minicore: fn
//- /lib.rs
enum Option<T> {
@@ -1,6 +1,8 @@
//! Helper functions for working with def, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
use std::iter::Enumerate;
use base_db::target::{self, TargetData};
use hir_def::{
EnumId, EnumVariantId, FunctionId, Lookup, TraitId, attrs::AttrFlags, lang_item::LangItems,
@@ -163,3 +165,54 @@ pub(crate) fn detect_variant_from_bytes<'a>(
};
Some((var_id, var_layout))
}
pub(crate) struct EnumerateAndAdjust<I> {
enumerate: Enumerate<I>,
gap_pos: usize,
gap_len: usize,
}
impl<I> Iterator for EnumerateAndAdjust<I>
where
I: Iterator,
{
type Item = (usize, <I as Iterator>::Item);
fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
self.enumerate
.next()
.map(|(i, elem)| (if i < self.gap_pos { i } else { i + self.gap_len }, elem))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.enumerate.size_hint()
}
}
pub(crate) trait EnumerateAndAdjustIterator {
fn enumerate_and_adjust(
self,
expected_len: usize,
gap_pos: Option<u32>,
) -> EnumerateAndAdjust<Self>
where
Self: Sized;
}
impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
fn enumerate_and_adjust(
self,
expected_len: usize,
gap_pos: Option<u32>,
) -> EnumerateAndAdjust<Self>
where
Self: Sized,
{
let actual_len = self.len();
EnumerateAndAdjust {
enumerate: self.enumerate(),
gap_pos: gap_pos.map(|it| it as usize).unwrap_or(expected_len),
gap_len: expected_len - actual_len,
}
}
}
@@ -21,6 +21,7 @@ serde_json.workspace = true
smallvec.workspace = true
tracing = { workspace = true, features = ["attributes"] }
triomphe.workspace = true
la-arena.workspace = true
ra-ap-rustc_type_ir.workspace = true
+144 -99
View File
@@ -85,14 +85,14 @@
GenericPredicates, InferenceResult, ParamEnvAndCrate, TyDefId, TyLoweringDiagnostic,
ValueTyDefId, all_super_traits, autoderef, check_orphan_rules,
consteval::try_const_usize,
db::{InternedClosureId, InternedCoroutineClosureId},
db::{InternedClosure, InternedClosureId, InternedCoroutineClosureId},
diagnostics::BodyValidationDiagnostic,
direct_super_traits, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{
self, InherentImpls, MethodResolutionContext, MethodResolutionUnstableFeatures,
},
mir::{MutBorrowKind, interpret_mir},
mir::interpret_mir,
next_solver::{
AliasTy, AnyImplId, ClauseKind, ConstKind, DbInterner, EarlyBinder, EarlyParamRegion,
ErrorGuaranteed, GenericArg, GenericArgs, ParamConst, ParamEnv, PolyFnSig, Region,
@@ -108,9 +108,8 @@
TypeVisitor, fast_reject,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _},
};
use smallvec::SmallVec;
use span::{AstIdNode, Edition, FileId};
use stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime};
use stdx::{format_to, impl_from, never};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
ast::{self, HasName as _, HasVisibility as _},
@@ -2305,11 +2304,9 @@ pub fn diagnostics<'db>(
}
}
(mir::MutabilityReason::Not, true) => {
if !infer.mutated_bindings_in_closure.contains(&binding_id) {
let should_ignore = body[binding_id].name.as_str().starts_with('_');
if !should_ignore {
acc.push(UnusedMut { local }.into())
}
let should_ignore = body[binding_id].name.as_str().starts_with('_');
if !should_ignore {
acc.push(UnusedMut { local }.into())
}
}
}
@@ -5158,59 +5155,33 @@ pub fn display_with_impl(&self, db: &dyn HirDatabase, display_target: DisplayTar
}
pub fn captured_items(&self, db: &'db dyn HirDatabase) -> Vec<ClosureCapture<'db>> {
let AnyClosureId::ClosureId(id) = self.id else {
// FIXME: Infer coroutine closures' captures.
return Vec::new();
let closure = match self.id {
AnyClosureId::ClosureId(it) => it.loc(db),
AnyClosureId::CoroutineClosureId(it) => it.loc(db),
};
let owner = id.loc(db).0;
let InternedClosure(owner, closure) = closure;
let infer = InferenceResult::of(db, owner);
let info = infer.closure_info(id);
info.0
.iter()
.cloned()
.map(|capture| ClosureCapture {
owner,
closure: id,
capture,
_marker: PhantomCovariantLifetime::new(),
})
let param_env = body_param_env_from_has_crate(db, owner);
infer.closures_data[&closure]
.min_captures
.values()
.flatten()
.map(|capture| ClosureCapture { owner, closure, capture, param_env })
.collect()
}
pub fn capture_types(&self, db: &'db dyn HirDatabase) -> Vec<Type<'db>> {
let AnyClosureId::ClosureId(id) = self.id else {
// FIXME: Infer coroutine closures' captures.
return Vec::new();
};
let owner = id.loc(db).0;
let Some(body_owner) = owner.as_def_with_body() else {
return Vec::new();
};
let infer = InferenceResult::of(db, body_owner);
let (captures, _) = infer.closure_info(id);
let env = body_param_env_from_has_crate(db, body_owner);
captures.iter().map(|capture| Type { env, ty: capture.ty(db, self.subst) }).collect()
}
pub fn fn_trait(&self, db: &dyn HirDatabase) -> FnTrait {
pub fn fn_trait(&self, _db: &dyn HirDatabase) -> FnTrait {
match self.id {
AnyClosureId::ClosureId(id) => {
let owner = id.loc(db).0;
let Some(body_owner) = owner.as_def_with_body() else {
return FnTrait::FnOnce;
};
let infer = InferenceResult::of(db, body_owner);
let info = infer.closure_info(id);
info.1.into()
}
AnyClosureId::CoroutineClosureId(_id) => {
// FIXME: Infer kind for coroutine closures.
match self.subst.as_coroutine_closure().kind() {
rustc_type_ir::ClosureKind::Fn => FnTrait::AsyncFn,
rustc_type_ir::ClosureKind::FnMut => FnTrait::AsyncFnMut,
rustc_type_ir::ClosureKind::FnOnce => FnTrait::AsyncFnOnce,
}
}
AnyClosureId::ClosureId(_) => match self.subst.as_closure().kind() {
rustc_type_ir::ClosureKind::Fn => FnTrait::Fn,
rustc_type_ir::ClosureKind::FnMut => FnTrait::FnMut,
rustc_type_ir::ClosureKind::FnOnce => FnTrait::FnOnce,
},
AnyClosureId::CoroutineClosureId(_) => match self.subst.as_coroutine_closure().kind() {
rustc_type_ir::ClosureKind::Fn => FnTrait::AsyncFn,
rustc_type_ir::ClosureKind::FnMut => FnTrait::AsyncFnMut,
rustc_type_ir::ClosureKind::FnOnce => FnTrait::AsyncFnOnce,
},
}
}
}
@@ -5283,51 +5254,120 @@ pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<Trait> {
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ClosureCapture<'db> {
owner: ExpressionStoreOwnerId,
closure: InternedClosureId,
capture: hir_ty::CapturedItem,
_marker: PhantomCovariantLifetime<'db>,
closure: ExprId,
capture: &'db hir_ty::closure_analysis::CapturedPlace,
param_env: ParamEnvAndCrate<'db>,
}
impl<'db> ClosureCapture<'db> {
pub fn local(&self) -> Local {
Local { parent: self.owner, binding_id: self.capture.local() }
Local { parent: self.owner, binding_id: self.capture.captured_local() }
}
/// Returns whether this place has any field (aka. non-deref) projections.
pub fn has_field_projections(&self) -> bool {
self.capture.has_field_projections()
self.capture
.place
.projections
.iter()
.any(|proj| matches!(proj.kind, hir_ty::closure_analysis::ProjectionKind::Field { .. }))
}
pub fn usages(&self) -> CaptureUsages {
CaptureUsages { parent: self.owner, spans: self.capture.spans() }
pub fn usages(&self) -> CaptureUsages<'db> {
CaptureUsages { parent: self.owner, sources: &self.capture.info.sources }
}
pub fn kind(&self) -> CaptureKind {
match self.capture.kind() {
hir_ty::CaptureKind::ByRef(
hir_ty::mir::BorrowKind::Shallow | hir_ty::mir::BorrowKind::Shared,
match self.capture.info.capture_kind {
hir_ty::closure_analysis::UpvarCapture::ByValue => CaptureKind::Move,
hir_ty::closure_analysis::UpvarCapture::ByUse => CaptureKind::SharedRef, // Good enough?
hir_ty::closure_analysis::UpvarCapture::ByRef(
hir_ty::closure_analysis::BorrowKind::Immutable,
) => CaptureKind::SharedRef,
hir_ty::CaptureKind::ByRef(hir_ty::mir::BorrowKind::Mut {
kind: MutBorrowKind::ClosureCapture,
}) => CaptureKind::UniqueSharedRef,
hir_ty::CaptureKind::ByRef(hir_ty::mir::BorrowKind::Mut {
kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
}) => CaptureKind::MutableRef,
hir_ty::CaptureKind::ByValue => CaptureKind::Move,
hir_ty::closure_analysis::UpvarCapture::ByRef(
hir_ty::closure_analysis::BorrowKind::UniqueImmutable,
) => CaptureKind::UniqueSharedRef,
hir_ty::closure_analysis::UpvarCapture::ByRef(
hir_ty::closure_analysis::BorrowKind::Mutable,
) => CaptureKind::MutableRef,
}
}
/// Converts the place to a name that can be inserted into source code.
pub fn place_to_name(&self, db: &dyn HirDatabase) -> String {
self.capture.place_to_name(self.owner, db)
pub fn place_to_name(&self, db: &dyn HirDatabase, edition: Edition) -> String {
let mut result = self.local().name(db).display(db, edition).to_string();
for (i, proj) in self.capture.place.projections.iter().enumerate() {
match proj.kind {
hir_ty::closure_analysis::ProjectionKind::Deref => {}
hir_ty::closure_analysis::ProjectionKind::Field { field_idx, variant_idx } => {
let ty = self.capture.place.ty_before_projection(i);
match ty.kind() {
TyKind::Tuple(_) => format_to!(result, "_{field_idx}"),
TyKind::Adt(adt_def, _) => {
let variant = match adt_def.def_id().0 {
AdtId::StructId(id) => VariantId::from(id),
AdtId::UnionId(id) => id.into(),
AdtId::EnumId(id) => {
id.enum_variants(db).variants[variant_idx as usize].0.into()
}
};
let field = &variant.fields(db).fields()
[LocalFieldId::from_raw(la_arena::RawIdx::from_u32(field_idx))];
format_to!(result, "_{}", field.name.display(db, edition));
}
_ => never!("mismatching projection type"),
}
}
_ => never!("unexpected projection kind"),
}
}
result
}
pub fn display_place_source_code(&self, db: &dyn HirDatabase) -> String {
self.capture.display_place_source_code(self.owner, db)
pub fn display_place_source_code(&self, db: &dyn HirDatabase, edition: Edition) -> String {
let mut result = self.local().name(db).display(db, edition).to_string();
// We only need the derefs that have no field access after them, autoderef will do the rest.
let mut last_derefs = 0;
for (i, proj) in self.capture.place.projections.iter().enumerate() {
match proj.kind {
hir_ty::closure_analysis::ProjectionKind::Deref => last_derefs += 1,
hir_ty::closure_analysis::ProjectionKind::Field { field_idx, variant_idx } => {
last_derefs = 0;
let ty = self.capture.place.ty_before_projection(i);
match ty.kind() {
TyKind::Tuple(_) => format_to!(result, ".{field_idx}"),
TyKind::Adt(adt_def, _) => {
let variant = match adt_def.def_id().0 {
AdtId::StructId(id) => VariantId::from(id),
AdtId::UnionId(id) => id.into(),
AdtId::EnumId(id) => {
// Can't really do that for an enum, unfortunately, so try to do something alike.
id.enum_variants(db).variants[variant_idx as usize].0.into()
}
};
let field = &variant.fields(db).fields()
[LocalFieldId::from_raw(la_arena::RawIdx::from_u32(field_idx))];
format_to!(result, ".{}", field.name.display(db, edition));
}
_ => never!("mismatching projection type"),
}
}
_ => never!("unexpected projection kind"),
}
}
result.insert_str(0, &"*".repeat(last_derefs));
result
}
pub fn display_place(&self, db: &dyn HirDatabase) -> String {
self.capture.display_place(self.owner, db)
pub fn ty(&self, _db: &'db dyn HirDatabase) -> Type<'db> {
Type { env: self.param_env, ty: self.capture.place.ty() }
}
/// The type that is stored in the closure, which is different from [`Self::ty()`], representing
/// the place's type, when the capture is by ref.
pub fn captured_ty(&self, db: &'db dyn HirDatabase) -> Type<'db> {
Type { env: self.param_env, ty: self.capture.captured_ty(db) }
}
}
@@ -5340,38 +5380,43 @@ pub enum CaptureKind {
}
#[derive(Debug, Clone)]
pub struct CaptureUsages {
pub struct CaptureUsages<'db> {
parent: ExpressionStoreOwnerId,
spans: SmallVec<[mir::MirSpan; 3]>,
sources: &'db [hir_ty::closure_analysis::CaptureSourceStack],
}
impl CaptureUsages {
impl CaptureUsages<'_> {
fn is_ref(store: &ExpressionStore, id: ExprOrPatId) -> bool {
match id {
ExprOrPatId::ExprId(expr) => matches!(store[expr], Expr::Ref { .. }),
// FIXME: Figure out if this is correct wrt. match ergonomics.
ExprOrPatId::PatId(pat) => match store[pat] {
Pat::Bind { id: binding, .. } => matches!(
store[binding].mode,
BindingAnnotation::Ref | BindingAnnotation::RefMut
),
_ => false,
},
}
}
pub fn sources(&self, db: &dyn HirDatabase) -> Vec<CaptureUsageSource> {
let (body, source_map) = ExpressionStore::with_source_map(db, self.parent);
let mut result = Vec::with_capacity(self.spans.len());
for &span in self.spans.iter() {
let is_ref = span.is_ref_span(body);
match span {
mir::MirSpan::ExprId(expr) => {
let (store, source_map) = ExpressionStore::with_source_map(db, self.parent);
let mut result = Vec::with_capacity(self.sources.len());
for source in self.sources {
let source = source.final_source();
let is_ref = Self::is_ref(store, source);
match source {
ExprOrPatId::ExprId(expr) => {
if let Ok(expr) = source_map.expr_syntax(expr) {
result.push(CaptureUsageSource { is_ref, source: expr })
}
}
mir::MirSpan::PatId(pat) => {
ExprOrPatId::PatId(pat) => {
if let Ok(pat) = source_map.pat_syntax(pat) {
result.push(CaptureUsageSource { is_ref, source: pat });
}
}
mir::MirSpan::BindingId(binding) => result.extend(
source_map
.patterns_for_binding(binding)
.iter()
.filter_map(|&pat| source_map.pat_syntax(pat).ok())
.map(|pat| CaptureUsageSource { is_ref, source: pat }),
),
mir::MirSpan::SelfParam | mir::MirSpan::Unknown => {
unreachable!("invalid capture usage span")
}
}
}
result
@@ -1693,9 +1693,7 @@ pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>>
hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
// FIXME: Should we handle unknown mutability better?
Adjust::Deref(Some(OverloadedDeref(
m.map(mutability).unwrap_or(Mutability::Shared),
)))
Adjust::Deref(Some(OverloadedDeref(mutability(m))))
}
hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
@@ -24,7 +24,7 @@
// This converts a closure to a freestanding function, changing all captures to parameters.
//
// ```
// # //- minicore: copy
// # //- minicore: copy, fn
// # struct String;
// # impl String {
// # fn new() -> Self {}
@@ -90,6 +90,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
}
})
.collect::<Option<Vec<_>>>()?;
let capture_params_start = params.len();
let mut body = closure.body()?.clone_for_update();
let mut is_gen = false;
@@ -152,7 +153,8 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
.map(|(_, _, it)| it.clone())
.unwrap_or_else(|| make::name("fun_name"));
let captures = closure_ty.captured_items(ctx.db());
let capture_tys = closure_ty.capture_types(ctx.db());
let capture_tys =
captures.iter().map(|capture| capture.captured_ty(ctx.db())).collect::<Vec<_>>();
let mut captures_as_args = Vec::with_capacity(captures.len());
@@ -163,22 +165,28 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
for (capture, capture_ty) in std::iter::zip(&captures, &capture_tys) {
// FIXME: Allow configuring the replacement of `self`.
let capture_name =
if capture.local().is_self(ctx.db()) && !capture.has_field_projections() {
make::name("this")
} else {
make::name(&capture.place_to_name(ctx.db()))
};
let is_self = capture.local().is_self(ctx.db()) && !capture.has_field_projections();
let capture_name = if is_self {
make::name("this")
} else {
make::name(&capture.place_to_name(ctx.db(), ctx.edition()))
};
closure_mentioned_generic_params.extend(capture_ty.generic_params(ctx.db()));
let capture_ty = capture_ty
.display_source_code(ctx.db(), module.into(), true)
.unwrap_or_else(|_| "_".to_owned());
params.push(make::param(
let param = make::param(
ast::Pat::IdentPat(make::ident_pat(false, false, capture_name.clone_subtree())),
make::ty(&capture_ty),
));
);
if is_self {
// Always put `this` first.
params.insert(capture_params_start, param);
} else {
params.push(param);
}
for capture_usage in capture.usages().sources(ctx.db()) {
if capture_usage.file_id() != ctx.file_id() {
@@ -188,24 +196,32 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
let capture_usage_source = capture_usage.source();
let capture_usage_source = capture_usage_source.to_node(&body_root);
let expr = match capture_usage_source {
let mut expr = match capture_usage_source {
Either::Left(expr) => expr,
Either::Right(pat) => {
let Some(expr) = expr_of_pat(pat) else { continue };
expr
}
};
if !capture_usage.is_ref() {
expr = peel_ref(expr);
}
let replacement = wrap_capture_in_deref_if_needed(
&expr,
&capture_name,
capture.kind(),
capture_usage.is_ref(),
matches!(expr, ast::Expr::RefExpr(_)) || capture_usage.is_ref(),
)
.clone_for_update();
capture_usages_replacement_map.push((expr, replacement));
}
captures_as_args.push(capture_as_arg(ctx, capture));
let capture_as_arg = capture_as_arg(ctx, capture);
if is_self {
captures_as_args.insert(0, capture_as_arg);
} else {
captures_as_args.push(capture_as_arg);
}
}
let (closure_type_params, closure_where_clause) =
@@ -463,24 +479,29 @@ fn compute_closure_type_params(
(Some(make::generic_param_list(include_params)), where_clause)
}
fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
loop {
if ast::ParenExpr::can_cast(expr.syntax().kind()) {
let Some(parent) = expr.syntax().parent().and_then(ast::Expr::cast) else { break };
expr = parent;
} else {
break;
}
}
expr
}
fn peel_ref(mut expr: ast::Expr) -> ast::Expr {
expr = peel_parens(expr);
expr.syntax().parent().and_then(ast::RefExpr::cast).map(Into::into).unwrap_or(expr)
}
fn wrap_capture_in_deref_if_needed(
expr: &ast::Expr,
capture_name: &ast::Name,
capture_kind: CaptureKind,
is_ref: bool,
) -> ast::Expr {
fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
loop {
if ast::ParenExpr::can_cast(expr.syntax().kind()) {
let Some(parent) = expr.syntax().parent().and_then(ast::Expr::cast) else { break };
expr = parent;
} else {
break;
}
}
expr
}
let capture_name = make::expr_path(make::path_from_text(&capture_name.text()));
if capture_kind == CaptureKind::Move || is_ref {
return capture_name;
@@ -507,8 +528,11 @@ fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
}
fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture<'_>) -> ast::Expr {
let place = parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.edition())
.expect("`display_place_source_code()` produced an invalid expr");
let place = parse_expr_from_str(
&capture.display_place_source_code(ctx.db(), ctx.edition()),
ctx.edition(),
)
.expect("`display_place_source_code()` produced an invalid expr");
let needs_mut = match capture.kind() {
CaptureKind::SharedRef => false,
CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true,
@@ -688,7 +712,7 @@ fn handles_unique_captures() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore:copy
//- minicore: copy, fn
fn main() {
let s = &mut true;
let closure = |$0| { *s = false; };
@@ -710,7 +734,7 @@ fn multiple_capture_usages() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore:copy
//- minicore: copy, fn
struct A { a: i32, b: bool }
fn main() {
let mut a = A { a: 123, b: false };
@@ -740,8 +764,8 @@ fn changes_names_of_place() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore:copy
struct A { b: &'static B, c: i32 }
//- minicore: copy, fn
struct A { b: &'static mut B, c: i32 }
struct B(bool, i32);
struct C;
impl C {
@@ -756,7 +780,7 @@ fn foo(&self) {
}
"#,
r#"
struct A { b: &'static B, c: i32 }
struct A { b: &'static mut B, c: i32 }
struct B(bool, i32);
struct C;
impl C {
@@ -778,7 +802,7 @@ fn self_with_fields_does_not_change_to_this() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore:copy
//- minicore: copy, fn
struct A { b: &'static B, c: i32 }
struct B(bool, i32);
impl A {
@@ -795,10 +819,10 @@ struct A { b: &'static B, c: i32 }
struct B(bool, i32);
impl A {
fn foo(&self) {
fn closure(self_b_1: &i32) {
let b = *self_b_1;
fn closure(self_b: &B) {
let b = self_b.1;
}
closure(&self.b.1);
closure(self.b);
}
}
"#,
@@ -810,7 +834,7 @@ fn replaces_async_closure_with_async_fn() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy, future
//- minicore: copy, future, async_fn
fn foo(&self) {
let closure = async |$0| 1;
closure();
@@ -832,7 +856,7 @@ fn replaces_async_block_with_async_fn() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy, future
//- minicore: copy, future, fn
fn foo() {
let closure = |$0| async { 1 };
closure();
@@ -878,7 +902,7 @@ fn leaves_block_in_place() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let closure = |$0| {};
closure();
@@ -898,7 +922,7 @@ fn wraps_in_block_if_needed() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let a = 1;
let closure = |$0| a;
@@ -918,7 +942,7 @@ fn closure(a: &i32) -> i32 {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let closure = |$0| 'label: {};
closure();
@@ -936,7 +960,7 @@ fn closure() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let closure = |$0| {
const { () }
@@ -956,7 +980,7 @@ fn closure() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let closure = |$0| unsafe { };
closure();
@@ -974,7 +998,7 @@ fn closure() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
{
let closure = |$0| match () {
@@ -1049,7 +1073,7 @@ fn finds_pat_for_expr() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
struct A { b: B }
struct B(bool, i32);
fn foo() {
@@ -1066,7 +1090,7 @@ struct A { b: B }
fn foo() {
let mut a = A { b: B(true, 0) };
fn closure(a_b_1: &mut i32) {
let A { b: B(_, ref mut c) } = a_b_1;
let A { b: B(_, ref mut c) } = *a_b_1;
}
closure(&mut a.b.1);
}
@@ -1079,7 +1103,7 @@ fn with_existing_params() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let (mut a, b) = (0.1, "abc");
let closure = |$0p1: i32, p2: &mut bool| {
@@ -1107,7 +1131,7 @@ fn with_existing_params_newlines() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let (mut a, b) = (0.1, "abc");
let closure = |$0p1: i32, p2| {
@@ -1145,7 +1169,7 @@ fn with_existing_params_trailing_comma() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, fn
fn foo() {
let (mut a, b) = (0.1, "abc");
let closure = |$0p1: i32, p2| {
@@ -1183,7 +1207,7 @@ fn closure_using_generic_params() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore: copy
//- minicore: copy, from
struct Foo<A, B, const C: usize>(A, B);
impl<A, B: From<A>, const C: usize> Foo<A, B, C> {
fn foo<D, E, F, G>(a: A, b: D)
@@ -1244,7 +1268,7 @@ fn unique_and_imm() {
check_assist(
convert_closure_to_fn,
r#"
//- minicore:copy
//- minicore: copy, fn
fn main() {
let a = &mut true;
let closure = |$0| {
@@ -897,6 +897,7 @@ fn in_closure_args() {
check_assist(
assist,
r#"
//- minicore: fn
fn main() {
let f = |$0t| t.0 + t.1;
let v = f((1,2));
@@ -1101,6 +1102,7 @@ fn replace_all() {
check_assist(
assist,
r#"
//- minicore: fn
fn main() {
let $0t = (1,2);
let v = t.1;
@@ -1051,6 +1051,7 @@ fn test_inline_closure() {
check_assist(
inline_local_variable,
r#"
//- minicore: fn
fn main() {
let $0f = || 2;
let _ = f();
@@ -962,7 +962,7 @@ fn wrap_return_type_in_option_simple_with_closure() {
check_assist_by_label(
wrap_return_type,
r#"
//- minicore: option
//- minicore: option, fn
fn foo(the_field: u32) ->$0 u32 {
let true_closure = || { return true; };
if the_field < 5 {
@@ -996,7 +996,7 @@ fn foo(the_field: u32) -> Option<u32> {
check_assist_by_label(
wrap_return_type,
r#"
//- minicore: option
//- minicore: option, fn
fn foo(the_field: u32) -> u32$0 {
let true_closure = || {
return true;
@@ -2033,7 +2033,7 @@ fn wrap_return_type_in_result_simple_with_closure() {
check_assist_by_label(
wrap_return_type,
r#"
//- minicore: result
//- minicore: result, fn
fn foo(the_field: u32) ->$0 u32 {
let true_closure = || { return true; };
if the_field < 5 {
@@ -2067,7 +2067,7 @@ fn foo(the_field: u32) -> Result<u32, ${0:_}> {
check_assist_by_label(
wrap_return_type,
r#"
//- minicore: result
//- minicore: result, fn
fn foo(the_field: u32) -> u32$0 {
let true_closure = || {
return true;
@@ -463,7 +463,7 @@ fn doctest_convert_closure_to_fn() {
check_doc_test(
"convert_closure_to_fn",
r#####"
//- minicore: copy
//- minicore: copy, fn
struct String;
impl String {
fn new() -> Self {}
@@ -357,6 +357,7 @@ fn f() {
fn arg_count_lambda() {
check_diagnostics(
r#"
//- minicore: fn
fn main() {
let f = |()| ();
f();
@@ -845,6 +845,7 @@ fn foo(v: &Union) {
fn union_destructuring() {
check_diagnostics(
r#"
//- minicore: fn
union Union { field: u8 }
fn foo(v @ Union { field: _field }: &Union) {
// ^^^^^^ error: access to union field is unsafe and requires an unsafe function or block
@@ -95,6 +95,7 @@ fn bar() -> u8 {
fn remove_trailing_return_closure() {
check_diagnostics(
r#"
//- minicore: fn
fn foo() -> u8 {
let bar = || return 2;
bar() //^^^^^^^^ 💡 weak: replace return <expr>; with <expr>
@@ -103,6 +104,7 @@ fn foo() -> u8 {
);
check_diagnostics(
r#"
//- minicore: fn
fn foo() -> u8 {
let bar = || {
return 2;
@@ -276,6 +278,7 @@ fn bar() -> u8 {
fn replace_in_closure() {
check_fix(
r#"
//- minicore: fn
fn foo() -> u8 {
let bar = || return$0 2;
bar()
@@ -290,6 +293,7 @@ fn foo() -> u8 {
);
check_fix(
r#"
//- minicore: fn
fn foo() -> u8 {
let bar = || {
return$0 2;
@@ -6,6 +6,7 @@
fn tracing_infinite_repeat() {
check_diagnostics_with_disabled(
r#"
//- minicore: fn
//- /core.rs crate:core
#[rustc_builtin_macro]
#[macro_export]
@@ -2083,6 +2083,7 @@ fn test() {
fn return_in_macros() {
check(
r#"
//- minicore: fn
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
@@ -1009,8 +1009,9 @@ fn closure_ty(
display_target: DisplayTarget,
) -> Option<HoverResult> {
let c = original.as_closure()?;
let mut captures_rendered = c.captured_items(sema.db)
.into_iter()
let captures = c.captured_items(sema.db);
let mut captures_rendered = captures
.iter()
.map(|it| {
let borrow_kind = match it.kind() {
CaptureKind::SharedRef => "immutable borrow",
@@ -1018,7 +1019,7 @@ fn closure_ty(
CaptureKind::MutableRef => "mutable borrow",
CaptureKind::Move => "move",
};
format!("* `{}` by {}", it.display_place(sema.db), borrow_kind)
format!("* `{}` by {}", it.display_place_source_code(sema.db, display_target.edition), borrow_kind)
})
.join("\n");
if captures_rendered.trim().is_empty() {
@@ -1031,8 +1032,8 @@ fn closure_ty(
}
};
walk_and_push_ty(sema.db, original, &mut push_new_def);
c.capture_types(sema.db).into_iter().for_each(|ty| {
walk_and_push_ty(sema.db, &ty, &mut push_new_def);
captures.iter().for_each(|capture| {
walk_and_push_ty(sema.db, &capture.ty(sema.db), &mut push_new_def);
});
let adjusted = if let Some(adjusted_ty) = adjusted {
@@ -423,7 +423,7 @@ impl FnOnce()
## Captures
* `x.f1` by move
* `(*x.f2.0.0).f` by mutable borrow
* `x.f2.0.0.f` by mutable borrow
"#]],
);
check(
@@ -235,7 +235,7 @@ fn hints(
param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it))
}
ast::Expr::ClosureExpr(it) => {
closure_captures::hints(hints, famous_defs, config, it.clone());
closure_captures::hints(hints, famous_defs, config, it.clone(), file_id.edition(sema.db));
closure_ret::hints(hints, famous_defs, config, display_target, it)
},
ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, it),
@@ -1085,9 +1085,10 @@ fn foo() {
fn closure_dependency_cycle_no_panic() {
check(
r#"
//- minicore: fn
fn foo() {
let closure;
// ^^^^^^^ impl Fn()
// ^^^^^^^ impl FnOnce()
closure = || {
closure();
};
@@ -1095,9 +1096,9 @@ fn foo() {
fn bar() {
let closure1;
// ^^^^^^^^ impl Fn()
// ^^^^^^^^ impl FnOnce()
let closure2;
// ^^^^^^^^ impl Fn()
// ^^^^^^^^ impl FnOnce()
closure1 = || {
closure2();
};
@@ -906,7 +906,7 @@ fn closure_style() {
check_with_config(
InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
r#"
//- minicore: fn
//- minicore: fn, add, builtin_impls
fn main() {
let x = || 2;
//^ impl Fn() -> i32
@@ -928,7 +928,7 @@ fn main() {
..DISABLED_CONFIG
},
r#"
//- minicore: fn
//- minicore: fn, add, builtin_impls
fn main() {
let x = || 2;
//^ || -> i32
@@ -950,7 +950,7 @@ fn main() {
..DISABLED_CONFIG
},
r#"
//- minicore: fn
//- minicore: fn, add, builtin_impls
fn main() {
let x = || 2;
//^ …
@@ -1094,6 +1094,7 @@ fn edit_for_closure_param() {
check_edit(
TEST_CONFIG,
r#"
//- minicore: fn
fn test<T>(t: T) {
let f = |a, b, c| {};
let result = f(42, "", t);
@@ -3,6 +3,7 @@
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::{TextRange, TextSize};
use span::Edition;
use stdx::{TupleExt, never};
use syntax::ast::{self, AstNode};
@@ -15,6 +16,7 @@ pub(super) fn hints(
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig<'_>,
closure: ast::ClosureExpr,
edition: Edition,
) -> Option<()> {
if !config.closure_capture_hints {
return None;
@@ -60,7 +62,7 @@ pub(super) fn hints(
hir::CaptureKind::MutableRef => "&mut ",
hir::CaptureKind::Move => "",
},
capture.display_place(sema.db)
capture.display_place_source_code(sema.db, edition)
);
if never!(label.is_empty()) {
continue;
@@ -427,6 +427,7 @@ fn main() {
fn param_hints_on_closure() {
check_params(
r#"
//- minicore: fn
fn main() {
let clo = |a: u8, b: u8| a + b;
clo(
@@ -2740,6 +2740,7 @@ fn test() {
fn goto_ref_fn_kw() {
check(
r#"
//- minicore: fn
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
@@ -1348,7 +1348,7 @@ fn benchmark_syntax_highlighting_parser() {
})
.count()
};
assert_eq!(hash, 1606);
assert_eq!(hash, 1631);
}
#[test]
@@ -177,6 +177,7 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
Continue,
convert,
copy,
use_cloned,
Copy,
core_panic,
core,
@@ -199,7 +200,9 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
derive,
discriminant_kind,
discriminant_type,
dispatch_from_dyn,destruct,
dispatch_from_dyn,
destruct,
bikeshed_guaranteed_no_drop,
div_assign,
div,
doc,
@@ -232,6 +235,8 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
async_fn_once_output,
async_fn_mut,
async_fn,
async_fn_kind_helper,
async_fn_kind_upvars,
call_ref_future,
call_once_future,
fn_ptr_addr,
@@ -297,6 +302,8 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
iter,
Iterator,
iterator,
fused_iterator,
async_iterator,
keyword,
lang,
lang_items,
@@ -568,4 +575,7 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
contracts_internals,
freeze_impls,
unsized_fn_params,
field,
field_base,
field_type,
}
@@ -1577,6 +1577,6 @@ fn test() {
let res: serde_json::Value = serde_json::from_str(res.as_str().unwrap()).unwrap();
let arr = res.as_array().unwrap();
assert_eq!(arr.len(), 2);
assert_eq!(arr.len(), 1);
expect![[r#"{"goal":"Goal { param_env: ParamEnv { clauses: [] }, predicate: Binder { value: TraitPredicate(usize: Trait, polarity:Positive), bound_vars: [] } }","result":"Err(NoSolution)","depth":0,"candidates":[]}"#]].assert_eq(&arr[0].to_string());
}
@@ -936,6 +936,14 @@ extern "rust-call" fn async_call_once(self, args: A) -> Self::CallOnceFuture {
}
}
}
mod internal_implementation_detail {
#[lang = "async_fn_kind_helper"]
trait AsyncFnKindHelper<GoalKind> {
#[lang = "async_fn_kind_upvars"]
type Upvars<'closure_env, Inputs, Upvars, BorrowedUpvarsAsFnPtr>;
}
}
}
pub use self::async_function::{AsyncFn, AsyncFnMut, AsyncFnOnce};
// endregion:async_fn