Auto merge of #144420 - Qelxiros:smart_pointer_try_map, r=joboet

smart pointer (try_)map

Tracking issue: rust-lang/rust#144419
This commit is contained in:
bors
2025-10-31 17:09:47 +00:00
9 changed files with 599 additions and 13 deletions
+80
View File
@@ -192,11 +192,15 @@
use core::future::Future;
use core::hash::{Hash, Hasher};
use core::marker::{Tuple, Unsize};
#[cfg(not(no_global_oom_handling))]
use core::mem::MaybeUninit;
use core::mem::{self, SizedTypeProperties};
use core::ops::{
AsyncFn, AsyncFnMut, AsyncFnOnce, CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut,
DerefPure, DispatchFromDyn, LegacyReceiver,
};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::pin::{Pin, PinCoerceUnsized};
use core::ptr::{self, NonNull, Unique};
use core::task::{Context, Poll};
@@ -386,6 +390,82 @@ pub fn try_new_uninit() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
pub fn try_new_zeroed() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
Box::try_new_zeroed_in(Global)
}
/// Maps the value in a box, reusing the allocation if possible.
///
/// `f` is called on the value in the box, and the result is returned, also boxed.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::map(b, f)` instead of `b.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// let b = Box::new(7);
/// let new = Box::map(b, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> Box<U> {
    // The allocation can only be reused when `T` and `U` have identical
    // layout, i.e. the same size and alignment.
    if size_of::<T>() == size_of::<U>() && align_of::<T>() == align_of::<U>() {
        // Move the value out, keeping the now-uninitialized allocation.
        let (value, allocation) = Box::take(this);
        Box::write(
            // SAFETY: `T` and `U` were checked above to have equal size and
            // alignment, and `MaybeUninit` imposes no validity requirement on
            // its contents, so reinterpreting the uninitialized allocation is sound.
            unsafe { mem::transmute::<Box<MaybeUninit<T>>, Box<MaybeUninit<U>>>(allocation) },
            f(value),
        )
    } else {
        // Layouts differ: allocate afresh for the mapped value.
        Box::new(f(*this))
    }
}
/// Attempts to map the value in a box, reusing the allocation if possible.
///
/// `f` is called on the value in the box, and if the operation succeeds, the result is
/// returned, also boxed.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::try_map(b, f)` instead of `b.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// let b = Box::new(7);
/// let new = Box::try_map(b, u32::try_from).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
    this: Self,
    f: impl FnOnce(T) -> R,
) -> <R::Residual as Residual<Box<R::Output>>>::TryType
where
    R: Try,
    R::Residual: Residual<Box<R::Output>>,
{
    // The allocation can only be reused when `T` and the success type
    // `R::Output` have identical layout.
    if size_of::<T>() == size_of::<R::Output>() && align_of::<T>() == align_of::<R::Output>() {
        // Move the value out, keeping the now-uninitialized allocation.
        let (value, allocation) = Box::take(this);
        try {
            Box::write(
                // SAFETY: sizes and alignments were checked equal above, and
                // `MaybeUninit` has no validity invariant, so the uninitialized
                // allocation may be reinterpreted at the new type.
                unsafe {
                    mem::transmute::<Box<MaybeUninit<T>>, Box<MaybeUninit<R::Output>>>(
                        allocation,
                    )
                },
                // If `f` fails, `?` exits the `try` block and the empty
                // `Box<MaybeUninit<_>>` allocation is simply freed.
                f(value)?,
            )
        }
    } else {
        // Layouts differ: allocate afresh for the mapped value.
        try { Box::new(f(*this)?) }
    }
}
}
impl<T, A: Allocator> Box<T, A> {
+2
View File
@@ -147,7 +147,9 @@
#![feature(trusted_fused)]
#![feature(trusted_len)]
#![feature(trusted_random_access)]
#![feature(try_blocks)]
#![feature(try_trait_v2)]
#![feature(try_trait_v2_residual)]
#![feature(try_with_capacity)]
#![feature(tuple_trait)]
#![feature(ub_checks)]
+253
View File
@@ -255,6 +255,8 @@
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
@@ -639,6 +641,93 @@ pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
pub fn pin(value: T) -> Pin<Rc<T>> {
unsafe { Pin::new_unchecked(Rc::new(value)) }
}
/// Maps the value in an `Rc`, reusing the allocation if possible.
///
/// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
/// an `Rc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// use std::rc::Rc;
///
/// let r = Rc::new(7);
/// let new = Rc::map(r, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
    // Reuse the allocation only when the layouts match and `this` is the
    // sole owner of the allocation.
    if size_of::<T>() == size_of::<U>()
        && align_of::<T>() == align_of::<U>()
        && Rc::is_unique(&this)
    {
        // SAFETY: uniqueness gives exclusive ownership of the allocation.
        // `ptr.read` moves the value out; the allocation is then
        // reinterpreted as holding `MaybeUninit<U>` (same layout as `T` per
        // the check above), overwritten with the mapped value, and marked
        // initialized. The moved-out value is dropped at the end of this
        // block, after `f` has observed it by reference, so `T` is dropped
        // exactly once.
        unsafe {
            let ptr = Rc::into_raw(this);
            let value = ptr.read();
            let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
            Rc::get_mut_unchecked(&mut allocation).write(f(&value));
            allocation.assume_init()
        }
    } else {
        // Shared or layout-incompatible: build a fresh `Rc` for the result.
        Rc::new(f(&*this))
    }
}
/// Attempts to map the value in an `Rc`, reusing the allocation if possible.
///
/// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
/// result is returned, also in an `Rc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// use std::rc::Rc;
///
/// let b = Rc::new(7);
/// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
    this: Self,
    f: impl FnOnce(&T) -> R,
) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
where
    R: Try,
    R::Residual: Residual<Rc<R::Output>>,
{
    // Reuse the allocation only when the layouts match and `this` is the
    // sole owner of the allocation.
    if size_of::<T>() == size_of::<R::Output>()
        && align_of::<T>() == align_of::<R::Output>()
        && Rc::is_unique(&this)
    {
        // SAFETY: uniqueness gives exclusive ownership of the allocation.
        // The value is moved out with `ptr.read`, the allocation is
        // reinterpreted as `MaybeUninit<R::Output>` (same layout per the
        // check above), written, and marked initialized. If `f` fails, `?`
        // exits early: the `Rc<MaybeUninit<_>>` is dropped, freeing the
        // allocation without dropping any (uninitialized) contents, and the
        // moved-out value is dropped normally either way.
        unsafe {
            let ptr = Rc::into_raw(this);
            let value = ptr.read();
            let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
            Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
            try { allocation.assume_init() }
        }
    } else {
        // Shared or layout-incompatible: build a fresh `Rc` for the result.
        try { Rc::new(f(&*this)?) }
    }
}
}
impl<T, A: Allocator> Rc<T, A> {
@@ -3991,6 +4080,128 @@ impl<T> UniqueRc<T> {
pub fn new(value: T) -> Self {
Self::new_in(value, Global)
}
/// Maps the value in a `UniqueRc`, reusing the allocation if possible.
///
/// `f` is called on the value in the `UniqueRc`, and the result is returned,
/// also in a `UniqueRc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
/// #![feature(unique_rc_arc)]
///
/// use std::rc::UniqueRc;
///
/// let r = UniqueRc::new(7);
/// let new = UniqueRc::map(r, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
    // Reuse the allocation only when the layouts match and no `Weak`
    // references could observe the allocation being repurposed.
    if size_of::<T>() == size_of::<U>()
        && align_of::<T>() == align_of::<U>()
        && UniqueRc::weak_count(&this) == 0
    {
        // SAFETY: no weak references exist, so the allocation is exclusively
        // owned. The value is moved out with `ptr.read`, the allocation
        // reinterpreted as `MaybeUninit<U>` (same layout per the check
        // above), overwritten with the mapped value, and marked initialized.
        unsafe {
            let ptr = UniqueRc::into_raw(this);
            let value = ptr.read();
            let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
            allocation.write(f(value));
            allocation.assume_init()
        }
    } else {
        // Weak references exist (or layouts differ): move the value out and
        // build a fresh `UniqueRc` for the result.
        UniqueRc::new(f(UniqueRc::unwrap(this)))
    }
}
/// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
///
/// `f` is called on the value in the `UniqueRc`, and if the operation succeeds,
/// the result is returned, also in a `UniqueRc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
/// #![feature(unique_rc_arc)]
///
/// use std::rc::UniqueRc;
///
/// let b = UniqueRc::new(7);
/// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
    this: Self,
    f: impl FnOnce(T) -> R,
) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
where
    R: Try,
    R::Residual: Residual<UniqueRc<R::Output>>,
{
    // Reuse the allocation only when the layouts match and no `Weak`
    // references could observe the allocation being repurposed.
    if size_of::<T>() == size_of::<R::Output>()
        && align_of::<T>() == align_of::<R::Output>()
        && UniqueRc::weak_count(&this) == 0
    {
        // SAFETY: no weak references exist, so the allocation is exclusively
        // owned. The value is moved out, the allocation reinterpreted as
        // `MaybeUninit<R::Output>` (same layout per the check above),
        // written, and marked initialized. If `f` fails, `?` exits early and
        // the `UniqueRc<MaybeUninit<_>>` is dropped, freeing the allocation
        // without dropping any (uninitialized) contents.
        unsafe {
            let ptr = UniqueRc::into_raw(this);
            let value = ptr.read();
            let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
            allocation.write(f(value)?);
            try { allocation.assume_init() }
        }
    } else {
        // Weak references exist (or layouts differ): move the value out and
        // build a fresh `UniqueRc` for the result.
        try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
    }
}
// Moves the value out of the `UniqueRc`, releasing the allocation's
// bookkeeping via a temporary `Weak`.
#[cfg(not(no_global_oom_handling))]
fn unwrap(this: Self) -> T {
    // Suppress the normal destructor; we take over its duties below.
    let this = ManuallyDrop::new(this);
    // SAFETY: a `UniqueRc` is the unique owner of its value, so it can be
    // moved out with a raw read; the original copy is never dropped because
    // the destructor was suppressed above.
    let val: T = unsafe { ptr::read(&**this) };
    // Hand the UniqueRc's implicit weak reference to a `Weak`; dropping it
    // releases the weak count and — presumably once no other weak references
    // remain — frees the allocation. NOTE(review): relies on `Weak`'s drop
    // semantics; confirm against `Weak::drop`.
    let _weak = Weak { ptr: this.ptr, alloc: Global };
    val
}
}
impl<T: ?Sized> UniqueRc<T> {
// Reconstructs a `UniqueRc` from a pointer previously produced by
// `UniqueRc::into_raw`.
//
// SAFETY (caller): `ptr` must point at the value inside a live `RcInner`
// allocated by the `Global` allocator, whose ownership was relinquished via
// `into_raw` — assumed to mirror the `Rc::from_raw` contract; TODO confirm.
#[cfg(not(no_global_oom_handling))]
unsafe fn from_raw(ptr: *const T) -> Self {
    // Distance of the value field from the start of the `RcInner` header.
    let offset = unsafe { data_offset(ptr) };
    // Reverse the offset to find the original RcInner.
    let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
    Self {
        // SAFETY: derived from a pointer into a live allocation, so non-null.
        ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
        _marker: PhantomData,
        _marker2: PhantomData,
        alloc: Global,
    }
}
// Consumes the `UniqueRc` without running its destructor and returns a raw
// pointer to the contained value; pair with `from_raw` to reconstitute it.
#[cfg(not(no_global_oom_handling))]
fn into_raw(this: Self) -> *const T {
    let this = ManuallyDrop::new(this);
    Self::as_ptr(&*this)
}
}
impl<T, A: Allocator> UniqueRc<T, A> {
@@ -4041,6 +4252,40 @@ pub fn into_rc(this: Self) -> Rc<T, A> {
Rc::from_inner_in(this.ptr, alloc)
}
}
// Returns the number of `Weak` references to this allocation, not counting
// the implicit weak reference that the `UniqueRc` itself holds.
#[cfg(not(no_global_oom_handling))]
fn weak_count(this: &Self) -> usize {
    // The raw counter includes the UniqueRc's own implicit weak reference.
    let raw = this.inner().weak();
    raw - 1
}
// Shared access to the reference-counting header backing this `UniqueRc`.
#[cfg(not(no_global_oom_handling))]
fn inner(&self) -> &RcInner<T> {
    // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
    unsafe { self.ptr.as_ref() }
}
// Raw pointer to the value field, preserving raw/mut provenance.
#[cfg(not(no_global_oom_handling))]
fn as_ptr(this: &Self) -> *const T {
    let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
    // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
    // this is required to retain raw/mut provenance such that e.g. `get_mut` can
    // write through the pointer after the UniqueRc is recovered through `from_raw`.
    unsafe { &raw mut (*ptr).value }
}
// Decomposes the `UniqueRc` into its raw inner pointer and allocator
// without running its destructor.
#[inline]
#[cfg(not(no_global_oom_handling))]
fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
    // Suppress the destructor; ownership of both fields moves to the caller.
    let guarded = mem::ManuallyDrop::new(this);
    // SAFETY: `guarded` is never dropped, so the allocator is not read twice.
    let alloc = unsafe { ptr::read(&guarded.alloc) };
    (guarded.ptr, alloc)
}
// Builds a `UniqueRc` directly from an `RcInner` pointer and allocator.
//
// SAFETY (caller): `ptr` must point to a live, initialized `RcInner`
// allocated by `alloc`, with counts consistent with unique ownership —
// TODO(review): confirm exact count requirements.
#[inline]
#[cfg(not(no_global_oom_handling))]
unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
    Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
}
}
impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
@@ -4059,6 +4304,14 @@ pub fn downgrade(this: &Self) -> Weak<T, A> {
}
}
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
    // Converts to `UniqueRc<T>` by reinterpreting the same allocation.
    //
    // SAFETY (caller): the contained `MaybeUninit<T>` must actually hold a
    // fully initialized `T`.
    unsafe fn assume_init(self) -> UniqueRc<T, A> {
        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
        // SAFETY: the caller guarantees the value is initialized; the cast
        // only changes `MaybeUninit<T>` to `T`, which share the same layout.
        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
    }
}
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
type Target = T;
+253
View File
@@ -22,6 +22,8 @@
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::panic::{RefUnwindSafe, UnwindSafe};
use core::pin::{Pin, PinCoerceUnsized};
use core::ptr::{self, NonNull};
@@ -650,6 +652,93 @@ pub fn try_new_zeroed() -> Result<Arc<mem::MaybeUninit<T>>, AllocError> {
)?))
}
}
/// Maps the value in an `Arc`, reusing the allocation if possible.
///
/// `f` is called on a reference to the value in the `Arc`, and the result is returned, also in
/// an `Arc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Arc::map(a, f)` instead of `a.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// use std::sync::Arc;
///
/// let r = Arc::new(7);
/// let new = Arc::map(r, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Arc<U> {
    // Reuse the allocation only when the layouts match and `this` is the
    // sole owner of the allocation.
    if size_of::<T>() == size_of::<U>()
        && align_of::<T>() == align_of::<U>()
        && Arc::is_unique(&this)
    {
        // SAFETY: uniqueness gives exclusive ownership of the allocation.
        // `ptr.read` moves the value out; the allocation is then
        // reinterpreted as holding `MaybeUninit<U>` (same layout as `T` per
        // the check above), overwritten with the mapped value, and marked
        // initialized. The moved-out value is dropped at the end of this
        // block, after `f` has observed it by reference, so `T` is dropped
        // exactly once.
        unsafe {
            let ptr = Arc::into_raw(this);
            let value = ptr.read();
            let mut allocation = Arc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
            Arc::get_mut_unchecked(&mut allocation).write(f(&value));
            allocation.assume_init()
        }
    } else {
        // Shared or layout-incompatible: build a fresh `Arc` for the result.
        Arc::new(f(&*this))
    }
}
/// Attempts to map the value in an `Arc`, reusing the allocation if possible.
///
/// `f` is called on a reference to the value in the `Arc`, and if the operation succeeds, the
/// result is returned, also in an `Arc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Arc::try_map(a, f)` instead of `a.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// use std::sync::Arc;
///
/// let b = Arc::new(7);
/// let new = Arc::try_map(b, |&i| u32::try_from(i)).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
    this: Self,
    f: impl FnOnce(&T) -> R,
) -> <R::Residual as Residual<Arc<R::Output>>>::TryType
where
    R: Try,
    R::Residual: Residual<Arc<R::Output>>,
{
    // Reuse the allocation only when the layouts match and `this` is the
    // sole owner of the allocation.
    if size_of::<T>() == size_of::<R::Output>()
        && align_of::<T>() == align_of::<R::Output>()
        && Arc::is_unique(&this)
    {
        // SAFETY: uniqueness gives exclusive ownership of the allocation.
        // The value is moved out with `ptr.read`, the allocation is
        // reinterpreted as `MaybeUninit<R::Output>` (same layout per the
        // check above), written, and marked initialized. If `f` fails, `?`
        // exits early: the `Arc<MaybeUninit<_>>` is dropped, freeing the
        // allocation without dropping any (uninitialized) contents, and the
        // moved-out value is dropped normally either way.
        unsafe {
            let ptr = Arc::into_raw(this);
            let value = ptr.read();
            let mut allocation = Arc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
            Arc::get_mut_unchecked(&mut allocation).write(f(&value)?);
            try { allocation.assume_init() }
        }
    } else {
        // Shared or layout-incompatible: build a fresh `Arc` for the result.
        try { Arc::new(f(&*this)?) }
    }
}
}
impl<T, A: Allocator> Arc<T, A> {
@@ -4403,6 +4492,128 @@ impl<T> UniqueArc<T, Global> {
pub fn new(value: T) -> Self {
Self::new_in(value, Global)
}
/// Maps the value in a `UniqueArc`, reusing the allocation if possible.
///
/// `f` is called on the value in the `UniqueArc`, and the result is returned,
/// also in a `UniqueArc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `UniqueArc::map(u, f)` instead of `u.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
/// #![feature(unique_rc_arc)]
///
/// use std::sync::UniqueArc;
///
/// let r = UniqueArc::new(7);
/// let new = UniqueArc::map(r, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueArc<U> {
    // Reuse the allocation only when the layouts match and no `Weak`
    // references could observe the allocation being repurposed.
    if size_of::<T>() == size_of::<U>()
        && align_of::<T>() == align_of::<U>()
        && UniqueArc::weak_count(&this) == 0
    {
        // SAFETY: no weak references exist, so the allocation is exclusively
        // owned. The value is moved out with `ptr.read`, the allocation
        // reinterpreted as `MaybeUninit<U>` (same layout per the check
        // above), overwritten with the mapped value, and marked initialized.
        unsafe {
            let ptr = UniqueArc::into_raw(this);
            let value = ptr.read();
            let mut allocation = UniqueArc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
            allocation.write(f(value));
            allocation.assume_init()
        }
    } else {
        // Weak references exist (or layouts differ): move the value out and
        // build a fresh `UniqueArc` for the result.
        UniqueArc::new(f(UniqueArc::unwrap(this)))
    }
}
/// Attempts to map the value in a `UniqueArc`, reusing the allocation if possible.
///
/// `f` is called on the value in the `UniqueArc`, and if the operation succeeds,
/// the result is returned, also in a `UniqueArc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `UniqueArc::try_map(u, f)` instead of `u.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
/// #![feature(unique_rc_arc)]
///
/// use std::sync::UniqueArc;
///
/// let b = UniqueArc::new(7);
/// let new = UniqueArc::try_map(b, u32::try_from).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
    this: Self,
    f: impl FnOnce(T) -> R,
) -> <R::Residual as Residual<UniqueArc<R::Output>>>::TryType
where
    R: Try,
    R::Residual: Residual<UniqueArc<R::Output>>,
{
    // Reuse the allocation only when the layouts match and no `Weak`
    // references could observe the allocation being repurposed.
    if size_of::<T>() == size_of::<R::Output>()
        && align_of::<T>() == align_of::<R::Output>()
        && UniqueArc::weak_count(&this) == 0
    {
        // SAFETY: no weak references exist, so the allocation is exclusively
        // owned. The value is moved out, the allocation reinterpreted as
        // `MaybeUninit<R::Output>` (same layout per the check above),
        // written, and marked initialized. If `f` fails, `?` exits early and
        // the `UniqueArc<MaybeUninit<_>>` is dropped, freeing the allocation
        // without dropping any (uninitialized) contents.
        unsafe {
            let ptr = UniqueArc::into_raw(this);
            let value = ptr.read();
            let mut allocation = UniqueArc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
            allocation.write(f(value)?);
            try { allocation.assume_init() }
        }
    } else {
        // Weak references exist (or layouts differ): move the value out and
        // build a fresh `UniqueArc` for the result.
        try { UniqueArc::new(f(UniqueArc::unwrap(this))?) }
    }
}
// Moves the value out of the `UniqueArc`, releasing the allocation's
// bookkeeping via a temporary `Weak`.
#[cfg(not(no_global_oom_handling))]
fn unwrap(this: Self) -> T {
    // Suppress the normal destructor; we take over its duties below.
    let this = ManuallyDrop::new(this);
    // SAFETY: a `UniqueArc` is the unique owner of its value, so it can be
    // moved out with a raw read; the original copy is never dropped because
    // the destructor was suppressed above.
    let val: T = unsafe { ptr::read(&**this) };
    // Hand the UniqueArc's implicit weak reference to a `Weak`; dropping it
    // releases the weak count and — presumably once no other weak references
    // remain — frees the allocation. NOTE(review): relies on `Weak`'s drop
    // semantics; confirm against `Weak::drop`.
    let _weak = Weak { ptr: this.ptr, alloc: Global };
    val
}
}
impl<T: ?Sized> UniqueArc<T> {
// Reconstructs a `UniqueArc` from a pointer previously produced by
// `UniqueArc::into_raw`.
//
// SAFETY (caller): `ptr` must point at the value inside a live `ArcInner`
// allocated by the `Global` allocator, whose ownership was relinquished via
// `into_raw` — assumed to mirror the `Arc::from_raw` contract; TODO confirm.
#[cfg(not(no_global_oom_handling))]
unsafe fn from_raw(ptr: *const T) -> Self {
    // Distance of the value field from the start of the `ArcInner` header.
    let offset = unsafe { data_offset(ptr) };
    // Reverse the offset to find the original ArcInner.
    let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut ArcInner<T> };
    Self {
        // SAFETY: derived from a pointer into a live allocation, so non-null.
        ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
        _marker: PhantomData,
        _marker2: PhantomData,
        alloc: Global,
    }
}
// Consumes the `UniqueArc` without running its destructor and returns a raw
// pointer to the contained value; pair with `from_raw` to reconstitute it.
#[cfg(not(no_global_oom_handling))]
fn into_raw(this: Self) -> *const T {
    // Suppress the destructor — ownership now lives behind the raw pointer.
    let leaked = ManuallyDrop::new(this);
    let inner: &Self = &leaked;
    Self::as_ptr(inner)
}
}
impl<T, A: Allocator> UniqueArc<T, A> {
@@ -4456,6 +4667,40 @@ pub fn into_arc(this: Self) -> Arc<T, A> {
Arc::from_inner_in(this.ptr, alloc)
}
}
// Returns the number of `Weak` references to this allocation, not counting
// the implicit weak reference that the `UniqueArc` itself holds.
#[cfg(not(no_global_oom_handling))]
fn weak_count(this: &Self) -> usize {
    // The raw counter includes the UniqueArc's own implicit weak reference.
    let raw = this.inner().weak.load(Acquire);
    raw - 1
}
// Shared access to the reference-counting header backing this `UniqueArc`.
#[cfg(not(no_global_oom_handling))]
fn inner(&self) -> &ArcInner<T> {
    // SAFETY: while this UniqueArc is alive we're guaranteed that the inner pointer is valid.
    unsafe { self.ptr.as_ref() }
}
// Raw pointer to the data field, preserving raw/mut provenance.
#[cfg(not(no_global_oom_handling))]
fn as_ptr(this: &Self) -> *const T {
    let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
    // SAFETY: This cannot go through Deref::deref or UniqueArc::inner because
    // this is required to retain raw/mut provenance such that e.g. `get_mut` can
    // write through the pointer after the UniqueArc is recovered through `from_raw`.
    unsafe { &raw mut (*ptr).data }
}
// Decomposes the `UniqueArc` into its raw inner pointer and allocator
// without running its destructor.
#[inline]
#[cfg(not(no_global_oom_handling))]
fn into_inner_with_allocator(this: Self) -> (NonNull<ArcInner<T>>, A) {
    // Suppress the destructor; ownership of both fields moves to the caller.
    let this = mem::ManuallyDrop::new(this);
    // SAFETY: `this` is never dropped, so the allocator is not read twice.
    (this.ptr, unsafe { ptr::read(&this.alloc) })
}
// Builds a `UniqueArc` directly from an `ArcInner` pointer and allocator.
//
// SAFETY (caller): `ptr` must point to a live, initialized `ArcInner`
// allocated by `alloc`, with counts consistent with unique ownership —
// TODO(review): confirm exact count requirements.
#[inline]
#[cfg(not(no_global_oom_handling))]
unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
    Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
}
}
impl<T: ?Sized, A: Allocator + Clone> UniqueArc<T, A> {
@@ -4486,6 +4731,14 @@ pub fn downgrade(this: &Self) -> Weak<T, A> {
}
}
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> UniqueArc<mem::MaybeUninit<T>, A> {
    // Converts to `UniqueArc<T>` by reinterpreting the same allocation.
    //
    // SAFETY (caller): the contained `MaybeUninit<T>` must actually hold a
    // fully initialized `T`.
    unsafe fn assume_init(self) -> UniqueArc<T, A> {
        let (ptr, alloc) = UniqueArc::into_inner_with_allocator(self);
        // SAFETY: the caller guarantees the value is initialized; the cast
        // only changes `MaybeUninit<T>` to `T`, which share the same layout.
        unsafe { UniqueArc::from_inner_in(ptr.cast(), alloc) }
    }
}
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Deref for UniqueArc<T, A> {
type Target = T;
@@ -278,10 +278,9 @@ note: if you're trying to build a new `Box<_, _>` consider using one of the foll
Box::<T>::new_uninit
Box::<T>::new_zeroed
Box::<T>::try_new
and 22 others
--> $SRC_DIR/alloc/src/boxed.rs:LL:COL
error: aborting due to 30 previous errors
Some errors have detailed explanations: E0121, E0224, E0261, E0412, E0599.
For more information about an error, try `rustc --explain E0121`.
For more information about an error, try `rustc --explain E0121`.
@@ -104,10 +104,9 @@ note: if you're trying to build a new `Box<_, _>` consider using one of the foll
Box::<T>::new_uninit
Box::<T>::new_zeroed
Box::<T>::try_new
and 22 others
--> $SRC_DIR/alloc/src/boxed.rs:LL:COL
error: aborting due to 11 previous errors
Some errors have detailed explanations: E0121, E0224, E0261, E0599.
For more information about an error, try `rustc --explain E0121`.
For more information about an error, try `rustc --explain E0121`.
+5 -5
View File
@@ -63,7 +63,7 @@ LL - x: (),
LL - })),
LL + wtf: Some(Box::new_in(_, _)),
|
= and 12 other candidates
= and 13 other candidates
help: consider using the `Default` trait
|
LL - wtf: Some(Box(U {
@@ -118,7 +118,7 @@ LL + let _ = Box::new_zeroed();
LL - let _ = Box {};
LL + let _ = Box::new_in(_, _);
|
= and 13 other candidates
= and 14 other candidates
help: consider using the `Default` trait
|
LL - let _ = Box {};
@@ -141,12 +141,12 @@ LL - let _ = Box::<i32> {};
LL + let _ = Box::<i32>::new_in(_, _);
|
LL - let _ = Box::<i32> {};
LL + let _ = Box::<i32>::into_inner(_);
LL + let _ = Box::<i32>::map(_, _);
|
LL - let _ = Box::<i32> {};
LL + let _ = Box::<i32>::write(_, _);
LL + let _ = Box::<i32>::into_inner(_);
|
= and 4 other candidates
= and 5 other candidates
help: consider using the `Default` trait
|
LL - let _ = Box::<i32> {};
@@ -63,7 +63,7 @@ LL - x: (),
LL - })),
LL + wtf: Some(Box::new_in(_, _)),
|
= and 12 other candidates
= and 13 other candidates
help: consider using the `Default` trait
|
LL - wtf: Some(Box(U {
@@ -118,7 +118,7 @@ LL + let _ = Box::new_zeroed();
LL - let _ = Box {};
LL + let _ = Box::new_in(_, _);
|
= and 13 other candidates
= and 14 other candidates
help: consider using the `Default` trait
|
LL - let _ = Box {};
@@ -63,7 +63,7 @@ LL - x: (),
LL - })),
LL + wtf: Some(Box::new_in(_, _)),
╰ and 12 other candidates
╰ and 13 other candidates
help: consider using the `Default` trait
╭╴
LL - wtf: Some(Box(U {
@@ -118,7 +118,7 @@ LL + let _ = Box::new_zeroed();
LL - let _ = Box {};
LL + let _ = Box::new_in(_, _);
╰ and 13 other candidates
╰ and 14 other candidates
help: consider using the `Default` trait
╭╴
LL - let _ = Box {};