Vec::push in consts MVP

This commit is contained in:
Deadbeef
2025-10-19 23:52:19 +00:00
parent fcd630976c
commit 3982d3e706
11 changed files with 361 additions and 87 deletions
+200 -59
View File
@@ -5,8 +5,8 @@
#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;
use core::hint;
use core::ptr::{self, NonNull};
use core::{cmp, hint};
unsafe extern "Rust" {
// These are the magic symbols to call the global allocator. rustc generates
@@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
impl Global {
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
match layout.size() {
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
// SAFETY: `layout` is non-zero in size,
@@ -194,10 +194,26 @@ fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, Allo
}
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY:
// * We have checked that `layout` is non-zero in size.
// * The caller is obligated to provide a layout that "fits", and in this case,
// "fit" always means a layout that is equal to the original, because our
// `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
// allocation than requested.
// * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
// safety documentation.
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}
// SAFETY: Same as `Allocator::grow`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow_impl(
fn grow_impl_runtime(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
@@ -241,69 +257,16 @@ unsafe fn grow_impl(
},
}
}
}
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Allocator for Global {
// SAFETY: Same as `Allocator::grow`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, false)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, true)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY:
// * We have checked that `layout` is non-zero in size.
// * The caller is obligated to provide a layout that "fits", and in this case,
// "fit" always means a layout that is equal to the original, because our
// `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
// allocation than requested.
// * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
// safety documentation.
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn grow_zeroed(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn shrink(
fn shrink_impl_runtime(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
_zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
debug_assert!(
new_layout.size() <= old_layout.size(),
@@ -340,6 +303,184 @@ unsafe fn shrink(
},
}
}
// SAFETY: Same as `Allocator::allocate`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
core::intrinsics::const_eval_select(
(layout, zeroed),
Global::alloc_impl_const,
Global::alloc_impl_runtime,
)
}
// SAFETY: Same as `Allocator::deallocate`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
core::intrinsics::const_eval_select(
(ptr, layout),
Global::deallocate_impl_const,
Global::deallocate_impl_runtime,
)
}
// SAFETY: Same as `Allocator::grow`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn grow_impl(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
core::intrinsics::const_eval_select(
(self, ptr, old_layout, new_layout, zeroed),
Global::grow_shrink_impl_const,
Global::grow_impl_runtime,
)
}
// SAFETY: Same as `Allocator::shrink`
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn shrink_impl(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
core::intrinsics::const_eval_select(
(self, ptr, old_layout, new_layout, false),
Global::grow_shrink_impl_const,
Global::shrink_impl_runtime,
)
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
// Compile-time allocation path, dispatched to via `const_eval_select`.
// Mirrors the runtime `alloc_impl`: zero-sized layouts get a dangling
// (never-dereferenced) pointer; otherwise memory comes from the
// `const_allocate` intrinsic, optionally zero-filled.
const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
    match layout.size() {
        0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
        // SAFETY: `layout` is non-zero in size,
        size => unsafe {
            let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
            let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
            if zeroed {
                // Zero bytes 0..size. Note: the increment must come *after*
                // the write; incrementing first would skip byte 0 and write
                // one byte past the end of the allocation.
                let mut offset = 0;
                while offset < size {
                    // SAFETY: `offset < size`, so the write stays within the
                    // allocation returned by `const_allocate`.
                    ptr.add(offset).write(0);
                    offset += 1;
                }
            }
            Ok(NonNull::slice_from_raw_parts(ptr, size))
        },
    }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
// Compile-time counterpart of `deallocate`: zero-sized layouts were never
// actually allocated, so only a nonzero size reaches `const_deallocate`.
const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
    if layout.size() == 0 {
        return;
    }
    // SAFETY: the size is nonzero here; all remaining preconditions must be
    // upheld by the caller, as per `Allocator::deallocate`'s documentation.
    unsafe {
        core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
    }
}
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
// Compile-time replacement for both `grow` and `shrink` (see the
// `const_eval_select` dispatch in `grow_impl`/`shrink_impl`): const
// evaluation has no `realloc`, so resizing is always allocate-new,
// copy, then deallocate-old.
// For `grow_zeroed`, `zeroed == true` zero-fills the entire new
// allocation first; the copy below then overwrites the prefix, which
// leaves the grown tail zeroed as required.
const fn grow_shrink_impl_const(
    &self,
    ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
    zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
    let new_ptr = self.alloc_impl(new_layout, zeroed)?;
    // SAFETY: both pointers are valid, and copying `min(old, new)` bytes is
    // in bounds of both the old and the new allocation.
    unsafe {
        ptr::copy_nonoverlapping(
            ptr.as_ptr(),
            new_ptr.as_mut_ptr(),
            cmp::min(old_layout.size(), new_layout.size()),
        );
    }
    // SAFETY: `ptr`/`old_layout` describe the caller's existing allocation,
    // which is no longer referenced after the copy above.
    unsafe {
        self.deallocate_impl(ptr, old_layout);
    }
    Ok(new_ptr)
}
}
// `Global` implements `Allocator` as a `const` trait impl so heap allocation
// also works during const evaluation (gated on `const_heap`). Every method
// forwards to a `*_impl` helper, which uses `const_eval_select` to dispatch
// between the compile-time (`const_allocate`-based) and runtime paths.
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
unsafe impl const Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.deallocate_impl(ptr, layout) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
    }
}
/// The allocator for `Box`.
+21 -2
View File
@@ -84,13 +84,14 @@ impl TryReserveError {
reason = "Uncertain how much info should be exposed",
issue = "48043"
)]
pub fn kind(&self) -> TryReserveErrorKind {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn kind(&self) -> TryReserveErrorKind {
self.kind.clone()
}
}
/// Details of the allocation that caused a `TryReserveError`
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(PartialEq, Eq, Debug)]
#[unstable(
feature = "try_reserve_kind",
reason = "Uncertain how much info should be exposed",
@@ -120,6 +121,24 @@ pub enum TryReserveErrorKind {
},
}
#[unstable(
    feature = "try_reserve_kind",
    reason = "Uncertain how much info should be exposed",
    issue = "48043"
)]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[cfg(not(test))]
// Hand-written `const` clone (a `#[derive(Clone)]` impl would not be usable
// in const contexts). Both variants carry only `Copy` data (`Layout`, `()`),
// so cloning is a plain field-by-field copy.
impl const Clone for TryReserveErrorKind {
    fn clone(&self) -> Self {
        match *self {
            TryReserveErrorKind::CapacityOverflow => TryReserveErrorKind::CapacityOverflow,
            TryReserveErrorKind::AllocError { layout, non_exhaustive: () } => {
                TryReserveErrorKind::AllocError { layout, non_exhaustive: () }
            }
        }
    }
}
#[cfg(test)]
pub use realalloc::collections::TryReserveErrorKind;
+6
View File
@@ -101,11 +101,16 @@
#![feature(char_internals)]
#![feature(clone_to_uninit)]
#![feature(coerce_unsized)]
#![feature(const_clone)]
#![feature(const_cmp)]
#![feature(const_convert)]
#![feature(const_default)]
#![feature(const_destruct)]
#![feature(const_eval_select)]
#![feature(const_heap)]
#![feature(copied_into_inner)]
#![feature(const_option_ops)]
#![feature(const_try)]
#![feature(core_intrinsics)]
#![feature(deprecated_suggestion)]
#![feature(deref_pure_trait)]
@@ -171,6 +176,7 @@
#![feature(const_trait_impl)]
#![feature(coroutine_trait)]
#![feature(decl_macro)]
#![feature(derive_const)]
#![feature(dropck_eyepatch)]
#![feature(fundamental)]
#![feature(hashmap_internals)]
+61 -20
View File
@@ -4,7 +4,7 @@
// Note: This module is also included in the alloctests crate using #[path] to
// run the tests. See the comment there for an explanation why this is the case.
use core::marker::PhantomData;
use core::marker::{Destruct, PhantomData};
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ptr::{self, Alignment, NonNull, Unique};
use core::{cmp, hint};
@@ -24,7 +24,7 @@
// only one location which panics rather than a bunch throughout the module.
#[cfg(not(no_global_oom_handling))]
#[cfg_attr(not(panic = "immediate-abort"), inline(never))]
fn capacity_overflow() -> ! {
const fn capacity_overflow() -> ! {
panic!("capacity overflow");
}
@@ -182,7 +182,11 @@ pub(crate) const fn new_in(alloc: A) -> Self {
/// allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub(crate) const fn with_capacity_in(capacity: usize, alloc: A) -> Self
where
A: [const] Allocator + [const] Destruct,
{
Self {
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
_marker: PhantomData,
@@ -331,7 +335,11 @@ pub(crate) fn reserve(&mut self, len: usize, additional: usize) {
/// caller to ensure `len == self.capacity()`.
#[cfg(not(no_global_oom_handling))]
#[inline(never)]
pub(crate) fn grow_one(&mut self) {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub(crate) const fn grow_one(&mut self)
where
A: [const] Allocator,
{
// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
unsafe { self.inner.grow_one(T::LAYOUT) }
}
@@ -415,7 +423,11 @@ const fn new_in(alloc: A, align: Alignment) -> Self {
#[cfg(not(no_global_oom_handling))]
#[inline]
fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self
where
A: [const] Allocator + [const] Destruct,
{
match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
Ok(this) => {
unsafe {
@@ -446,12 +458,16 @@ fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Se
}
}
fn try_allocate_in(
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn try_allocate_in(
capacity: usize,
init: AllocInit,
alloc: A,
elem_layout: Layout,
) -> Result<Self, TryReserveError> {
) -> Result<Self, TryReserveError>
where
A: [const] Allocator + [const] Destruct,
{
// We avoid `unwrap_or_else` here because it bloats the amount of
// LLVM IR generated.
let layout = match layout_array(capacity, elem_layout) {
@@ -519,7 +535,8 @@ fn allocator(&self) -> &A {
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
#[inline]
unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
None
} else {
@@ -572,7 +589,11 @@ unsafe fn do_reserve_and_handle<A: Allocator>(
/// - `elem_layout`'s size must be a multiple of its alignment
#[cfg(not(no_global_oom_handling))]
#[inline]
unsafe fn grow_one(&mut self, elem_layout: Layout) {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn grow_one(&mut self, elem_layout: Layout)
where
A: [const] Allocator,
{
// SAFETY: Precondition passed to caller
if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
handle_error(err);
@@ -651,12 +672,13 @@ unsafe fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
}
#[inline]
fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
const fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
additional > self.capacity(elem_layout.size()).wrapping_sub(len)
}
#[inline]
unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
// Allocators currently return a `NonNull<[u8]>` whose length matches
// the size requested. If that ever changes, the capacity here should
// change to `ptr.len() / size_of::<T>()`.
@@ -669,12 +691,16 @@ unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
/// initially construct `self`
/// - `elem_layout`'s size must be a multiple of its alignment
/// - The sum of `len` and `additional` must be greater than the current capacity
unsafe fn grow_amortized(
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn grow_amortized(
&mut self,
len: usize,
additional: usize,
elem_layout: Layout,
) -> Result<(), TryReserveError> {
) -> Result<(), TryReserveError>
where
A: [const] Allocator,
{
// This is ensured by the calling contexts.
debug_assert!(additional > 0);
@@ -737,15 +763,20 @@ unsafe fn grow_exact(
// not marked inline(never) since we want optimizers to be able to observe the specifics of this
// function, see tests/codegen-llvm/vec-reserve-extend.rs.
#[cold]
unsafe fn finish_grow(
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const unsafe fn finish_grow(
&self,
cap: usize,
elem_layout: Layout,
) -> Result<NonNull<[u8]>, TryReserveError> {
) -> Result<NonNull<[u8]>, TryReserveError>
where
A: [const] Allocator,
{
let new_layout = layout_array(cap, elem_layout)?;
let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
debug_assert_eq!(old_layout.align(), new_layout.align());
// FIXME(const-hack): switch to `debug_assert_eq`
debug_assert!(old_layout.align() == new_layout.align());
unsafe {
// The allocator checks for alignment equality
hint::assert_unchecked(old_layout.align() == new_layout.align());
@@ -755,7 +786,11 @@ unsafe fn finish_grow(
self.alloc.allocate(new_layout)
};
memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
// FIXME(const-hack): switch back to `map_err`
match memory {
Ok(memory) => Ok(memory),
Err(_) => Err(AllocError { layout: new_layout, non_exhaustive: () }.into()),
}
}
/// # Safety
@@ -839,7 +874,8 @@ unsafe fn deallocate(&mut self, elem_layout: Layout) {
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
fn handle_error(e: TryReserveError) -> ! {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn handle_error(e: TryReserveError) -> ! {
match e.kind() {
CapacityOverflow => capacity_overflow(),
AllocError { layout, .. } => handle_alloc_error(layout),
@@ -847,6 +883,11 @@ fn handle_error(e: TryReserveError) -> ! {
}
#[inline]
fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into())
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
// FIXME(const-hack) return to using `map` and `map_err` once `const_closures` is implemented
match elem_layout.repeat(cap) {
Ok((layout, _pad)) => Ok(layout),
Err(_) => Err(CapacityOverflow.into()),
}
}
+29 -4
View File
@@ -81,6 +81,8 @@
use core::hash::{Hash, Hasher};
#[cfg(not(no_global_oom_handling))]
use core::iter;
#[cfg(not(no_global_oom_handling))]
use core::marker::Destruct;
use core::marker::PhantomData;
use core::mem::{self, Assume, ManuallyDrop, MaybeUninit, SizedTypeProperties, TransmuteFrom};
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
@@ -519,7 +521,8 @@ pub const fn new() -> Self {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[rustc_diagnostic_item = "vec_with_capacity"]
pub fn with_capacity(capacity: usize) -> Self {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn with_capacity(capacity: usize) -> Self {
Self::with_capacity_in(capacity, Global)
}
@@ -881,6 +884,16 @@ pub fn into_parts(self) -> (NonNull<T>, usize, usize) {
// SAFETY: A `Vec` always has a non-null pointer.
(unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
}
/// Leaks the `Vec<T>` to be interned statically. This must be done for all
/// `Vec<T>` created during compile time whose buffer escapes into the final
/// value of a constant: `const_allocate`d pointers are rejected there unless
/// they have been made global first.
#[unstable(feature = "const_heap", issue = "79597")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn const_leak(mut self) -> &'static [T] {
    // Promote the const-eval heap buffer to an immutable global.
    unsafe { core::intrinsics::const_make_global(self.as_mut_ptr().cast()) };
    // Suppress the destructor: the buffer now lives for 'static.
    let me = ManuallyDrop::new(self);
    // SAFETY: the buffer holds `me.len` initialized elements and is never freed.
    unsafe { slice::from_raw_parts(me.as_ptr(), me.len) }
}
}
impl<T, A: Allocator> Vec<T, A> {
@@ -962,7 +975,11 @@ pub const fn new_in(alloc: A) -> Self {
#[cfg(not(no_global_oom_handling))]
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn with_capacity_in(capacity: usize, alloc: A) -> Self
where
A: [const] Allocator + [const] Destruct,
{
Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
}
@@ -2575,7 +2592,11 @@ fn drop(&mut self) {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push_back", "put", "append")]
pub fn push(&mut self, value: T) {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn push(&mut self, value: T)
where
A: [const] Allocator,
{
let _ = self.push_mut(value);
}
@@ -2664,7 +2685,11 @@ pub fn push_within_capacity(&mut self, value: T) -> Result<&mut T, T> {
#[inline]
#[unstable(feature = "push_mut", issue = "135974")]
#[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
pub fn push_mut(&mut self, value: T) -> &mut T {
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub const fn push_mut(&mut self, value: T) -> &mut T
where
A: [const] Allocator,
{
// Inform codegen that the length does not change across grow_one().
let len = self.len;
// This will panic or abort if we would allocate > isize::MAX bytes
+7
View File
@@ -20,6 +20,13 @@
#![feature(assert_matches)]
#![feature(box_vec_non_null)]
#![feature(char_internals)]
#![feature(const_alloc_error)]
#![feature(const_cmp)]
#![feature(const_convert)]
#![feature(const_destruct)]
#![feature(const_heap)]
#![feature(const_option_ops)]
#![feature(const_try)]
#![feature(copied_into_inner)]
#![feature(core_intrinsics)]
#![feature(exact_size_is_empty)]
+1
View File
@@ -1,5 +1,6 @@
#![feature(allocator_api)]
#![feature(alloc_layout_extra)]
#![feature(const_heap)]
#![feature(deque_extend_front)]
#![feature(iter_array_chunks)]
#![feature(assert_matches)]
+16
View File
@@ -2749,3 +2749,19 @@ fn zst_collections_iter_nth_back_regression() {
list.push_back(Thing);
let _ = list.into_iter().nth_back(1);
}
#[test]
fn const_heap() {
    // Build a Vec entirely at compile time: allocate capacity, push the
    // powers of two below 42 (1, 2, 4, 8, 16, 32), then intern the buffer
    // with `const_leak` so the allocation may appear in the final constant.
    const X: &'static [u32] = {
        let mut v = Vec::with_capacity(6);
        let mut x = 1;
        // `while` loop because `for`/iterators are not allowed in const blocks.
        while x < 42 {
            v.push(x);
            x *= 2;
        }
        assert!(v.len() == 6);
        v.const_leak()
    };
    assert_eq!([1, 2, 4, 8, 16, 32], X);
}
+5 -2
View File
@@ -102,6 +102,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///
/// [*currently allocated*]: #currently-allocated-memory
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[const_trait]
pub unsafe trait Allocator {
/// Attempts to allocate a block of memory.
///
@@ -368,9 +370,10 @@ fn by_ref(&self) -> &Self
}
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl<A> Allocator for &A
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
unsafe impl<A> const Allocator for &A
where
A: Allocator + ?Sized,
A: [const] Allocator + ?Sized,
{
#[inline]
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
@@ -0,0 +1,5 @@
#![feature(const_heap)]
const V: Vec<i32> = Vec::with_capacity(1);
//~^ ERROR: encountered `const_allocate` pointer in final value that was not made global
fn main() {}
@@ -0,0 +1,10 @@
error: encountered `const_allocate` pointer in final value that was not made global
--> $DIR/vec-not-made-global.rs:2:1
|
LL | const V: Vec<i32> = Vec::with_capacity(1);
| ^^^^^^^^^^^^^^^^^
|
= note: use `const_make_global` to turn allocated pointers into immutable globals before returning
error: aborting due to 1 previous error