mirror of
https://github.com/rust-lang/rust.git
synced 2026-04-26 13:01:27 +03:00
Disallow ZST allocations with TypedArena.
`DroplessArena::alloc` already disallows ZST allocation. `TypedArena::alloc` allows it, but (a) it's never used, and (b) writing to `NonNull::dangling()` seems dubious, even if the write is zero-sized. This commit changes it to panic on a ZST instead. This eliminates an untested code path, and we shouldn't be allocating ZSTs anyway. It also eliminates an unused ZST code path in `clear_last_chunk`. Tests are updated accordingly.
This commit is contained in:
@@ -140,25 +140,19 @@ impl<T> TypedArena<T> {
|
|||||||
/// Allocates an object in the `TypedArena`, returning a reference to it.
|
/// Allocates an object in the `TypedArena`, returning a reference to it.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn alloc(&self, object: T) -> &mut T {
|
pub fn alloc(&self, object: T) -> &mut T {
|
||||||
|
assert!(size_of::<T>() != 0);
|
||||||
|
|
||||||
if self.ptr == self.end {
|
if self.ptr == self.end {
|
||||||
self.grow(1)
|
self.grow(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
if size_of::<T>() == 0 {
|
let ptr = self.ptr.get();
|
||||||
self.ptr.set(self.ptr.get().wrapping_byte_add(1));
|
// Advance the pointer.
|
||||||
let ptr = ptr::NonNull::<T>::dangling().as_ptr();
|
self.ptr.set(self.ptr.get().add(1));
|
||||||
// Don't drop the object. This `write` is equivalent to `forget`.
|
// Write into uninitialized memory.
|
||||||
ptr::write(ptr, object);
|
ptr::write(ptr, object);
|
||||||
&mut *ptr
|
&mut *ptr
|
||||||
} else {
|
|
||||||
let ptr = self.ptr.get();
|
|
||||||
// Advance the pointer.
|
|
||||||
self.ptr.set(self.ptr.get().add(1));
|
|
||||||
// Write into uninitialized memory.
|
|
||||||
ptr::write(ptr, object);
|
|
||||||
&mut *ptr
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -302,16 +296,10 @@ fn clear_last_chunk(&self, last_chunk: &mut ArenaChunk<T>) {
|
|||||||
let end = self.ptr.get().addr();
|
let end = self.ptr.get().addr();
|
||||||
// We then calculate the number of elements to be dropped in the last chunk,
|
// We then calculate the number of elements to be dropped in the last chunk,
|
||||||
// which is the filled area's length.
|
// which is the filled area's length.
|
||||||
let diff = if size_of::<T>() == 0 {
|
assert_ne!(size_of::<T>(), 0);
|
||||||
// `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
|
// FIXME: this should *likely* use `offset_from`, but more
|
||||||
// the number of zero-sized values in the last and only chunk, just out of caution.
|
// investigation is needed (including running tests in miri).
|
||||||
// Recall that `end` was incremented for each allocated value.
|
let diff = (end - start) / size_of::<T>();
|
||||||
end - start
|
|
||||||
} else {
|
|
||||||
// FIXME: this should *likely* use `offset_from`, but more
|
|
||||||
// investigation is needed (including running tests in miri).
|
|
||||||
(end - start) / size_of::<T>()
|
|
||||||
};
|
|
||||||
// Pass that to the `destroy` method.
|
// Pass that to the `destroy` method.
|
||||||
unsafe {
|
unsafe {
|
||||||
last_chunk.destroy(diff);
|
last_chunk.destroy(diff);
|
||||||
|
|||||||
@@ -22,7 +22,6 @@ fn clear(&mut self) {
|
|||||||
if let Some(last_chunk) = chunks_borrow.last_mut() {
|
if let Some(last_chunk) = chunks_borrow.last_mut() {
|
||||||
self.clear_last_chunk(last_chunk);
|
self.clear_last_chunk(last_chunk);
|
||||||
let len = chunks_borrow.len();
|
let len = chunks_borrow.len();
|
||||||
// If `T` is ZST, code below has no effect.
|
|
||||||
for mut chunk in chunks_borrow.drain(..len - 1) {
|
for mut chunk in chunks_borrow.drain(..len - 1) {
|
||||||
chunk.destroy(chunk.entries);
|
chunk.destroy(chunk.entries);
|
||||||
}
|
}
|
||||||
@@ -117,18 +116,6 @@ fn test_noncopy() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_typed_arena_zero_sized() {
|
|
||||||
let arena = TypedArena::default();
|
|
||||||
#[cfg(not(miri))]
|
|
||||||
const N: usize = 100000;
|
|
||||||
#[cfg(miri)]
|
|
||||||
const N: usize = 1000;
|
|
||||||
for _ in 0..N {
|
|
||||||
arena.alloc(());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_typed_arena_clear() {
|
fn test_typed_arena_clear() {
|
||||||
let mut arena = TypedArena::default();
|
let mut arena = TypedArena::default();
|
||||||
@@ -207,7 +194,8 @@ fn test_typed_arena_drop_on_clear() {
|
|||||||
static DROP_COUNTER: Cell<u32> = Cell::new(0)
|
static DROP_COUNTER: Cell<u32> = Cell::new(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
struct SmallDroppable;
|
#[allow(unused)]
|
||||||
|
struct SmallDroppable(u8);
|
||||||
|
|
||||||
impl Drop for SmallDroppable {
|
impl Drop for SmallDroppable {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
@@ -222,7 +210,7 @@ fn test_typed_arena_drop_small_count() {
|
|||||||
let arena: TypedArena<SmallDroppable> = TypedArena::default();
|
let arena: TypedArena<SmallDroppable> = TypedArena::default();
|
||||||
for _ in 0..100 {
|
for _ in 0..100 {
|
||||||
// Allocate something with drop glue to make sure it doesn't leak.
|
// Allocate something with drop glue to make sure it doesn't leak.
|
||||||
arena.alloc(SmallDroppable);
|
arena.alloc(SmallDroppable(0));
|
||||||
}
|
}
|
||||||
// dropping
|
// dropping
|
||||||
};
|
};
|
||||||
|
|||||||
Reference in New Issue
Block a user