Eliminate ZST allocations in Box
and Vec
This commit is contained in:
parent
27e10c5292
commit
d24be14276
4 changed files with 115 additions and 26 deletions
|
@@ -157,12 +157,12 @@ use core::hash::{Hash, Hasher};
|
|||
use core::iter::FusedIterator;
|
||||
use core::marker::Tuple;
|
||||
use core::marker::Unsize;
|
||||
use core::mem;
|
||||
use core::mem::{self, SizedTypeProperties};
|
||||
use core::ops::{
|
||||
CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
|
||||
};
|
||||
use core::pin::Pin;
|
||||
use core::ptr::{self, Unique};
|
||||
use core::ptr::{self, NonNull, Unique};
|
||||
use core::task::{Context, Poll};
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
|
@@ -479,8 +479,12 @@ impl<T, A: Allocator> Box<T, A> {
|
|||
where
|
||||
A: Allocator,
|
||||
{
|
||||
let layout = Layout::new::<mem::MaybeUninit<T>>();
|
||||
let ptr = alloc.allocate(layout)?.cast();
|
||||
let ptr = if T::IS_ZST {
|
||||
NonNull::dangling()
|
||||
} else {
|
||||
let layout = Layout::new::<mem::MaybeUninit<T>>();
|
||||
alloc.allocate(layout)?.cast()
|
||||
};
|
||||
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
|
||||
}
|
||||
|
||||
|
@@ -549,8 +553,12 @@ impl<T, A: Allocator> Box<T, A> {
|
|||
where
|
||||
A: Allocator,
|
||||
{
|
||||
let layout = Layout::new::<mem::MaybeUninit<T>>();
|
||||
let ptr = alloc.allocate_zeroed(layout)?.cast();
|
||||
let ptr = if T::IS_ZST {
|
||||
NonNull::dangling()
|
||||
} else {
|
||||
let layout = Layout::new::<mem::MaybeUninit<T>>();
|
||||
alloc.allocate_zeroed(layout)?.cast()
|
||||
};
|
||||
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
|
||||
}
|
||||
|
||||
|
@@ -675,14 +683,16 @@ impl<T> Box<[T]> {
|
|||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
#[inline]
|
||||
pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
|
||||
unsafe {
|
||||
let ptr = if T::IS_ZST || len == 0 {
|
||||
NonNull::dangling()
|
||||
} else {
|
||||
let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
|
||||
Ok(l) => l,
|
||||
Err(_) => return Err(AllocError),
|
||||
};
|
||||
let ptr = Global.allocate(layout)?;
|
||||
Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
|
||||
}
|
||||
Global.allocate(layout)?.cast()
|
||||
};
|
||||
unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
|
||||
}
|
||||
|
||||
/// Constructs a new boxed slice with uninitialized contents, with the memory
|
||||
|
@@ -707,14 +717,16 @@ impl<T> Box<[T]> {
|
|||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
#[inline]
|
||||
pub fn try_new_zeroed_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
|
||||
unsafe {
|
||||
let ptr = if T::IS_ZST || len == 0 {
|
||||
NonNull::dangling()
|
||||
} else {
|
||||
let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
|
||||
Ok(l) => l,
|
||||
Err(_) => return Err(AllocError),
|
||||
};
|
||||
let ptr = Global.allocate_zeroed(layout)?;
|
||||
Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
|
||||
}
|
||||
Global.allocate_zeroed(layout)?.cast()
|
||||
};
|
||||
unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1219,7 +1231,9 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
|
|||
|
||||
unsafe {
|
||||
let layout = Layout::for_value_raw(ptr.as_ptr());
|
||||
self.1.deallocate(From::from(ptr.cast()), layout)
|
||||
if layout.size() != 0 {
|
||||
self.1.deallocate(From::from(ptr.cast()), layout);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -432,16 +432,26 @@ impl<T, A: Allocator> RawVec<T, A> {
|
|||
let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
|
||||
// See current_memory() why this assert is here
|
||||
let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
|
||||
let ptr = unsafe {
|
||||
// `Layout::array` cannot overflow here because it would have
|
||||
// overflowed earlier when capacity was larger.
|
||||
let new_size = mem::size_of::<T>().unchecked_mul(cap);
|
||||
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
|
||||
self.alloc
|
||||
.shrink(ptr, layout, new_layout)
|
||||
.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
|
||||
};
|
||||
self.set_ptr_and_cap(ptr, cap);
|
||||
|
||||
// If shrinking to 0, deallocate the buffer. We don't reach this point
|
||||
// for the T::IS_ZST case since current_memory() will have returned
|
||||
// None.
|
||||
if cap == 0 {
|
||||
unsafe { self.alloc.deallocate(ptr, layout) };
|
||||
self.ptr = Unique::dangling();
|
||||
self.cap = 0;
|
||||
} else {
|
||||
let ptr = unsafe {
|
||||
// `Layout::array` cannot overflow here because it would have
|
||||
// overflowed earlier when capacity was larger.
|
||||
let new_size = mem::size_of::<T>().unchecked_mul(cap);
|
||||
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
|
||||
self.alloc
|
||||
.shrink(ptr, layout, new_layout)
|
||||
.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
|
||||
};
|
||||
self.set_ptr_and_cap(ptr, cap);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -2497,3 +2497,68 @@ fn test_into_flattened_size_overflow() {
|
|||
let v = vec![[(); usize::MAX]; 2];
|
||||
let _ = v.into_flattened();
|
||||
}
|
||||
|
||||
#[cfg(not(bootstrap))]
|
||||
#[test]
|
||||
fn test_box_zero_allocator() {
|
||||
use core::{alloc::AllocError, cell::RefCell};
|
||||
use std::collections::HashSet;
|
||||
|
||||
// Track ZST allocations and ensure that they all have a matching free.
|
||||
struct ZstTracker {
|
||||
state: RefCell<(HashSet<usize>, usize)>,
|
||||
}
|
||||
unsafe impl Allocator for ZstTracker {
|
||||
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
|
||||
let ptr = if layout.size() == 0 {
|
||||
let mut state = self.state.borrow_mut();
|
||||
let addr = state.1;
|
||||
assert!(state.0.insert(addr));
|
||||
state.1 += 1;
|
||||
std::println!("allocating {addr}");
|
||||
std::ptr::invalid_mut(addr)
|
||||
} else {
|
||||
unsafe { std::alloc::alloc(layout) }
|
||||
};
|
||||
Ok(NonNull::slice_from_raw_parts(NonNull::new(ptr).ok_or(AllocError)?, layout.size()))
|
||||
}
|
||||
|
||||
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
|
||||
if layout.size() == 0 {
|
||||
let addr = ptr.as_ptr() as usize;
|
||||
let mut state = self.state.borrow_mut();
|
||||
std::println!("freeing {addr}");
|
||||
assert!(state.0.remove(&addr), "ZST free that wasn't allocated");
|
||||
} else {
|
||||
unsafe { std::alloc::dealloc(ptr.as_ptr(), layout) }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Start the state at 100 to avoid returning null pointers.
|
||||
let alloc = ZstTracker { state: RefCell::new((HashSet::new(), 100)) };
|
||||
|
||||
// Ensure that unsizing retains the same behavior.
|
||||
{
|
||||
let b1: Box<[u8; 0], &ZstTracker> = Box::new_in([], &alloc);
|
||||
let b2: Box<[u8], &ZstTracker> = b1.clone();
|
||||
let _b3: Box<[u8], &ZstTracker> = b2.clone();
|
||||
}
|
||||
|
||||
// Ensure that shrinking doesn't leak a ZST allocation.
|
||||
{
|
||||
let mut v1: Vec<u8, &ZstTracker> = Vec::with_capacity_in(100, &alloc);
|
||||
v1.shrink_to_fit();
|
||||
}
|
||||
|
||||
// Ensure that conversion to/from vec works.
|
||||
{
|
||||
let v1: Vec<(), &ZstTracker> = Vec::with_capacity_in(100, &alloc);
|
||||
let _b1: Box<[()], &ZstTracker> = v1.into_boxed_slice();
|
||||
let b2: Box<[()], &ZstTracker> = Box::new_in([(), (), ()], &alloc);
|
||||
let _v2: Vec<(), &ZstTracker> = b2.into();
|
||||
}
|
||||
|
||||
// Ensure all ZSTs have been freed.
|
||||
assert!(alloc.state.borrow().0.is_empty());
|
||||
}
|
||||
|
|
|
@@ -1,2 +1,2 @@
|
|||
thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:524:5
|
||||
thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:534:5
|
||||
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
|
||||
|
|
Loading…
Add table
Reference in a new issue