Pass a Layout to oom
As discussed in https://github.com/rust-lang/rust/issues/49668#issuecomment-384893456 and subsequent comments, there are use cases where the OOM handler needs to know the size of the allocation that failed. The alignment might also be the cause of an allocation failure, so it is provided as well.
commit 0f4ef003ac
parent 5015fa346c
10 changed files with 161 additions and 112 deletions
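In short: every call site that used to call `oom()` with no arguments now builds (or reuses) the `Layout` of the request that failed and passes it through. As a rough, self-contained illustration of what a layout-aware handler can do with that information, here is a minimal sketch; `report_alloc_failure` and its message format are inventions for the example, not part of this commit:

    use std::alloc::Layout;
    use std::process;

    // Hypothetical reporter: with the layout available, the handler can say
    // *what* failed (a huge size, or an unusual alignment), not just that
    // something failed.
    fn report_alloc_failure(layout: Layout) -> ! {
        eprintln!(
            "memory allocation of {} bytes (align {}) failed",
            layout.size(),
            layout.align()
        );
        process::abort()
    }

    fn main() {
        // Pretend a 1 MiB, 64-byte-aligned request just failed:
        let layout = Layout::from_size_align(1 << 20, 64).unwrap();
        report_alloc_failure(layout);
    }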
@@ -115,7 +115,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         if !ptr.is_null() {
             ptr as *mut u8
         } else {
-            oom()
+            oom(layout)
         }
     }
 }

@@ -134,12 +134,13 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
 }

 #[rustc_allocator_nounwind]
-pub fn oom() -> ! {
-    extern {
+pub fn oom(layout: Layout) -> ! {
+    #[allow(improper_ctypes)]
+    extern "Rust" {
         #[lang = "oom"]
-        fn oom_impl() -> !;
+        fn oom_impl(layout: Layout) -> !;
     }
-    unsafe { oom_impl() }
+    unsafe { oom_impl(layout) }
 }

 #[cfg(test)]

@@ -154,7 +155,7 @@ mod tests {
         unsafe {
             let layout = Layout::from_size_align(1024, 1).unwrap();
             let ptr = Global.alloc_zeroed(layout.clone())
-                .unwrap_or_else(|_| oom());
+                .unwrap_or_else(|_| oom(layout));

             let mut i = ptr.cast::<u8>().as_ptr();
             let end = i.offset(layout.size() as isize);

@@ -553,7 +553,7 @@ impl<T: ?Sized> Arc<T> {
         let layout = Layout::for_value(&*fake_ptr);

         let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| oom());
+            .unwrap_or_else(|_| oom(layout));

         // Initialize the real ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;

@@ -96,14 +96,15 @@ impl<T, A: Alloc> RawVec<T, A> {
             NonNull::<T>::dangling().as_opaque()
         } else {
             let align = mem::align_of::<T>();
+            let layout = Layout::from_size_align(alloc_size, align).unwrap();
             let result = if zeroed {
-                a.alloc_zeroed(Layout::from_size_align(alloc_size, align).unwrap())
+                a.alloc_zeroed(layout)
             } else {
-                a.alloc(Layout::from_size_align(alloc_size, align).unwrap())
+                a.alloc(layout)
             };
             match result {
                 Ok(ptr) => ptr,
-                Err(_) => oom(),
+                Err(_) => oom(layout),
             }
         };

@@ -318,7 +319,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                              new_size);
                 match ptr_res {
                     Ok(ptr) => (new_cap, ptr.cast().into()),
-                    Err(_) => oom(),
+                    Err(_) => oom(Layout::from_size_align_unchecked(new_size, cur.align())),
                 }
             }
             None => {

@@ -327,7 +328,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                 match self.a.alloc_array::<T>(new_cap) {
                     Ok(ptr) => (new_cap, ptr.into()),
-                    Err(_) => oom(Layout::array::<T>(new_cap).unwrap()),
+                    Err(_) => oom(Layout::array::<T>(new_cap).unwrap()),
                 }
             }
         };

@@ -389,37 +390,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
         -> Result<(), CollectionAllocErr> {

-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they gave a bad `used_cap`.
-            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
-                return Ok(());
-            }
-
-            // Nothing we can really do about these checks :(
-            let new_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
-
-            alloc_guard(new_layout.size())?;
-
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
-
-            self.ptr = res?.cast().into();
-            self.cap = new_cap;
-
-            Ok(())
-        }
+        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
     }

     /// Ensures that the buffer contains at least enough space to hold

@@ -443,9 +414,9 @@ impl<T, A: Alloc> RawVec<T, A> {
     ///
     /// Aborts on OOM
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.try_reserve_exact(used_cap, needed_extra_cap) {
+        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => oom(),
+            Err(AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
         }
     }

@@ -467,37 +438,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// The same as `reserve`, but returns on errors instead of panicking or aborting.
     pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
         -> Result<(), CollectionAllocErr> {
-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they give a bad `used_cap`
-            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
-                return Ok(());
-            }
-
-            let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?;
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
-
-            // FIXME: may crash and burn on over-reserve
-            alloc_guard(new_layout.size())?;
-
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
-
-            self.ptr = res?.cast().into();
-            self.cap = new_cap;
-
-            Ok(())
-        }
+        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
     }

     /// Ensures that the buffer contains at least enough space to hold

@@ -553,12 +494,12 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// # }
     /// ```
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.try_reserve(used_cap, needed_extra_cap) {
+        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => oom(),
+            Err(AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
         }
     }

     /// Attempts to ensure that the buffer contains at least enough space to hold
     /// `used_cap + needed_extra_cap` elements. If it doesn't already have
     /// enough capacity, will reallocate in place enough space plus comfortable slack

@@ -670,7 +611,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                  old_layout,
                                  new_size) {
                     Ok(p) => self.ptr = p.cast().into(),
-                    Err(_) => oom(),
+                    Err(_) => oom(Layout::from_size_align_unchecked(new_size, align)),
                 }
             }
             self.cap = amount;

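On the realloc failure paths above, the `Layout` handed to `oom` describes the failed request: the new size paired with the existing alignment, assembled with `Layout::from_size_align_unchecked` (presumably safe here because both values are already known to form a valid layout). A standalone sketch of that construction, with arbitrary example values:

    use std::alloc::Layout;

    fn main() {
        let old = Layout::from_size_align(16, 8).unwrap();
        let new_size = 1 << 20;

        // What the failure path reports: the requested new size,
        // at the allocation's existing alignment.
        let failed = unsafe { Layout::from_size_align_unchecked(new_size, old.align()) };
        assert_eq!(failed.size(), 1 << 20);
        assert_eq!(failed.align(), 8);
    }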
@@ -678,6 +619,73 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }

+enum Fallibility {
+    Fallible,
+    Infallible,
+}
+
+use self::Fallibility::*;
+
+enum ReserveStrategy {
+    Exact,
+    Amortized,
+}
+
+use self::ReserveStrategy::*;
+
+impl<T, A: Alloc> RawVec<T, A> {
+    fn reserve_internal(
+        &mut self,
+        used_cap: usize,
+        needed_extra_cap: usize,
+        fallibility: Fallibility,
+        strategy: ReserveStrategy,
+    ) -> Result<(), CollectionAllocErr> {
+        unsafe {
+            use alloc::AllocErr;
+
+            // NOTE: we don't early branch on ZSTs here because we want this
+            // to actually catch "asking for more than usize::MAX" in that case.
+            // If we make it past the first branch then we are guaranteed to
+            // panic.
+
+            // Don't actually need any more capacity.
+            // Wrapping in case they gave a bad `used_cap`.
+            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+                return Ok(());
+            }
+
+            // Nothing we can really do about these checks :(
+            let new_cap = match strategy {
+                Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?,
+                Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
+            };
+            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
+
+            alloc_guard(new_layout.size())?;
+
+            let res = match self.current_layout() {
+                Some(layout) => {
+                    debug_assert!(new_layout.align() == layout.align());
+                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
+                }
+                None => self.a.alloc(new_layout),
+            };
+
+            match (&res, fallibility) {
+                (Err(AllocErr), Infallible) => oom(new_layout),
+                _ => {}
+            }
+
+            self.ptr = res?.cast().into();
+            self.cap = new_cap;
+
+            Ok(())
+        }
+    }
+
+}
+
 impl<T> RawVec<T, Global> {
     /// Converts the entire buffer into `Box<[T]>`.
     ///

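This hunk is the heart of the refactor: rather than duplicating the growth logic across `reserve`, `reserve_exact`, `try_reserve`, and `try_reserve_exact`, a single `reserve_internal` takes two mode enums and the public methods become thin wrappers; the same `Fallibility` trick reappears in `HashMap` and `RawTable` below. Here is a self-contained sketch of the pattern with stand-in names (`AllocError`, `grow_internal`, and the `succeed` flag are inventions for the example):

    #[derive(Debug)]
    struct AllocError;

    enum Fallibility {
        Fallible,
        Infallible,
    }
    use self::Fallibility::*;

    fn grow_internal(succeed: bool, fallibility: Fallibility) -> Result<(), AllocError> {
        if !succeed {
            match fallibility {
                // Fallible: hand the error back to the caller.
                Fallible => return Err(AllocError),
                // Infallible: where the real code calls oom(new_layout).
                Infallible => panic!("allocation failed"),
            }
        }
        Ok(())
    }

    // The public API keeps its old shape as thin wrappers:
    fn try_grow(succeed: bool) -> Result<(), AllocError> {
        grow_internal(succeed, Fallible)
    }

    fn grow(succeed: bool) {
        match grow_internal(succeed, Infallible) {
            // The infallible path already aborted, so this arm is dead code.
            Err(AllocError) => unreachable!(),
            Ok(()) => {}
        }
    }

    fn main() {
        assert!(try_grow(false).is_err());
        grow(true);
    }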
@@ -668,7 +668,7 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::for_value(&*fake_ptr);

         let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| oom());
+            .unwrap_or_else(|_| oom(layout));

         // Initialize the real RcBox
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;

@@ -13,14 +13,14 @@
 #![unstable(issue = "32838", feature = "allocator_api")]

 #[doc(inline)] #[allow(deprecated)] pub use alloc_crate::alloc::Heap;
-#[doc(inline)] pub use alloc_crate::alloc::{Global, oom};
+#[doc(inline)] pub use alloc_crate::alloc::{Global, Layout, oom};
 #[doc(inline)] pub use alloc_system::System;
 #[doc(inline)] pub use core::alloc::*;

 #[cfg(not(test))]
 #[doc(hidden)]
 #[lang = "oom"]
-pub extern fn rust_oom() -> ! {
+pub extern fn rust_oom(_: Layout) -> ! {
     rtabort!("memory allocation failed");
 }

@@ -11,7 +11,7 @@
 use self::Entry::*;
 use self::VacantEntryState::*;

-use alloc::{CollectionAllocErr, oom};
+use alloc::CollectionAllocErr;
 use cell::Cell;
 use borrow::Borrow;
 use cmp::max;

@@ -23,8 +23,10 @@ use mem::{self, replace};
 use ops::{Deref, Index};
 use sys;

-use super::table::{self, Bucket, EmptyBucket, FullBucket, FullBucketMut, RawTable, SafeHash};
+use super::table::{self, Bucket, EmptyBucket, Fallibility, FullBucket, FullBucketMut, RawTable,
+                   SafeHash};
 use super::table::BucketState::{Empty, Full};
+use super::table::Fallibility::{Fallible, Infallible};

 const MIN_NONZERO_RAW_CAPACITY: usize = 32; // must be a power of two

@@ -783,11 +785,11 @@ impl<K, V, S> HashMap<K, V, S>
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn reserve(&mut self, additional: usize) {
-        match self.try_reserve(additional) {
+        match self.reserve_internal(additional, Infallible) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr) => oom(),
+            Err(CollectionAllocErr::AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
         }
     }

     /// Tries to reserve capacity for at least `additional` more elements to be inserted

@@ -809,17 +811,24 @@ impl<K, V, S> HashMap<K, V, S>
     /// ```
     #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
     pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+        self.reserve_internal(additional, Fallible)
+    }
+
+    fn reserve_internal(&mut self, additional: usize, fallibility: Fallibility)
+        -> Result<(), CollectionAllocErr> {
+
         let remaining = self.capacity() - self.len(); // this can't overflow
         if remaining < additional {
-            let min_cap = self.len().checked_add(additional)
+            let min_cap = self.len()
+                .checked_add(additional)
                 .ok_or(CollectionAllocErr::CapacityOverflow)?;
             let raw_cap = self.resize_policy.try_raw_capacity(min_cap)?;
-            self.try_resize(raw_cap)?;
+            self.try_resize(raw_cap, fallibility)?;
         } else if self.table.tag() && remaining <= self.len() {
             // Probe sequence is too long and table is half full,
             // resize early to reduce probing length.
             let new_capacity = self.table.capacity() * 2;
-            self.try_resize(new_capacity)?;
+            self.try_resize(new_capacity, fallibility)?;
         }
         Ok(())
     }

@@ -831,11 +840,21 @@ impl<K, V, S> HashMap<K, V, S>
     /// 2) Ensure `new_raw_cap` is a power of two or zero.
     #[inline(never)]
     #[cold]
-    fn try_resize(&mut self, new_raw_cap: usize) -> Result<(), CollectionAllocErr> {
+    fn try_resize(
+        &mut self,
+        new_raw_cap: usize,
+        fallibility: Fallibility,
+    ) -> Result<(), CollectionAllocErr> {
         assert!(self.table.size() <= new_raw_cap);
         assert!(new_raw_cap.is_power_of_two() || new_raw_cap == 0);

-        let mut old_table = replace(&mut self.table, RawTable::try_new(new_raw_cap)?);
+        let mut old_table = replace(
+            &mut self.table,
+            match fallibility {
+                Infallible => RawTable::new(new_raw_cap),
+                Fallible => RawTable::try_new(new_raw_cap)?,
+            }
+        );
         let old_size = old_table.size();

         if old_table.size() == 0 {

@@ -711,11 +711,21 @@ fn test_offset_calculation() {
     assert_eq!(calculate_offsets(6, 12, 4), (8, 20, false));
 }

+pub(crate) enum Fallibility {
+    Fallible,
+    Infallible,
+}
+
+use self::Fallibility::*;
+
 impl<K, V> RawTable<K, V> {
     /// Does not initialize the buckets. The caller should ensure they,
     /// at the very least, set every hash to EMPTY_BUCKET.
     /// Returns an error if it cannot allocate or capacity overflows.
-    unsafe fn try_new_uninitialized(capacity: usize) -> Result<RawTable<K, V>, CollectionAllocErr> {
+    unsafe fn new_uninitialized_internal(
+        capacity: usize,
+        fallibility: Fallibility,
+    ) -> Result<RawTable<K, V>, CollectionAllocErr> {
         if capacity == 0 {
             return Ok(RawTable {
                 size: 0,

@@ -754,8 +764,12 @@ impl<K, V> RawTable<K, V> {
             return Err(CollectionAllocErr::CapacityOverflow);
         }

-        let buffer = Global.alloc(Layout::from_size_align(size, alignment)
-            .map_err(|_| CollectionAllocErr::CapacityOverflow)?)?;
+        let layout = Layout::from_size_align(size, alignment)
+            .map_err(|_| CollectionAllocErr::CapacityOverflow)?;
+        let buffer = Global.alloc(layout).map_err(|e| match fallibility {
+            Infallible => oom(layout),
+            Fallible => e,
+        })?;

         Ok(RawTable {
             capacity_mask: capacity.wrapping_sub(1),

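A detail worth noting in this hunk: in the `Infallible` arm the `map_err` closure calls `oom(layout)`, which never returns, and the never type coerces to the closure's error type, so both arms type-check. A minimal sketch of the same shape with stand-in types (`AllocErr`, `alloc`, and `demo` are local to the example):

    #[derive(Debug, PartialEq)]
    struct AllocErr;

    // Stand-in for Global.alloc: fails when asked to.
    fn alloc(fail: bool) -> Result<u32, AllocErr> {
        if fail { Err(AllocErr) } else { Ok(7) }
    }

    fn demo(infallible: bool) -> Result<u32, AllocErr> {
        alloc(false).map_err(|e| match infallible {
            // Diverging arm, like oom(layout): `!` coerces to AllocErr.
            true => panic!("out of memory"),
            false => e,
        })
    }

    fn main() {
        assert_eq!(demo(true), Ok(7));
    }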
@@ -768,9 +782,9 @@ impl<K, V> RawTable<K, V> {
     /// Does not initialize the buckets. The caller should ensure they,
     /// at the very least, set every hash to EMPTY_BUCKET.
     unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
-        match Self::try_new_uninitialized(capacity) {
+        match Self::new_uninitialized_internal(capacity, Infallible) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr) => oom(),
+            Err(CollectionAllocErr::AllocErr) => unreachable!(),
             Ok(table) => { table }
         }
     }

@@ -794,22 +808,29 @@ impl<K, V> RawTable<K, V> {
         }
     }

-    /// Tries to create a new raw table from a given capacity. If it cannot allocate,
-    /// it returns with AllocErr.
-    pub fn try_new(capacity: usize) -> Result<RawTable<K, V>, CollectionAllocErr> {
+    fn new_internal(
+        capacity: usize,
+        fallibility: Fallibility,
+    ) -> Result<RawTable<K, V>, CollectionAllocErr> {
         unsafe {
-            let ret = RawTable::try_new_uninitialized(capacity)?;
+            let ret = RawTable::new_uninitialized_internal(capacity, fallibility)?;
             ptr::write_bytes(ret.hashes.ptr(), 0, capacity);
             Ok(ret)
         }
     }

+    /// Tries to create a new raw table from a given capacity. If it cannot allocate,
+    /// it returns with AllocErr.
+    pub fn try_new(capacity: usize) -> Result<RawTable<K, V>, CollectionAllocErr> {
+        Self::new_internal(capacity, Fallible)
+    }
+
     /// Creates a new raw table from a given capacity. All buckets are
     /// initially empty.
     pub fn new(capacity: usize) -> RawTable<K, V> {
-        match Self::try_new(capacity) {
+        match Self::new_internal(capacity, Infallible) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr) => oom(),
+            Err(CollectionAllocErr::AllocErr) => unreachable!(),
             Ok(table) => { table }
         }
     }

@@ -10,11 +10,11 @@

 #![feature(allocator_api, nonnull)]

-use std::alloc::{Alloc, Global, oom};
+use std::alloc::{Alloc, Global, Layout, oom};

 fn main() {
     unsafe {
-        let ptr = Global.alloc_one::<i32>().unwrap_or_else(|_| oom());
+        let ptr = Global.alloc_one::<i32>().unwrap_or_else(|_| oom(Layout::new::<i32>()));
         *ptr.as_ptr() = 4;
         assert_eq!(*ptr.as_ptr(), 4);
         Global.dealloc_one(ptr);

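`Layout::new::<i32>()` is just the size/alignment pair of `i32`, so the updated test reports exactly what the failed one-element allocation would have requested. A quick standalone check:

    use std::alloc::Layout;
    use std::mem;

    fn main() {
        // Layout::new::<T>() bundles T's size and alignment, which is what
        // the updated call site passes to oom on failure.
        let layout = Layout::new::<i32>();
        assert_eq!(layout.size(), mem::size_of::<i32>());
        assert_eq!(layout.align(), mem::align_of::<i32>());
    }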
@@ -50,7 +50,7 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }

-        let ret = Global.alloc(layout.clone()).unwrap_or_else(|_| oom());
+        let ret = Global.alloc(layout).unwrap_or_else(|_| oom(layout));

         if PRINT {
             println!("allocate({:?}) = {:?}", layout, ret);

@@ -72,8 +72,8 @@ unsafe fn test_triangle() -> bool {
             println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
         }

-        let ret = Global.realloc(NonNull::new_unchecked(ptr).as_opaque(), old.clone(), new.size())
-            .unwrap_or_else(|_| oom());
+        let ret = Global.realloc(NonNull::new_unchecked(ptr).as_opaque(), old, new.size())
+            .unwrap_or_else(|_| oom(Layout::from_size_align_unchecked(new.size(), old.align())));

         if PRINT {
             println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",

@@ -32,8 +32,8 @@ struct Ccx {

 fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
     unsafe {
-        let ptr = Global.alloc(Layout::new::<Bcx>())
-            .unwrap_or_else(|_| oom());
+        let layout = Layout::new::<Bcx>();
+        let ptr = Global.alloc(layout).unwrap_or_else(|_| oom(layout));
         &*(ptr.as_ptr() as *const _)
     }
 }