#![allow(unstable_name_collisions)]
#![allow(dead_code)]
use core::cmp;
use core::mem;
use core::ptr::{self, NonNull};

use crate::alloc::{handle_alloc_error, Alloc, Layout, UnstableLayoutMethods};
use crate::collections::CollectionAllocErr;
use crate::collections::CollectionAllocErr::*;
use crate::Bump;

/// A low-level utility for more ergonomically allocating, reallocating, and
/// deallocating a buffer of memory in a `Bump` arena without having to worry
/// about all the corner cases involved.
#[allow(missing_debug_implementations)]
pub struct RawVec<'a, T> {
    ptr: NonNull<T>,
    cap: usize,
    a: &'a Bump,
}

impl<'a, T> RawVec<'a, T> {
    /// Creates the biggest possible `RawVec` without allocating. If `T` has
    /// positive size, this makes a `RawVec` with capacity 0. If `T` is
    /// zero-sized, it makes a `RawVec` with capacity `usize::MAX`.
    pub fn new_in(a: &'a Bump) -> Self {
        RawVec {
            // `NonNull::dangling()` is `align_of::<T>()` cast to a pointer,
            // which is exactly what the original hand-rolled expression
            // computed.
            ptr: NonNull::dangling(),
            // Zero-sized types never need a real allocation, so report the
            // maximum possible capacity up front.
            cap: if mem::size_of::<T>() == 0 { !0 } else { 0 },
            a,
        }
    }
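
    /// Creates a `RawVec` with capacity for `cap` elements of `T`, allocated
    /// in the given `Bump` arena.
    ///
    /// A minimal usage sketch (illustrative only; this type is internal to
    /// the crate, so the example is not run as a doc test):
    ///
    /// ```ignore
    /// let bump = Bump::new();
    /// let v: RawVec<u32> = RawVec::with_capacity_in(10, &bump);
    /// assert_eq!(v.cap(), 10);
    /// ```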
    #[inline]
    pub fn with_capacity_in(cap: usize, a: &'a Bump) -> Self {
        RawVec::allocate_in(cap, false, a)
    }

    /// Like `with_capacity_in`, but guarantees the buffer is zeroed.
    #[inline]
    pub fn with_capacity_zeroed_in(cap: usize, a: &'a Bump) -> Self {
        RawVec::allocate_in(cap, true, a)
    }

    fn allocate_in(cap: usize, zeroed: bool, mut a: &'a Bump) -> Self {
        unsafe {
            let elem_size = mem::size_of::<T>();

            let alloc_size = cap
                .checked_mul(elem_size)
                .unwrap_or_else(|| capacity_overflow());
            alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());

            // Handles zero-sized types and `cap == 0` alike: neither needs a
            // real allocation, so hand back a well-aligned dangling pointer.
            let ptr = if alloc_size == 0 {
                NonNull::<T>::dangling()
            } else {
                let align = mem::align_of::<T>();
                let layout = Layout::from_size_align(alloc_size, align).unwrap();
                let result = if zeroed {
                    a.alloc_zeroed(layout)
                } else {
                    Alloc::alloc(&mut a, layout)
                };
                match result {
                    Ok(ptr) => ptr.cast(),
                    Err(_) => handle_alloc_error(layout),
                }
            };

            RawVec { ptr, cap, a }
        }
    }
}

impl<'a, T> RawVec<'a, T> {
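    /// Reconstitutes a `RawVec` from a pointer, capacity, and arena.
    ///
    /// # Safety
    ///
    /// `ptr` must have been previously allocated in `a` with capacity for
    /// exactly `cap` elements of `T`.
    ///
    /// A hedged round-trip sketch (illustrative only, not run as a doc test):
    ///
    /// ```ignore
    /// let bump = Bump::new();
    /// let v: RawVec<u32> = RawVec::with_capacity_in(4, &bump);
    /// let (ptr, cap) = (v.ptr(), v.cap());
    /// core::mem::forget(v); // don't let `v` free the buffer we still use
    /// let v = unsafe { RawVec::from_raw_parts_in(ptr, cap, &bump) };
    /// assert_eq!(v.cap(), 4);
    /// ```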
    pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: &'a Bump) -> Self {
        RawVec {
            ptr: NonNull::new_unchecked(ptr),
            cap,
            a,
        }
    }
}

impl<'a, T> RawVec<'a, T> {
    /// Gets a raw pointer to the start of the allocation. Note that this is
    /// `NonNull::dangling()` if `cap == 0` or `T` is zero-sized, in which
    /// case the pointer must not be dereferenced.
    pub fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Gets the capacity of the allocation. This is always `usize::MAX` if
    /// `T` is zero-sized.
    #[inline(always)]
    pub fn cap(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            !0
        } else {
            self.cap
        }
    }

    /// Returns a shared reference to the arena backing this `RawVec`.
    pub fn bump(&self) -> &'a Bump {
        self.a
    }

    fn current_layout(&self) -> Option<Layout> {
        if self.cap == 0 {
            None
        } else {
            // We could use `Layout::array` here, but we already know the
            // size cannot overflow, so bypass the runtime checks.
            unsafe {
                let align = mem::align_of::<T>();
                let size = mem::size_of::<T>() * self.cap;
                Some(Layout::from_size_align_unchecked(size, align))
            }
        }
    }
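
    /// Doubles the size of the type's backing allocation. This is common
    /// enough to want a dedicated method, and it is slightly more efficient
    /// than the general `reserve` paths.
    ///
    /// Growth sketch from an empty vec (illustrative only, not run as a doc
    /// test): the first call picks a small starting capacity, and each later
    /// call doubles it.
    ///
    /// ```ignore
    /// let bump = Bump::new();
    /// let mut v: RawVec<u32> = RawVec::new_in(&bump);
    /// v.double();
    /// assert_eq!(v.cap(), 4);
    /// v.double();
    /// assert_eq!(v.cap(), 8);
    /// ```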
    #[inline(never)]
    #[cold]
    pub fn double(&mut self) {
        unsafe {
            let elem_size = mem::size_of::<T>();

            // Since we set the capacity to `usize::MAX` when `elem_size` is
            // 0, getting to here necessarily means the `RawVec` is overfull.
            assert!(elem_size != 0, "capacity overflow");

            let (new_cap, uniq) = match self.current_layout() {
                Some(cur) => {
                    // Since we never allocate more than `isize::MAX` bytes,
                    // doubling here cannot overflow `usize`.
                    let new_cap = 2 * self.cap;
                    let new_size = new_cap * elem_size;
                    alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
                    let ptr_res = self.a.realloc(self.ptr.cast(), cur, new_size);
                    match ptr_res {
                        Ok(ptr) => (new_cap, ptr.cast()),
                        Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
                            new_size,
                            cur.align(),
                        )),
                    }
                }
                None => {
                    // Skip to 4 because tiny vectors are a waste of time,
                    // unless the element is so large that 4 could overflow.
                    let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                    match self.a.alloc_array::<T>(new_cap) {
                        Ok(ptr) => (new_cap, ptr),
                        Err(_) => handle_alloc_error(Layout::array::<T>(new_cap).unwrap()),
                    }
                }
            };
            self.ptr = uniq;
            self.cap = new_cap;
        }
    }

    /// Attempts to double the size of the type's backing allocation in
    /// place. Returns `true` if it succeeded without moving the buffer.
    #[inline(never)]
    #[cold]
    pub fn double_in_place(&mut self) -> bool {
        unsafe {
            let elem_size = mem::size_of::<T>();
            let old_layout = match self.current_layout() {
                Some(layout) => layout,
                None => return false, // nothing allocated yet; nothing to double
            };

            // Since we set the capacity to `usize::MAX` when `elem_size` is
            // 0, getting to here necessarily means the `RawVec` is overfull.
            assert!(elem_size != 0, "capacity overflow");

            let new_cap = 2 * self.cap;
            let new_size = new_cap * elem_size;
            alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());

            match self.a.grow_in_place(self.ptr.cast(), old_layout, new_size) {
                Ok(_) => {
                    self.cap = new_cap;
                    true
                }
                Err(_) => false,
            }
        }
    }

    /// Fallible version of `reserve_exact`: returns an error instead of
    /// panicking on capacity overflow or aborting on allocation failure.
    pub fn try_reserve_exact(
        &mut self,
        used_cap: usize,
        needed_extra_cap: usize,
    ) -> Result<(), CollectionAllocErr> {
        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
    }

    /// Ensures that the buffer contains at least enough space to hold
    /// `used_cap + needed_extra_cap` elements, without over-allocating.
    /// Panics on capacity overflow; aborts on allocation failure.
    pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocErr) => unreachable!(),
            Ok(()) => {}
        }
    }

    /// Calculates the buffer's new size given that it will hold `used_cap +
    /// needed_extra_cap` elements: the larger of the required capacity and
    /// double the current capacity, which guarantees exponential amortized
    /// growth.
    fn amortized_new_size(
        &self,
        used_cap: usize,
        needed_extra_cap: usize,
    ) -> Result<usize, CollectionAllocErr> {
        let required_cap = used_cap
            .checked_add(needed_extra_cap)
            .ok_or(CapacityOverflow)?;
        // Cannot overflow, because `cap <= isize::MAX` in bytes.
        let double_cap = self.cap * 2;
        Ok(cmp::max(double_cap, required_cap))
    }
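
    /// Fallible version of `reserve`: returns `Err` instead of panicking or
    /// aborting when the capacity calculation overflows or the arena fails
    /// to allocate.
    ///
    /// A minimal sketch (illustrative only, not run as a doc test):
    ///
    /// ```ignore
    /// let bump = Bump::new();
    /// let mut v: RawVec<u32> = RawVec::new_in(&bump);
    /// assert!(v.try_reserve(0, 8).is_ok());
    /// assert!(v.cap() >= 8);
    /// ```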
    pub fn try_reserve(
        &mut self,
        used_cap: usize,
        needed_extra_cap: usize,
    ) -> Result<(), CollectionAllocErr> {
        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
    }
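
    /// Ensures that the buffer contains at least enough space to hold
    /// `used_cap + needed_extra_cap` elements. If it doesn't already, it
    /// reallocates with comfortable slack space (see `amortized_new_size`)
    /// to get amortized `O(1)` growth.
    ///
    /// Amortized-growth sketch (illustrative only, not run as a doc test;
    /// the numbers follow the same logic as the test at the bottom of this
    /// file):
    ///
    /// ```ignore
    /// let bump = Bump::new();
    /// let mut v: RawVec<u32> = RawVec::new_in(&bump);
    /// v.reserve(0, 7);
    /// assert_eq!(v.cap(), 7);
    /// v.reserve(7, 1); // needs 8, but doubling (to 14) wins
    /// assert_eq!(v.cap(), 14);
    /// ```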
    pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocErr) => unreachable!(),
            Ok(()) => {}
        }
    }

    /// Attempts to ensure that the buffer contains at least enough space to
    /// hold `used_cap + needed_extra_cap` elements by growing in place,
    /// without moving the buffer. Returns `true` only if the buffer grew in
    /// place; returns `false` (leaving the buffer untouched) if growth was
    /// unnecessary or could not happen in place.
    pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) -> bool {
        unsafe {
            let old_layout = match self.current_layout() {
                Some(layout) => layout,
                None => return false,
            };

            // If there is already enough slack capacity, nothing changes.
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return false;
            }

            let new_cap = self
                .amortized_new_size(used_cap, needed_extra_cap)
                .unwrap_or_else(|_| capacity_overflow());

            let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
            alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());

            match self
                .a
                .grow_in_place(self.ptr.cast(), old_layout, new_layout.size())
            {
                Ok(_) => {
                    self.cap = new_cap;
                    true
                }
                Err(_) => false,
            }
        }
    }
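
    /// Shrinks the allocation down to the specified amount. If the given
    /// amount is 0, actually completely deallocates.
    ///
    /// # Panics
    ///
    /// Panics if the given amount is *larger* than the current capacity.
    ///
    /// Shrinking sketch (illustrative only, not run as a doc test):
    ///
    /// ```ignore
    /// let bump = Bump::new();
    /// let mut v: RawVec<u32> = RawVec::with_capacity_in(10, &bump);
    /// v.shrink_to_fit(4);
    /// assert_eq!(v.cap(), 4);
    /// ```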
    pub fn shrink_to_fit(&mut self, amount: usize) {
        let elem_size = mem::size_of::<T>();

        // Zero-sized types don't own a real allocation; just update the
        // bookkeeping.
        if elem_size == 0 {
            self.cap = amount;
            return;
        }

        assert!(self.cap >= amount, "Tried to shrink to a larger capacity");

        if amount == 0 {
            unsafe {
                // Replace `self` with a fresh empty `RawVec` in the same
                // arena. `ptr::write` overwrites without running `Drop`, so
                // the buffer we just deallocated isn't freed a second time.
                let a = self.a;
                self.dealloc_buffer();
                ptr::write(self, RawVec::new_in(a));
            }
        } else if self.cap != amount {
            unsafe {
                let old_size = elem_size * self.cap;
                let new_size = elem_size * amount;
                let align = mem::align_of::<T>();
                let old_layout = Layout::from_size_align_unchecked(old_size, align);
                match self.a.realloc(self.ptr.cast(), old_layout, new_size) {
                    Ok(p) => self.ptr = p.cast(),
                    Err(_) => {
                        handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
                    }
                }
            }
            self.cap = amount;
        }
    }
}

enum Fallibility {
    Fallible,
    Infallible,
}

use self::Fallibility::*;

enum ReserveStrategy {
    Exact,
    Amortized,
}

use self::ReserveStrategy::*;

impl<'a, T> RawVec<'a, T> {
    fn reserve_internal(
        &mut self,
        used_cap: usize,
        needed_extra_cap: usize,
        fallibility: Fallibility,
        strategy: ReserveStrategy,
    ) -> Result<(), CollectionAllocErr> {
        unsafe {
            use crate::alloc::AllocErr;

            // NOTE: we don't early-branch on ZSTs here because we want this
            // to actually catch "asserting that this vec is at least this
            // big" mistakes. If there is already enough slack capacity,
            // there is nothing to do.
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return Ok(());
            }

            let new_cap = match strategy {
                Exact => used_cap
                    .checked_add(needed_extra_cap)
                    .ok_or(CapacityOverflow)?,
                Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
            };
            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;

            alloc_guard(new_layout.size())?;

            let res = match self.current_layout() {
                Some(layout) => {
                    debug_assert!(new_layout.align() == layout.align());
                    self.a.realloc(self.ptr.cast(), layout, new_layout.size())
                }
                None => Alloc::alloc(&mut self.a, new_layout),
            };

            // An `Infallible` caller never sees the error: abort instead.
            if let (Err(AllocErr), Infallible) = (&res, fallibility) {
                handle_alloc_error(new_layout);
            }

            self.ptr = res?.cast();
            self.cap = new_cap;

            Ok(())
        }
    }
}

impl<'a, T> RawVec<'a, T> {
    /// Frees the memory owned by the `RawVec` *without* trying to drop its
    /// contents.
    pub unsafe fn dealloc_buffer(&mut self) {
        let elem_size = mem::size_of::<T>();
        if elem_size != 0 {
            if let Some(layout) = self.current_layout() {
                self.a.dealloc(self.ptr.cast(), layout);
            }
        }
    }
}

impl<'a, T> Drop for RawVec<'a, T> {
    fn drop(&mut self) {
        unsafe {
            self.dealloc_buffer();
        }
    }
}

// We need to guarantee that we never allocate objects larger than
// `isize::MAX` bytes. On 64-bit targets the overflow checks above already
// ensure this, since such an allocation would fail anyway. On 32-bit and
// 16-bit targets we need this extra guard in case the platform can use all
// 4GB of address space in user-space (e.g. PAE or x32).
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
    if mem::size_of::<usize>() < 8 && alloc_size > core::isize::MAX as usize {
        Err(CapacityOverflow)
    } else {
        Ok(())
    }
}

// One central function responsible for reporting capacity overflows, so all
// paths panic with a consistent message.
fn capacity_overflow() -> ! {
    panic!("capacity overflow")
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn reserve_does_not_overallocate() {
        let bump = Bump::new();
        {
            // The first `reserve` on an empty vec allocates exactly the
            // requested amount, since doubling zero capacity yields zero.
            let mut v: RawVec<u32> = RawVec::new_in(&bump);
            v.reserve(0, 9);
            assert_eq!(9, v.cap());
        }

        {
            let mut v: RawVec<u32> = RawVec::new_in(&bump);
            v.reserve(0, 7);
            assert_eq!(7, v.cap());
            // The exact requirement (97) beats the doubled capacity (14).
            v.reserve(7, 90);
            assert_eq!(97, v.cap());
        }

        {
            let mut v: RawVec<u32> = RawVec::new_in(&bump);
            v.reserve(0, 12);
            assert_eq!(12, v.cap());
            v.reserve(12, 3);
            // Doubling (to 24) beats the exact requirement (15), so the
            // capacity grows by at least half.
            assert!(v.cap() >= 12 + 12 / 2);
        }
    }
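
    // An added sketch, not from the original test suite: zero-sized types
    // never allocate, so a fresh `RawVec` already reports the maximum
    // possible capacity and `reserve` is a no-op.
    #[test]
    fn zst_capacity_is_maximal() {
        let bump = Bump::new();
        let mut v: RawVec<()> = RawVec::new_in(&bump);
        assert_eq!(v.cap(), !0);
        v.reserve(0, 100);
        assert_eq!(v.cap(), !0);
    }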
}