//! Reference-counted byte buffers backed by a pluggable `Allocator`.

mod heap;

pub use self::heap::Heap;
use std::{mem, ptr, slice};
use std::sync::atomic::{self, AtomicUsize, Ordering};
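
/// Allocates `len` bytes on the heap and returns a ref-counted handle to
/// the new memory.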
pub fn heap(len: usize) -> MemRef {
    Heap.allocate(len)
}
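
/// A source of `MemRef` allocations. Allocators must be shareable across
/// threads, hence the `Sync + Send` bounds.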
pub trait Allocator: Sync + Send {
    fn allocate(&self, len: usize) -> MemRef;
    fn deallocate(&self, mem: *mut Mem);
}
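
/// A ref-counted handle to an allocation. `ptr` points at the data region,
/// which lives immediately after the `Mem` header tracking the allocation.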
pub struct MemRef {
    ptr: *mut u8,
}

impl MemRef {
    pub fn new(mem: *mut Mem) -> MemRef {
        unsafe {
            // Step past the `Mem` header to the start of the data region.
            let ptr = (mem as *mut u8).offset(mem::size_of::<Mem>() as isize);
            MemRef { ptr: ptr }
        }
    }

    #[inline]
    pub fn none() -> MemRef {
        MemRef { ptr: ptr::null_mut() }
    }

    #[inline]
    pub fn is_none(&self) -> bool {
        self.ptr.is_null()
    }

    #[inline]
    pub fn ptr(&self) -> *mut u8 {
        self.ptr
    }

    #[inline]
    pub fn bytes(&self) -> &[u8] {
        // Caller contract: must not be called on `MemRef::none()`.
        unsafe {
            slice::from_raw_parts(self.ptr(), self.mem().len)
        }
    }

    #[inline]
    pub fn bytes_mut(&mut self) -> &mut [u8] {
        // Caller contract: must not be called on `MemRef::none()`.
        unsafe {
            slice::from_raw_parts_mut(self.ptr(), self.mem().len)
        }
    }

    #[inline]
    fn mem_ptr(&self) -> *mut Mem {
        // The `Mem` header sits directly before the data pointer.
        unsafe {
            self.ptr.offset(-(mem::size_of::<Mem>() as isize)) as *mut Mem
        }
    }

    #[inline]
    fn mem(&self) -> &Mem {
        unsafe { &*self.mem_ptr() }
    }
}

impl Clone for MemRef {
    #[inline]
    fn clone(&self) -> MemRef {
        // `none()` handles have no header to touch.
        if !self.ptr.is_null() {
            // As in `Arc`, bumping the count needs only `Relaxed` ordering:
            // this handle already keeps the allocation alive.
            self.mem().refs.fetch_add(1, Ordering::Relaxed);
        }
        MemRef { ptr: self.ptr }
    }
}

impl Drop for MemRef {
    fn drop(&mut self) {
        // `MemRef::none()` holds no allocation; nothing to release.
        if self.ptr.is_null() { return; }
        // Release on the decrement, plus an Acquire fence before freeing, so
        // that all writes made through other handles happen-before the
        // deallocation (the same protocol `Arc` uses).
        if 1 == self.mem().refs.fetch_sub(1, Ordering::Release) {
            atomic::fence(Ordering::Acquire);
            unsafe {
                let alloc: &Allocator = &*self.mem().allocator;
                alloc.deallocate(self.mem_ptr());
            }
        }
    }
}
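
// The reference count in the header is atomic, so handles may be sent and
// shared across threads; callers must still avoid racing `bytes_mut` writes
// through clones.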
unsafe impl Send for MemRef { }
unsafe impl Sync for MemRef { }
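
/// Allocation header, stored immediately before the data region it
/// describes.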
pub struct Mem {
    allocator: *const Allocator,
    refs: AtomicUsize,
    len: usize,
}

impl Mem {
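    /// Creates a header for `len` data bytes owned by `allocator`, with the
    /// reference count starting at one.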
    pub fn new(len: usize, allocator: *const Allocator) -> Mem {
        Mem {
            allocator: allocator,
            refs: AtomicUsize::new(1),
            len: len,
        }
    }
}
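
#[cfg(test)]
mod tests {
    // A minimal usage sketch. It assumes `Heap::allocate` (defined in the
    // `heap` submodule, not shown here) writes the `Mem` header itself and
    // hands back a `MemRef` over `len` usable bytes.
    use super::heap;

    #[test]
    fn allocate_write_clone_drop() {
        let mut mem = heap(8);
        assert!(!mem.is_none());
        assert_eq!(8, mem.bytes().len());

        // Write through one handle...
        mem.bytes_mut()[0] = 42;

        // ...cloning bumps the refcount; the bytes are shared, not copied.
        let other = mem.clone();
        assert_eq!(42, other.bytes()[0]);

        // Dropping the last handle returns the memory to the allocator.
        drop(other);
        drop(mem);
    }
}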