use core::mem;
use core::ptr;
use core::sync::atomic::AtomicUsize;
use core::sync::atomic::Ordering::{Acquire, SeqCst};
use crate::block::{self, Block, Layout, LayoutError};
use crate::alloc::{AllocTag, Hold, HoldError};
use crate::resident::{Resident, ResidentFromValue, ResidentFromClone,
                      ResidentFromCloneUnchecked, ResidentFromCopy,
                      ResidentFromCopyUnchecked, ResidentFromEmpty,
                      ResidentWithCapacity};
use crate::lease::{Lease, Mut, Ref, Hard, Soft};
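// Reference counts and flags for an arc are packed into a single atomic
// status word: the hard count in the low bits, then the soft count, then
// the immutable ref count, with the mut flag and the relocated flag in the
// two highest bits.

/// Mask for the hard reference count field of an arc's status word.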
#[cfg(target_pointer_width = "64")]
pub(crate) const HARD_COUNT_MASK: usize = 0x0000000000FFFFFF;
#[cfg(target_pointer_width = "32")]
pub(crate) const HARD_COUNT_MASK: usize = 0x00000FFF;
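/// Maximum number of hard references an arc can track.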
#[cfg(target_pointer_width = "64")]
pub const HARD_COUNT_MAX: usize = 0xFFFFFF;
#[cfg(target_pointer_width = "32")]
pub const HARD_COUNT_MAX: usize = 0xFFF;
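/// Mask for the soft reference count field of an arc's status word.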
#[cfg(target_pointer_width = "64")]
pub(crate) const SOFT_COUNT_MASK: usize = 0x0000FFFFFF000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const SOFT_COUNT_MASK: usize = 0x00FFF000;
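/// Bit shift of the soft reference count field within an arc's status word.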
#[cfg(target_pointer_width = "64")]
pub(crate) const SOFT_COUNT_SHIFT: usize = 24;
#[cfg(target_pointer_width = "32")]
pub(crate) const SOFT_COUNT_SHIFT: usize = 12;
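/// Maximum number of soft references an arc can track.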
#[cfg(target_pointer_width = "64")]
pub const SOFT_COUNT_MAX: usize = 0xFFFFFF;
#[cfg(target_pointer_width = "32")]
pub const SOFT_COUNT_MAX: usize = 0xFFF;
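/// Mask for the immutable reference count field of an arc's status word.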
#[cfg(target_pointer_width = "64")]
pub(crate) const REF_COUNT_MASK: usize = 0x3FFF000000000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const REF_COUNT_MASK: usize = 0x3F000000;
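/// Bit shift of the immutable reference count field within an arc's status word.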
#[cfg(target_pointer_width = "64")]
pub(crate) const REF_COUNT_SHIFT: usize = 48;
#[cfg(target_pointer_width = "32")]
pub(crate) const REF_COUNT_SHIFT: usize = 24;
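/// Maximum number of immutable references an arc can track.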
#[cfg(target_pointer_width = "64")]
pub const REF_COUNT_MAX: usize = 0x3FFF;
#[cfg(target_pointer_width = "32")]
pub const REF_COUNT_MAX: usize = 0x3F;
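/// Status bit set while a mutable lease to the resident is outstanding.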
#[cfg(target_pointer_width = "64")]
pub(crate) const MUT_FLAG: usize = 0x4000000000000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const MUT_FLAG: usize = 0x40000000;
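/// Status bit set once the resident has been relocated to another arc.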
#[cfg(target_pointer_width = "64")]
pub(crate) const RELOCATED_FLAG: usize = 0x8000000000000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const RELOCATED_FLAG: usize = 0x80000000;
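/// Mask covering the immutable reference count and the mut flag; non-zero
/// when the resident is aliased by any reference.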
#[cfg(target_pointer_width = "64")]
pub(crate) const ALIASED_MASK: usize = 0x7FFF000000000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const ALIASED_MASK: usize = 0x7F000000;
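/// Mask covering every status bit except the relocated flag (all reference
/// counts and the mut flag).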
#[cfg(target_pointer_width = "64")]
pub(crate) const REFERENCED_MASK: usize = 0x7FFFFFFFFFFFFFFF;
#[cfg(target_pointer_width = "32")]
pub(crate) const REFERENCED_MASK: usize = 0x7FFFFFFF;
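/// Mask covering the immutable reference count, the mut flag, and the
/// relocated flag.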
#[cfg(target_pointer_width = "64")]
pub(crate) const READ_LOCKED_MASK: usize = 0xFFFF000000000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const READ_LOCKED_MASK: usize = 0xFF000000;
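/// Mask covering the mut flag and the relocated flag.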
#[cfg(target_pointer_width = "64")]
pub(crate) const WRITE_LOCKED_MASK: usize = 0xC000000000000000;
#[cfg(target_pointer_width = "32")]
pub(crate) const WRITE_LOCKED_MASK: usize = 0xC0000000;
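/// Status word of an arc holding exactly one hard reference and nothing else.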
#[cfg(target_pointer_width = "64")]
pub(crate) const UNIQUE_STATUS: usize = 0x0000000000000001;
#[cfg(target_pointer_width = "32")]
pub(crate) const UNIQUE_STATUS: usize = 0x00000001;
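/// Initial status word for an arc created with a mutable lease: one hard
/// reference with the mut flag set.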
pub(crate) const MUT_STATUS_INIT: usize = 1 | MUT_FLAG;
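/// Initial status word for an arc created with an immutable lease: one hard
/// reference and one immutable reference.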
pub(crate) const REF_STATUS_INIT: usize = 1 | 1 << REF_COUNT_SHIFT;
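/// Initial status word for an arc created with a hard lease: a single hard
/// reference.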
pub(crate) const HARD_STATUS_INIT: usize = 1;
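
/// A lease of any kind (mutable, immutable, hard, or soft) to an atomically
/// reference-counted resident.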
pub enum Arc<'a, R: Resident> {
Mut(Mut<'a, R>),
Ref(Ref<'a, R>),
Hard(Hard<'a, R>),
Soft(Soft<'a, R>),
}
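
/// Header preceding the resident in an arc structure; holds the relocation
/// address, the packed reference count status word, and the resident
/// metadata.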
pub struct ArcHeader<M = ()> {
pub(crate) relocation: AtomicUsize,
pub(crate) status: AtomicUsize,
pub(crate) meta: M,
}
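
/// Reason why an arc operation failed.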
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ArcError {
Cleared,
Aliased,
Relocating,
Contended,
HardCountOverflow,
SoftCountOverflow,
RefCountOverflow,
Misaligned,
Oversized,
OutOfMemory,
Unsupported(&'static str),
}
impl From<LayoutError> for ArcError {
fn from(error: LayoutError) -> ArcError {
match error {
LayoutError::Misaligned => ArcError::Misaligned,
LayoutError::Oversized => ArcError::Oversized,
}
}
}
impl From<HoldError> for ArcError {
fn from(error: HoldError) -> ArcError {
match error {
HoldError::Misaligned => ArcError::Misaligned,
HoldError::Oversized => ArcError::Oversized,
HoldError::OutOfMemory => ArcError::OutOfMemory,
HoldError::Unsupported(reason) => ArcError::Unsupported(reason),
}
}
}
impl From<ArcError> for HoldError {
fn from(error: ArcError) -> HoldError {
match error {
ArcError::Cleared => HoldError::Unsupported("cleared"),
ArcError::Aliased => HoldError::Unsupported("aliased"),
ArcError::Relocating => HoldError::Unsupported("relocating"),
ArcError::Contended => HoldError::Unsupported("contended"),
ArcError::HardCountOverflow => HoldError::Unsupported("hard count overflow"),
ArcError::SoftCountOverflow => HoldError::Unsupported("soft count overflow"),
ArcError::RefCountOverflow => HoldError::Unsupported("ref count overflow"),
ArcError::Misaligned => HoldError::Misaligned,
ArcError::Oversized => HoldError::Oversized,
ArcError::OutOfMemory => HoldError::OutOfMemory,
ArcError::Unsupported(reason) => HoldError::Unsupported(reason),
}
}
}
impl<M> ArcHeader<M> {
#[inline]
pub fn hard_count(&self) -> usize {
let status = self.status.load(SeqCst);
status & HARD_COUNT_MASK
}
#[inline]
pub fn soft_count(&self) -> usize {
let status = self.status.load(SeqCst);
(status & SOFT_COUNT_MASK) >> SOFT_COUNT_SHIFT
}
#[inline]
pub fn ref_count(&self) -> usize {
let status = self.status.load(SeqCst);
(status & REF_COUNT_MASK) >> REF_COUNT_SHIFT
}
#[inline]
pub fn is_mut(&self) -> bool {
let status = self.status.load(SeqCst);
status & MUT_FLAG != 0
}
#[inline]
pub fn is_relocated(&self) -> bool {
let status = self.status.load(SeqCst);
status & RELOCATED_FLAG != 0
}
#[inline]
pub fn is_aliased(&self) -> bool {
let status = self.status.load(SeqCst);
status & ALIASED_MASK != 0
}
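    /// Releases the hard lease on the relocated resident if this arc has been
    /// relocated; otherwise drops the resident metadata in place.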
#[inline]
    pub(crate) fn drop<R: Resident>(&mut self, data: *mut R::Data) {
        unsafe {
            // Reconstruct a pointer to the relocated resident, if any; a zero
            // relocation address yields a null pointer.
            let relocation = block::set_address(data, self.relocation.load(Acquire));
            if !relocation.is_null() {
                // The resident has been relocated; release the hard lease on
                // the relocated arc.
                mem::drop(mem::transmute::<*mut R::Data, Hard<R>>(relocation));
            } else {
                // No relocation occurred; drop the resident metadata in place.
                ptr::drop_in_place(&mut self.meta);
            }
        }
    }
}
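
/// Allocates a new arc structure in `hold` with its status word initialized
/// to `status`, and constructs its resident from the value `data` with
/// metadata `meta`; returns a pointer to the new resident.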
#[inline]
pub(crate) unsafe fn alloc_new<'a, R, L, T, M>(hold: &Hold<'a>, data: &T, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentFromValue<L, T, M>,
L: Lease,
{
    // Compute the combined layout of the arc header and the resident, and the
    // offset of the resident within that layout.
    let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
        .extended(R::new_resident_layout(data, meta))?;
    // Allocate a block for the arc structure in the hold.
    let block = hold.alloc(layout)?;
    // Initialize the arc header at the start of the block.
    let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
    ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
    ptr::write(&mut (*header).status, AtomicUsize::new(status));
    // Construct the resident at its offset from the header.
    let resident = (header as *mut u8).wrapping_add(offset);
    Ok(R::new_resident_ptr(resident, data, meta))
}
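
/// Allocates a new arc structure in `hold` with its status word initialized
/// to `status`, and constructs its resident by cloning `data` with metadata
/// `meta`; returns a pointer to the new resident.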
#[inline]
pub(crate) unsafe fn alloc_clone<'a, R, L, T, M>(hold: &Hold<'a>, data: &T, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentFromClone<L, T, M>,
L: Lease,
T: ?Sized,
{
let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
.extended(R::new_resident_layout(data, meta))?;
let block = hold.alloc(layout)?;
let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
ptr::write(&mut (*header).status, AtomicUsize::new(status));
let resident = (header as *mut u8).wrapping_add(offset);
Ok(R::new_resident_ptr(resident, data, meta))
}
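
/// Like `alloc_clone`, but constructs the resident through the
/// `ResidentFromCloneUnchecked` constructor.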
#[inline]
pub(crate) unsafe fn alloc_clone_unchecked<'a, R, L, T, M>(hold: &Hold<'a>, data: &T, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentFromCloneUnchecked<L, T, M>,
L: Lease,
T: ?Sized,
{
let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
.extended(R::new_resident_layout(data, meta))?;
let block = hold.alloc(layout)?;
let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
ptr::write(&mut (*header).status, AtomicUsize::new(status));
let resident = (header as *mut u8).wrapping_add(offset);
Ok(R::new_resident_ptr(resident, data, meta))
}
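
/// Allocates a new arc structure in `hold` with its status word initialized
/// to `status`, and constructs its resident by copying `data` with metadata
/// `meta`; returns a pointer to the new resident.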
#[inline]
pub(crate) unsafe fn alloc_copy<'a, R, L, T, M>(hold: &Hold<'a>, data: &T, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentFromCopy<L, T, M>,
L: Lease,
T: ?Sized,
{
let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
.extended(R::new_resident_layout(data, meta))?;
let block = hold.alloc(layout)?;
let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
ptr::write(&mut (*header).status, AtomicUsize::new(status));
let resident = (header as *mut u8).wrapping_add(offset);
Ok(R::new_resident_ptr(resident, data, meta))
}
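
/// Like `alloc_copy`, but constructs the resident through the
/// `ResidentFromCopyUnchecked` constructor.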
#[inline]
pub(crate) unsafe fn alloc_copy_unchecked<'a, R, L, T, M>(hold: &Hold<'a>, data: &T, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentFromCopyUnchecked<L, T, M>,
L: Lease,
T: ?Sized,
{
let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
.extended(R::new_resident_layout(data, meta))?;
let block = hold.alloc(layout)?;
let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
ptr::write(&mut (*header).status, AtomicUsize::new(status));
let resident = (header as *mut u8).wrapping_add(offset);
Ok(R::new_resident_ptr(resident, data, meta))
}
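
/// Allocates a new arc structure in `hold` with its status word initialized
/// to `status`, and constructs an empty resident with metadata `meta`;
/// returns a pointer to the new resident.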
#[inline]
pub(crate) unsafe fn alloc_empty<'a, R, L, M>(hold: &Hold<'a>, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentFromEmpty<L, M>,
L: Lease,
{
let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
.extended(R::new_resident_layout(meta))?;
let block = hold.alloc(layout)?;
let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
ptr::write(&mut (*header).status, AtomicUsize::new(status));
let resident = (header as *mut u8).wrapping_add(offset);
Ok(R::new_resident_ptr(resident, meta))
}
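
/// Allocates a new arc structure in `hold` with its status word initialized
/// to `status`, and constructs a resident with capacity `cap` and metadata
/// `meta`; returns a pointer to the new resident.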
#[inline]
pub(crate) unsafe fn alloc_cap<'a, R, L, M>(hold: &Hold<'a>, cap: usize, meta: &M, status: usize)
-> Result<*mut R::Data, HoldError>
where R: ResidentWithCapacity<L, M>,
L: Lease,
{
let (layout, offset) = Layout::for_type::<ArcHeader<R::Meta>>()
.extended(R::new_resident_layout(cap, meta)?)?;
let block = hold.alloc(layout)?;
let header = block.as_ptr() as *mut ArcHeader<R::Meta>;
ptr::write(&mut (*header).relocation, AtomicUsize::new(0));
ptr::write(&mut (*header).status, AtomicUsize::new(status));
let resident = (header as *mut u8).wrapping_add(offset);
Ok(R::new_resident_ptr(resident, cap, meta))
}

/// Reallocates the arc structure containing `old_data` so that its resident
/// can accommodate `new_layout`; returns a pointer to the resident in the
/// reallocated block.
pub(crate) unsafe fn realloc<R>(old_data: *mut R::Data, new_layout: Layout) -> Result<*mut R::Data, HoldError>
    where R: Resident
{
    // Get the alignment of the resident.
    let align = mem::align_of_val(&*old_data);
    // Compute the layout of the arc header.
    let header_layout = Layout::for_type::<ArcHeader<R::Meta>>();
    // Round the header size up to the resident's alignment to get the offset
    // of the resident within the arc structure.
    let offset = header_layout.size().wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    // Get a pointer to the arc header by subtracting the resident's offset.
    let old_header = (old_data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>;
    // Compute the total size of the arc structure.
    let size = offset.wrapping_add(R::resident_size(old_data, &mut (*old_header).meta));
    // Compute the new layout of the arc structure.
    let new_layout = header_layout.extended(new_layout)?.0;
    // Reconstruct the block currently occupied by the arc structure.
    let old_block = Block::from_raw_parts(old_header as *mut u8, size);
    // Get the hold that allocated the block.
    let hold = AllocTag::from_ptr(old_header as *mut u8).holder();
    // Reallocate the arc structure in the hold.
    match hold.realloc(old_block, new_layout) {
        Ok(new_block) => {
            // Recompute the resident pointer relative to the new header address.
            let new_header = new_block.as_ptr() as *mut ArcHeader<R::Meta>;
            let new_data = block::set_address(old_data, (new_header as usize).wrapping_add(offset));
            Ok(new_data)
        },
        Err(error) => Err(error),
    }
}

/// Resizes the block containing the arc structure of `old_data` so that its
/// resident can accommodate `new_layout`; returns a pointer to the resident
/// in the resized block.
pub(crate) unsafe fn resize<R>(old_data: *mut R::Data, new_layout: Layout) -> Result<*mut R::Data, HoldError>
    where R: Resident
{
    // Get the alignment of the resident.
    let align = mem::align_of_val(&*old_data);
    // Compute the layout of the arc header.
    let header_layout = Layout::for_type::<ArcHeader<R::Meta>>();
    // Round the header size up to the resident's alignment to get the offset
    // of the resident within the arc structure.
    let offset = header_layout.size().wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    // Get a pointer to the arc header by subtracting the resident's offset.
    let old_header = (old_data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>;
    // Compute the total size of the arc structure.
    let size = offset.wrapping_add(R::resident_size(old_data, &mut (*old_header).meta));
    // Compute the new layout of the arc structure.
    let new_layout = header_layout.extended(new_layout)?.0;
    // Reconstruct the block currently occupied by the arc structure.
    let old_block = Block::from_raw_parts(old_header as *mut u8, size);
    // Get the hold that allocated the block.
    let hold = AllocTag::from_ptr(old_header as *mut u8).holder();
    // Resize the arc structure in the hold.
    match hold.resize(old_block, new_layout) {
        Ok(new_block) => {
            // Recompute the resident pointer relative to the new header address.
            let new_header = new_block.as_ptr() as *mut ArcHeader<R::Meta>;
            let new_data = block::set_address(old_data, (new_header as usize).wrapping_add(offset));
            Ok(new_data)
        },
        Err(error) => Err(error),
    }
}
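
/// Returns a pointer to the `ArcHeader` that precedes the resident pointed
/// to by `data` in its arc structure.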
#[inline]
pub(crate) fn header<R: Resident>(data: *mut R::Data) -> *mut ArcHeader<R::Meta> {
let align = mem::align_of_val(unsafe { &*data });
let offset = mem::size_of::<ArcHeader<R::Meta>>()
.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
(data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>
}