use core::cmp::{self, Ordering};
use core::fmt::{self, Debug, Display, Pointer, Formatter};
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::mem;
use core::ops::{Deref, Index, Add};
use core::ptr::{self, NonNull};
use core::sync::atomic::AtomicUsize;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use crate::block::{Block, Layout};
use crate::alloc::{AllocTag, Hold, Holder, HoldError, TryClone};
use crate::lease::{arc, ArcHeader, ArcError, Lease, Mut, Hard, Soft};
use crate::resident::{Resident, ResidentFromValue, ResidentFromClone,
ResidentFromCloneUnchecked, ResidentFromCopy,
ResidentFromCopyUnchecked, ResidentFromEmpty,
ResidentWithCapacity, ResidentUnwrap, ResidentDeref,
ResidentAsRef, ResidentIndex, ResidentAdd,
ResidentIntoIterator, ResidentIntoRefIterator,
ResidentPartialEq, ResidentEq, ResidentPartialOrd,
ResidentOrd, ResidentHash, ResidentDisplay, ResidentDebug};
/// An atomically reference-counted, shared lease of a `Resident` whose data
/// lives directly after an `ArcHeader` in a `Hold`-managed allocation.
pub struct Ref<'a, R: Resident> {
  /// Pointer to the shared resident data (the header precedes it in memory).
  data: NonNull<R::Data>,
  /// Marker: this lease logically owns a share of the resident data.
  data_lifetime: PhantomData<R::Data>,
  /// Marker: this lease logically owns a share of the arc header and metadata.
  meta_lifetime: PhantomData<ArcHeader<R::Meta>>,
  /// Ties the lease to the lifetime of the `Hold` that allocated it.
  hold_lifetime: PhantomData<&'a ()>,
}
// SAFETY: reference counts live in the shared `ArcHeader` and are updated
// with atomic operations, so a `Ref` may move across threads whenever the
// resident data and metadata are themselves `Send`.
unsafe impl<'a, R: Resident> Send for Ref<'a, R> where R::Data: Send, R::Meta: Send {
}
// SAFETY: a `Ref` only exposes shared access to the resident, so it may be
// shared across threads whenever the resident data and metadata are `Sync`;
// count updates in the shared header are atomic.
unsafe impl<'a, R: Resident> Sync for Ref<'a, R> where R::Data: Sync, R::Meta: Sync {
}
impl<'a, R: Resident> Ref<'a, R> {
/// Allocates a new `Ref` in `hold`, constructing a resident from `data`
/// with associated metadata `meta`; fails if allocation fails.
#[inline]
pub fn try_hold_new_meta<T, M>(hold: &dyn Hold<'a>, data: T, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentFromValue<Ref<'a, R>, T, M>
{
  unsafe {
    // Allocate an arc sized for the eventual resident, starting with one
    // hard reference and one shared (ref) reference.
    let resident = arc::alloc_new::<R, Ref<'a, R>, T, M>(hold, &data, &meta, arc::REF_STATUS_INIT)?;
    // Wrap the raw allocation in a lease, then initialize it in place.
    let mut lease = Ref::from_raw(resident);
    R::new_resident(&mut lease, data, meta);
    Ok(lease)
  }
}
/// Allocates a new `Ref` in `hold`, constructing a resident cloned from
/// `data` with associated metadata `meta`; fails if allocation fails.
#[inline]
pub fn try_hold_clone_meta<T: ?Sized, M>(hold: &dyn Hold<'a>, data: &T, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentFromClone<Ref<'a, R>, T, M>
{
  unsafe {
    // Allocate an arc sized for the cloned resident, with the initial
    // Ref status (one hard + one shared reference).
    let resident = arc::alloc_clone::<R, Ref<'a, R>, T, M>(hold, &data, &meta, arc::REF_STATUS_INIT)?;
    let mut lease = Ref::from_raw(resident);
    // Initialize the resident by cloning `data` into the allocation.
    R::new_resident(&mut lease, data, meta);
    Ok(lease)
  }
}
/// Allocates a new `Ref` in `hold`, constructing a resident cloned from
/// `data` without the checks performed by `try_hold_clone_meta`.
///
/// # Safety
///
/// Unchecked per the `ResidentFromCloneUnchecked` contract; the caller must
/// uphold whatever invariants that trait requires of `data`.
#[inline]
pub unsafe fn try_hold_clone_unchecked_meta<T: ?Sized, M>(hold: &dyn Hold<'a>, data: &T, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentFromCloneUnchecked<Ref<'a, R>, T, M>
{
  // Allocate, wrap, then initialize the resident in place.
  let resident = arc::alloc_clone_unchecked::<R, Ref<'a, R>, T, M>(hold, &data, &meta, arc::REF_STATUS_INIT)?;
  let mut lease = Ref::from_raw(resident);
  R::new_resident(&mut lease, data, meta);
  Ok(lease)
}
/// Allocates a new `Ref` in `hold`, constructing a resident copied from
/// `data` with associated metadata `meta`; fails if allocation fails.
#[inline]
pub fn try_hold_copy_meta<T: ?Sized, M>(hold: &dyn Hold<'a>, data: &T, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentFromCopy<Ref<'a, R>, T, M>
{
  unsafe {
    // Allocate an arc sized for the copied resident, with the initial
    // Ref status (one hard + one shared reference).
    let resident = arc::alloc_copy::<R, Ref<'a, R>, T, M>(hold, &data, &meta, arc::REF_STATUS_INIT)?;
    let mut lease = Ref::from_raw(resident);
    // Initialize the resident by copying `data` into the allocation.
    R::new_resident(&mut lease, data, meta);
    Ok(lease)
  }
}
/// Allocates a new `Ref` in `hold`, constructing a resident copied from
/// `data` without the checks performed by `try_hold_copy_meta`.
///
/// # Safety
///
/// Unchecked per the `ResidentFromCopyUnchecked` contract; the caller must
/// uphold whatever invariants that trait requires of `data`.
#[inline]
pub unsafe fn try_hold_copy_unchecked_meta<T: ?Sized, M>(hold: &dyn Hold<'a>, data: &T, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentFromCopyUnchecked<Ref<'a, R>, T, M>
{
  // Allocate, wrap, then initialize the resident in place.
  let resident = arc::alloc_copy_unchecked::<R, Ref<'a, R>, T, M>(hold, &data, &meta, arc::REF_STATUS_INIT)?;
  let mut lease = Ref::from_raw(resident);
  R::new_resident(&mut lease, data, meta);
  Ok(lease)
}
/// Allocates a new `Ref` in `hold`, constructing an empty resident with
/// associated metadata `meta`; fails if allocation fails.
#[inline]
pub fn try_hold_empty_meta<M>(hold: &dyn Hold<'a>, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentFromEmpty<Ref<'a, R>, M>
{
  unsafe {
    // Allocate an arc for an empty resident, with the initial Ref status.
    let resident = arc::alloc_empty::<R, Ref<'a, R>, M>(hold, &meta, arc::REF_STATUS_INIT)?;
    let mut lease = Ref::from_raw(resident);
    // Initialize the empty resident in place.
    R::new_resident(&mut lease, meta);
    Ok(lease)
  }
}
/// Allocates a new `Ref` in `hold`, constructing a resident with capacity
/// for `cap` elements and associated metadata `meta`.
#[inline]
pub fn try_hold_cap_meta<M>(hold: &dyn Hold<'a>, cap: usize, meta: M)
  -> Result<Ref<'a, R>, HoldError>
  where R: ResidentWithCapacity<Ref<'a, R>, M>
{
  unsafe {
    // Allocate an arc sized for `cap` elements, with the initial Ref status.
    let resident = arc::alloc_cap::<R, Ref<'a, R>, M>(hold, cap, &meta, arc::REF_STATUS_INIT)?;
    let mut lease = Ref::from_raw(resident);
    // Initialize the resident with the requested capacity.
    R::new_resident(&mut lease, cap, meta);
    Ok(lease)
  }
}
#[inline]
pub fn try_hold_new<T>(hold: &dyn Hold<'a>, data: T) -> Result<Ref<'a, R>, HoldError>
where R: ResidentFromValue<Ref<'a, R>, T>
{
Ref::try_hold_new_meta(hold, data, ())
}
#[inline]
pub fn try_hold_clone<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Result<Ref<'a, R>, HoldError>
where R: ResidentFromClone<Ref<'a, R>, T>
{
Ref::try_hold_clone_meta(hold, data, ())
}
#[inline]
pub unsafe fn try_hold_clone_unchecked<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Result<Ref<'a, R>, HoldError>
where R: ResidentFromCloneUnchecked<Ref<'a, R>, T>
{
Ref::try_hold_clone_unchecked_meta(hold, data, ())
}
#[inline]
pub fn try_hold_copy<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Result<Ref<'a, R>, HoldError>
where R: ResidentFromCopy<Ref<'a, R>, T>
{
Ref::try_hold_copy_meta(hold, data, ())
}
#[inline]
pub unsafe fn try_hold_copy_unchecked<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Result<Ref<'a, R>, HoldError>
where R: ResidentFromCopyUnchecked<Ref<'a, R>, T>
{
Ref::try_hold_copy_unchecked_meta(hold, data, ())
}
#[inline]
pub fn try_hold_empty(hold: &dyn Hold<'a>) -> Result<Ref<'a, R>, HoldError>
where R: ResidentFromEmpty<Ref<'a, R>>
{
Ref::try_hold_empty_meta(hold, ())
}
#[inline]
pub fn try_hold_cap(hold: &dyn Hold<'a>, cap: usize) -> Result<Ref<'a, R>, HoldError>
where R: ResidentWithCapacity<Ref<'a, R>>
{
Ref::try_hold_cap_meta(hold, cap, ())
}
#[inline]
pub fn hold_new<T>(hold: &dyn Hold<'a>, data: T) -> Ref<'a, R>
where R: ResidentFromValue<Ref<'a, R>, T>
{
Ref::try_hold_new(hold, data).unwrap()
}
#[inline]
pub fn hold_clone<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Ref<'a, R>
where R: ResidentFromClone<Ref<'a, R>, T>
{
Ref::try_hold_clone(hold, data).unwrap()
}
#[inline]
pub unsafe fn hold_clone_unchecked<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Ref<'a, R>
where R: ResidentFromCloneUnchecked<Ref<'a, R>, T>
{
Ref::try_hold_clone_unchecked(hold, data).unwrap()
}
#[inline]
pub fn hold_copy<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Ref<'a, R>
where R: ResidentFromCopy<Ref<'a, R>, T>
{
Ref::try_hold_copy(hold, data).unwrap()
}
#[inline]
pub unsafe fn hold_copy_unchecked<T: ?Sized>(hold: &dyn Hold<'a>, data: &T) -> Ref<'a, R>
where R: ResidentFromCopyUnchecked<Ref<'a, R>, T>
{
Ref::try_hold_copy_unchecked(hold, data).unwrap()
}
#[inline]
pub fn hold_empty(hold: &dyn Hold<'a>) -> Ref<'a, R>
where R: ResidentFromEmpty<Ref<'a, R>>
{
Ref::try_hold_empty(hold).unwrap()
}
#[inline]
pub fn hold_cap(hold: &dyn Hold<'a>, cap: usize) -> Ref<'a, R>
where R: ResidentWithCapacity<Ref<'a, R>>
{
Ref::try_hold_cap(hold, cap).unwrap()
}
#[inline]
pub fn new<T>(data: T) -> Ref<'a, R>
where R: ResidentFromValue<Ref<'a, R>, T>
{
Ref::hold_new(Hold::global(), data)
}
#[inline]
pub fn from_clone<T: ?Sized>(data: &T) -> Ref<'a, R>
where R: ResidentFromClone<Ref<'a, R>, T>
{
Ref::hold_clone(Hold::global(), data)
}
#[inline]
pub unsafe fn from_clone_unchecked<T: ?Sized>(data: &T) -> Ref<'a, R>
where R: ResidentFromCloneUnchecked<Ref<'a, R>, T>
{
Ref::hold_clone_unchecked(Hold::global(), data)
}
#[inline]
pub fn from_copy<T: ?Sized>(data: &T) -> Ref<'a, R>
where R: ResidentFromCopy<Ref<'a, R>, T>
{
Ref::hold_copy(Hold::global(), data)
}
#[inline]
pub unsafe fn from_copy_unchecked<T: ?Sized>(data: &T) -> Ref<'a, R>
where R: ResidentFromCopyUnchecked<Ref<'a, R>, T>
{
Ref::hold_copy_unchecked(Hold::global(), data)
}
#[inline]
pub fn empty() -> Ref<'a, R>
where R: ResidentFromEmpty<Ref<'a, R>>
{
Ref::hold_empty(Hold::global())
}
#[inline]
pub fn with_cap(cap: usize) -> Ref<'a, R>
where R: ResidentWithCapacity<Ref<'a, R>>
{
Ref::hold_cap(Hold::global(), cap)
}
/// Reconstitutes a `Ref` from a raw resident pointer.
///
/// # Safety
///
/// `data` must point to the resident of a live arc allocation whose
/// reference counts already account for this lease.
#[inline]
pub unsafe fn from_raw(data: *mut R::Data) -> Ref<'a, R> {
  Ref {
    data: NonNull::new_unchecked(data),
    hold_lifetime: PhantomData,
    meta_lifetime: PhantomData,
    data_lifetime: PhantomData,
  }
}
/// Returns a pointer to the `ArcHeader` that precedes the resident data.
#[inline]
fn header(this: &Ref<'a, R>) -> *mut ArcHeader<R::Meta> {
  let data = this.data.as_ptr();
  arc::header::<R>(data)
}
/// Returns the current number of hard references to the shared resident.
#[inline]
pub fn hard_count(this: &Ref<'a, R>) -> usize {
  let header = Ref::header(this);
  unsafe { (*header).hard_count() }
}
/// Returns the current number of soft references to the shared resident.
#[inline]
pub fn soft_count(this: &Ref<'a, R>) -> usize {
  let header = Ref::header(this);
  unsafe { (*header).soft_count() }
}
/// Returns the current number of shared (ref) references to the resident.
#[inline]
pub fn ref_count(this: &Ref<'a, R>) -> usize {
  let header = Ref::header(this);
  unsafe { (*header).ref_count() }
}
/// Borrows the metadata stored in the arc header for this lease's lifetime.
#[inline]
pub fn metadata<'b>(this: &'b Ref<'a, R>) -> &'b R::Meta {
  let header = Ref::header(this);
  unsafe { &(*header).meta }
}
/// Attempts to clone the shared resident and its metadata into a new,
/// uniquely owned `Mut` lease allocated in the same `Hold`; the original
/// lease is left untouched.
pub fn try_to_unique(this: &Ref<'a, R>) -> Result<Mut<'a, R>, ArcError>
  where R::Data: TryClone,
        R::Meta: TryClone,
{
  unsafe {
    let old_data = this.data.as_ptr();
    // Recompute the arc layout: the resident follows the header, offset
    // rounded up to the resident's own alignment.
    let align = mem::align_of_val(&*old_data);
    let offset = mem::size_of::<ArcHeader<R::Meta>>()
      .wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    let old_header = (old_data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>;
    let size = offset.wrapping_add(R::resident_size(old_data, &mut (*old_header).meta));
    let layout = Layout::from_size_align_unchecked(size, cmp::max(align, mem::align_of::<ArcHeader<R::Meta>>()));
    // Allocate the new arc in the same hold as the original.
    let hold = this.holder();
    let new_block = hold.alloc(layout)?;
    let new_header = new_block.as_ptr() as *mut ArcHeader<R::Meta>;
    // Initialize the new header with a unique (Mut) status word.
    ptr::write(&mut (*new_header).relocation, AtomicUsize::new(0));
    ptr::write(&mut (*new_header).status, AtomicUsize::new(arc::MUT_STATUS_INIT));
    // Clone the metadata; release the new block on failure.
    let new_metadata = match (*old_header).meta.try_clone() {
      Ok(metadata) => metadata,
      Err(error) => {
        hold.dealloc(new_block);
        return Err(ArcError::from(error));
      },
    };
    ptr::write(&mut (*new_header).meta, new_metadata);
    let new_data = (new_header as *mut u8).wrapping_add(offset) as *mut R::Data;
    // Clone the resident; unwind the metadata write on failure.
    let new_resident = match (*old_data).try_clone() {
      Ok(resident) => resident,
      Err(error) => {
        ptr::drop_in_place(&mut (*new_header).meta);
        hold.dealloc(new_block);
        return Err(ArcError::from(error));
      },
    };
    ptr::write(new_data, new_resident);
    Ok(Mut::from_raw(new_data))
  }
}
pub fn to_unique(this: &Ref<'a, R>) -> Mut<'a, R>
where R::Data: TryClone,
R::Meta: TryClone,
{
Ref::try_to_unique(this).unwrap()
}
/// Attempts to convert this `Ref` into a uniquely owned `Mut` lease.
/// If this is the sole shared reference the status word is flipped in
/// place; otherwise the resident and metadata are cloned into a fresh
/// allocation and this lease is released.
pub fn try_into_unique(this: Ref<'a, R>) -> Result<Mut<'a, R>, ArcError>
  where R::Data: TryClone,
        R::Meta: TryClone,
{
  unsafe {
    let old_data = this.data.as_ptr();
    // Recompute the arc layout; the resident follows the header at its
    // own alignment.
    let align = mem::align_of_val(&*old_data);
    let offset = mem::size_of::<ArcHeader<R::Meta>>()
      .wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    let old_header = (old_data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>;
    let mut old_status = (*old_header).status.load(Relaxed);
    // Fast path: if this is the only shared reference, clear the ref
    // count and set the mut flag, reusing the existing allocation.
    loop {
      let old_ref_count = (old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT;
      if old_ref_count == 1 {
        let new_status = old_status & !arc::REF_COUNT_MASK | arc::MUT_FLAG;
        match (*old_header).status.compare_exchange_weak(old_status, new_status, SeqCst, Relaxed) {
          Ok(_) => {
            // The hard reference transfers to the new Mut lease.
            mem::forget(this);
            return Ok(Mut::from_raw(old_data));
          },
          Err(status) => old_status = status,
        }
      } else {
        break;
      }
    }
    // Slow path: other shared references exist; clone into a new arc.
    let size = offset.wrapping_add(R::resident_size(old_data, &mut (*old_header).meta));
    let layout = Layout::from_size_align_unchecked(size, cmp::max(align, mem::align_of::<ArcHeader<R::Meta>>()));
    let hold = this.holder();
    let new_block = hold.alloc(layout)?;
    let new_header = new_block.as_ptr() as *mut ArcHeader<R::Meta>;
    // Initialize the new header with a unique (Mut) status word.
    ptr::write(&mut (*new_header).relocation, AtomicUsize::new(0));
    ptr::write(&mut (*new_header).status, AtomicUsize::new(arc::MUT_STATUS_INIT));
    // Clone the metadata; release the new block on failure.
    let new_metadata = match (*old_header).meta.try_clone() {
      Ok(metadata) => metadata,
      Err(error) => {
        hold.dealloc(new_block);
        return Err(ArcError::from(error));
      },
    };
    ptr::write(&mut (*new_header).meta, new_metadata);
    let new_data = (new_header as *mut u8).wrapping_add(offset) as *mut R::Data;
    // Clone the resident; unwind the metadata write on failure.
    let new_resident = match (*old_data).try_clone() {
      Ok(resident) => resident,
      Err(error) => {
        ptr::drop_in_place(&mut (*new_header).meta);
        hold.dealloc(new_block);
        return Err(ArcError::from(error));
      },
    };
    ptr::write(new_data, new_resident);
    // `this` is dropped normally here, releasing its counts on the old arc.
    Ok(Mut::from_raw(new_data))
  }
}
pub fn into_unique(this: Ref<'a, R>) -> Mut<'a, R>
where R::Data: TryClone,
R::Meta: TryClone,
{
Ref::try_into_unique(this).unwrap()
}
/// Converts this `Ref` into a `Mut` lease to the same resident, without
/// cloning.
///
/// # Safety
///
/// This method spins until this is the sole shared reference; the caller
/// must ensure every other `Ref` to the resident is (or will be) released,
/// and that exclusive mutation is sound.
pub unsafe fn into_mut(this: Ref<'a, R>) -> Mut<'a, R> {
  let data = this.data.as_ptr();
  let header = arc::header::<R>(data);
  let mut old_status = (*header).status.load(Relaxed);
  loop {
    let old_ref_count = (old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT;
    if old_ref_count == 1 {
      // Sole shared reference: clear the ref count and set the mut flag.
      let new_status = old_status & !arc::REF_COUNT_MASK | arc::MUT_FLAG;
      match (*header).status.compare_exchange_weak(old_status, new_status, SeqCst, Relaxed) {
        Ok(_) => {
          // The hard reference transfers to the new Mut lease.
          mem::forget(this);
          return Mut::from_raw(data);
        },
        Err(status) => old_status = status,
      }
    } else {
      // Another shared reference still exists; spin until it drops.
      old_status = (*header).status.load(Relaxed);
    }
  }
}
/// Attempts to obtain a new `Hard` lease to the shared resident by
/// incrementing the hard reference count; fails on count overflow.
pub fn try_to_hard(this: &Ref<'a, R>) -> Result<Hard<'a, R>, ArcError> {
  unsafe {
    let data = this.data.as_ptr();
    let header = arc::header::<R>(data);
    let mut old_status = (*header).status.load(Relaxed);
    loop {
      // Increment the hard count, checking for overflow.
      let old_hard_count = old_status & arc::HARD_COUNT_MASK;
      let new_hard_count = old_hard_count.wrapping_add(1);
      if new_hard_count > arc::HARD_COUNT_MAX {
        return Err(ArcError::HardCountOverflow);
      }
      // Splice the new hard count into the status word.
      let new_status = old_status & !arc::HARD_COUNT_MASK;
      let new_status = new_status | new_hard_count;
      match (*header).status.compare_exchange_weak(old_status, new_status, Acquire, Relaxed) {
        Ok(_) => return Ok(Hard::from_raw(data)),
        Err(status) => old_status = status,
      }
    }
  }
}
pub fn to_hard(this: &Ref<'a, R>) -> Hard<'a, R> {
Ref::try_to_hard(this).unwrap()
}
/// Converts this `Ref` into a `Hard` lease by releasing its shared (ref)
/// reference while keeping its hard reference.
pub fn into_hard(this: Ref<'a, R>) -> Hard<'a, R> {
  unsafe {
    let data = this.data.as_ptr();
    let header = arc::header::<R>(data);
    let mut old_status = (*header).status.load(Relaxed);
    loop {
      // Decrement the shared (ref) count; underflow indicates a bug.
      let old_ref_count = (old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT;
      let new_ref_count = match old_ref_count.checked_sub(1) {
        Some(ref_count) => ref_count,
        None => panic!("ref count underflow"),
      };
      // Splice the new ref count into the status word.
      let new_status = old_status & !arc::REF_COUNT_MASK;
      let new_status = new_status | new_ref_count << arc::REF_COUNT_SHIFT;
      match (*header).status.compare_exchange_weak(old_status, new_status, Release, Relaxed) {
        Ok(_) => {
          // The hard reference transfers to the new lease; don't run Drop.
          mem::forget(this);
          return Hard::from_raw(data);
        },
        Err(status) => old_status = status,
      }
    }
  }
}
/// Attempts to obtain a new `Soft` lease to the shared resident by
/// incrementing the soft reference count; fails on count overflow.
pub fn try_to_soft(this: &Ref<'a, R>) -> Result<Soft<'a, R>, ArcError> {
  unsafe {
    let data = this.data.as_ptr();
    let header = arc::header::<R>(data);
    let mut old_status = (*header).status.load(Relaxed);
    loop {
      // Increment the soft count, checking for overflow.
      let old_soft_count = (old_status & arc::SOFT_COUNT_MASK) >> arc::SOFT_COUNT_SHIFT;
      let new_soft_count = old_soft_count.wrapping_add(1);
      if new_soft_count > arc::SOFT_COUNT_MAX {
        return Err(ArcError::SoftCountOverflow);
      }
      // Splice the new soft count into the status word.
      let new_status = old_status & !arc::SOFT_COUNT_MASK;
      let new_status = new_status | new_soft_count << arc::SOFT_COUNT_SHIFT;
      match (*header).status.compare_exchange_weak(old_status, new_status, Acquire, Relaxed) {
        Ok(_) => return Ok(Soft::from_raw(data)),
        Err(status) => old_status = status,
      }
    }
  }
}
pub fn to_soft(this: &Ref<'a, R>) -> Soft<'a, R> {
Ref::try_to_soft(this).unwrap()
}
/// Attempts to convert this `Ref` into a `Soft` lease, releasing its hard
/// and shared references and acquiring a soft reference in one atomic
/// update; drops the resident if this was the last hard reference.
pub fn try_into_soft(this: Ref<'a, R>) -> Result<Soft<'a, R>, ArcError> {
  unsafe {
    let data = this.data.as_ptr();
    let header = arc::header::<R>(data);
    let mut old_status = (*header).status.load(Relaxed);
    loop {
      // Decrement the hard count; underflow indicates a bug.
      let old_hard_count = old_status & arc::HARD_COUNT_MASK;
      let new_hard_count = match old_hard_count.checked_sub(1) {
        Some(hard_count) => hard_count,
        None => panic!("hard count underflow"),
      };
      // Increment the soft count, checking for overflow.
      let old_soft_count = (old_status & arc::SOFT_COUNT_MASK) >> arc::SOFT_COUNT_SHIFT;
      let new_soft_count = old_soft_count.wrapping_add(1);
      if new_soft_count > arc::SOFT_COUNT_MAX {
        return Err(ArcError::SoftCountOverflow);
      }
      // Decrement the shared (ref) count; underflow indicates a bug.
      let old_ref_count = (old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT;
      let new_ref_count = match old_ref_count.checked_sub(1) {
        Some(ref_count) => ref_count,
        None => panic!("ref count underflow"),
      };
      // Splice all three updated counts into the status word at once.
      let new_status = old_status & !(arc::HARD_COUNT_MASK | arc::SOFT_COUNT_MASK | arc::REF_COUNT_MASK);
      let new_status = new_status | new_hard_count | new_soft_count << arc::SOFT_COUNT_SHIFT |
                       new_ref_count << arc::REF_COUNT_SHIFT;
      match (*header).status.compare_exchange_weak(old_status, new_status, SeqCst, Relaxed) {
        Ok(_) => {
          // If we released the last hard reference, drop the resident now;
          // the soft reference keeps only the header alive.
          if new_hard_count == 0 {
            R::resident_drop(data, &mut (*header).meta);
          }
          mem::forget(this);
          return Ok(Soft::from_raw(data));
        },
        Err(status) => old_status = status,
      }
    }
  }
}
pub fn into_soft(this: Ref<'a, R>) -> Soft<'a, R> {
Ref::try_into_soft(this).unwrap()
}
/// Consumes the lease and returns the raw resident pointer without touching
/// any reference counts.
///
/// # Safety
///
/// The caller becomes responsible for the hard and shared references this
/// lease held, e.g. by reconstituting the lease with `from_raw`.
#[inline]
pub unsafe fn into_raw(this: Ref<'a, R>) -> *mut R::Data {
  let data = this.data.as_ptr();
  // Suppress Drop so the counts remain owned by the returned pointer.
  mem::forget(this);
  data
}
/// Returns the raw resident pointer without consuming the lease.
///
/// # Safety
///
/// The pointer is only valid while the arc allocation remains live; the
/// caller must not outlive the lease's reference counts through it.
#[inline]
pub unsafe fn as_ptr_unchecked(this: &Ref<'a, R>) -> *mut R::Data {
  this.data.as_ptr()
}
/// Attempts to move the resident out of the lease, succeeding only when
/// this holds the sole hard reference; otherwise returns the lease in
/// `Err`. Any remaining soft references keep the header alive.
pub fn try_unwrap(this: Ref<'a, R>) -> Result<R::Target, Ref<'a, R>> where R: ResidentUnwrap<Ref<'a, R>> {
  unsafe {
    let data = this.data.as_ptr();
    // Recompute the arc layout from the resident's alignment.
    let align = mem::align_of_val(&*data);
    let offset = mem::size_of::<ArcHeader<R::Meta>>()
      .wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    let header = (data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>;
    let size = offset.wrapping_add(R::resident_size(data, &mut (*header).meta));
    let mut old_status = (*header).status.load(Relaxed);
    loop {
      // Unwrapping requires exclusive hard ownership.
      let old_hard_count = old_status & arc::HARD_COUNT_MASK;
      if old_hard_count != 1 {
        return Err(this);
      }
      // A sole hard reference held by a Ref implies a sole shared reference.
      debug_assert_eq!((old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT, 1);
      let new_status = old_status & !(arc::HARD_COUNT_MASK | arc::REF_COUNT_MASK);
      let old_soft_count = (old_status & arc::SOFT_COUNT_MASK) >> arc::SOFT_COUNT_SHIFT;
      if old_soft_count == 0 {
        // No soft references: we own the whole arc outright, so a plain
        // store suffices; move the resident out and free the allocation.
        (*header).status.store(new_status, Relaxed);
        let resident = R::resident_unwrap(&this);
        (*header).drop::<R>(data);
        let block = Block::from_raw_parts(header as *mut u8, size);
        AllocTag::from_ptr(header as *mut u8).dealloc(block);
        mem::forget(this);
        return Ok(resident);
      } else {
        // Soft references exist: temporarily take an extra soft reference
        // to keep the header alive while we move the resident out.
        let new_soft_count = old_soft_count.wrapping_add(1);
        if new_soft_count > arc::SOFT_COUNT_MAX {
          return Err(this);
        }
        let new_status = new_status & !arc::SOFT_COUNT_MASK;
        let new_status = new_status | new_soft_count << arc::SOFT_COUNT_SHIFT;
        match (*header).status.compare_exchange_weak(old_status, new_status, Acquire, Relaxed) {
          Ok(_) => {
            let resident = R::resident_unwrap(&this);
            // Now release the temporary soft reference; if it was the last
            // one, drop the header and free the allocation.
            old_status = new_status;
            loop {
              let old_soft_count = (old_status & arc::SOFT_COUNT_MASK) >> arc::SOFT_COUNT_SHIFT;
              let new_soft_count = match old_soft_count.checked_sub(1) {
                Some(soft_count) => soft_count,
                None => panic!("soft count underflow"),
              };
              let new_status = old_status & !arc::SOFT_COUNT_MASK;
              let new_status = new_status | new_soft_count << arc::SOFT_COUNT_SHIFT;
              match (*header).status.compare_exchange_weak(old_status, new_status, Release, Relaxed) {
                Ok(_) => {
                  if new_soft_count == 0 {
                    (*header).drop::<R>(data);
                    let block = Block::from_raw_parts(header as *mut u8, size);
                    AllocTag::from_ptr(header as *mut u8).dealloc(block);
                  }
                  return Ok(resident);
                },
                Err(status) => old_status = status,
              }
            }
          },
          Err(status) => old_status = status,
        }
      }
    }
  }
}
/// Moves the resident out of the lease; panics if any other hard reference
/// still aliases it.
pub fn unwrap(this: Ref<'a, R>) -> R::Target where R: ResidentUnwrap<Ref<'a, R>> {
  match Ref::try_unwrap(this) {
    Err(_) => panic!("aliased resident"),
    Ok(resident) => resident,
  }
}
}
impl<'a, R: Resident> Holder<'a> for Ref<'a, R> {
  /// Returns the `Hold` that allocated this lease, recovered from the
  /// allocation tag preceding the arc header.
  #[inline]
  fn holder(&self) -> &'a dyn Hold<'a> {
    let header = Ref::header(self) as *mut u8;
    AllocTag::from_ptr(header).holder()
  }
}
impl<'a, R: Resident> Lease for Ref<'a, R> {
  type Data = R::Data;
  type Meta = R::Meta;
  /// Returns a raw pointer to the shared resident data.
  #[inline]
  fn data(&self) -> *mut R::Data {
    self.data.as_ptr()
  }
  /// Returns a raw pointer to the resident metadata in the arc header.
  #[inline]
  fn meta(&self) -> *mut R::Meta {
    unsafe { &mut (*Ref::header(self)).meta }
  }
}
impl<'a, R: ResidentDeref<Ref<'a, R>>> Deref for Ref<'a, R> {
  type Target = R::Target;
  /// Delegates dereferencing to the resident implementation.
  #[inline]
  fn deref(&self) -> &R::Target {
    R::resident_deref(self)
  }
}
impl<'a, R: ResidentAsRef<Ref<'a, R>, T>, T: ?Sized> AsRef<T> for Ref<'a, R> {
  /// Delegates reference conversion to the resident implementation.
  #[inline]
  fn as_ref(&self) -> &T {
    R::resident_as_ref(self)
  }
}
impl<'a, R: ResidentIndex<Ref<'a, R>, Idx>, Idx> Index<Idx> for Ref<'a, R> {
  type Output = R::Output;
  /// Delegates indexing to the resident implementation.
  #[inline]
  fn index(&self, idx: Idx) -> &R::Output {
    R::resident_index(self, idx)
  }
}
impl<'a, R: ResidentAdd<Ref<'a, R>, Rhs>, Rhs> Add<Rhs> for Ref<'a, R> {
  type Output = R::Output;
  /// Delegates addition to the resident implementation.
  #[inline]
  fn add(self, addend: Rhs) -> R::Output {
    R::resident_add(self, addend)
  }
}
impl<'a, R: ResidentIntoIterator<Ref<'a, R>>> IntoIterator for Ref<'a, R> {
  type Item = R::Item;
  type IntoIter = R::IntoIter;
  /// Delegates iteration over the consumed lease to the resident.
  #[inline]
  fn into_iter(self) -> R::IntoIter {
    R::resident_into_iter(self)
  }
}
impl<'a, R: ResidentIntoRefIterator<'a, Ref<'a, R>>> IntoIterator for &'a Ref<'a, R> {
  type Item = R::Item;
  type IntoIter = R::IntoIter;
  /// Delegates by-reference iteration to the resident.
  #[inline]
  fn into_iter(self) -> R::IntoIter {
    R::resident_into_iter(self)
  }
}
impl<'a, R: ResidentPartialEq<Ref<'a, R>, T>, T: ?Sized> PartialEq<T> for Ref<'a, R> {
  /// Delegates equality to the resident implementation.
  #[inline]
  fn eq(&self, that: &T) -> bool {
    R::resident_eq(self, that)
  }
  /// Delegates inequality to the resident implementation.
  #[inline]
  fn ne(&self, that: &T) -> bool {
    R::resident_ne(self, that)
  }
}
/// `Ref` equality is a total equivalence whenever the resident's is.
impl<'a, R: ResidentEq<Ref<'a, R>>> Eq for Ref<'a, R> {
}
impl<'a, R: ResidentPartialOrd<Ref<'a, R>, T>, T: ?Sized> PartialOrd<T> for Ref<'a, R> {
  /// Delegates partial ordering to the resident implementation.
  #[inline]
  fn partial_cmp(&self, that: &T) -> Option<Ordering> {
    R::resident_partial_cmp(self, that)
  }
  /// Delegates `<` to the resident implementation.
  #[inline]
  fn lt(&self, that: &T) -> bool {
    R::resident_lt(self, that)
  }
  /// Delegates `<=` to the resident implementation.
  #[inline]
  fn le(&self, that: &T) -> bool {
    R::resident_le(self, that)
  }
  /// Delegates `>=` to the resident implementation.
  #[inline]
  fn ge(&self, that: &T) -> bool {
    R::resident_ge(self, that)
  }
  /// Delegates `>` to the resident implementation.
  #[inline]
  fn gt(&self, that: &T) -> bool {
    R::resident_gt(self, that)
  }
}
impl<'a, R: ResidentOrd<Ref<'a, R>>> Ord for Ref<'a, R> {
  /// Delegates total ordering to the resident implementation.
  #[inline]
  fn cmp(&self, that: &Ref<'a, R>) -> Ordering {
    R::resident_cmp(self, that)
  }
}
impl<'a, R: ResidentHash<Ref<'a, R>>> Hash for Ref<'a, R> {
  /// Delegates hashing to the resident implementation.
  #[inline]
  fn hash<H: Hasher>(&self, hasher: &mut H) {
    R::resident_hash(self, hasher);
  }
}
impl<'a, R: ResidentDisplay<Ref<'a, R>>> Display for Ref<'a, R> {
  /// Delegates display formatting to the resident implementation.
  #[inline]
  fn fmt(&self, out: &mut Formatter) -> fmt::Result {
    R::resident_fmt(self, out)
  }
}
impl<'a, R: ResidentDebug<Ref<'a, R>>> Debug for Ref<'a, R> {
  /// Delegates debug formatting to the resident implementation.
  #[inline]
  fn fmt(&self, out: &mut Formatter) -> fmt::Result {
    R::resident_fmt(self, out)
  }
}
impl<'a, R: Resident> Pointer for Ref<'a, R> {
  /// Formats the address of the shared resident data.
  #[inline]
  fn fmt(&self, out: &mut Formatter) -> fmt::Result {
    let ptr = self.data.as_ptr();
    Pointer::fmt(&ptr, out)
  }
}
impl<'a, R: Resident> TryClone for Ref<'a, R> {
  /// Clones the lease by atomically incrementing both the hard and shared
  /// (ref) counts; fails if either count would overflow.
  fn try_clone(&self) -> Result<Ref<'a, R>, HoldError> {
    unsafe {
      let data = self.data.as_ptr();
      let header = arc::header::<R>(data);
      let mut old_status = (*header).status.load(Relaxed);
      loop {
        // Increment the hard count, checking for overflow.
        let old_hard_count = old_status & arc::HARD_COUNT_MASK;
        let new_hard_count = old_hard_count.wrapping_add(1);
        if new_hard_count > arc::HARD_COUNT_MAX {
          return Err(HoldError::Unsupported("hard count overflow"));
        }
        // Increment the shared (ref) count, checking for overflow.
        let old_ref_count = (old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT;
        let new_ref_count = old_ref_count.wrapping_add(1);
        if new_ref_count > arc::REF_COUNT_MAX {
          return Err(HoldError::Unsupported("ref count overflow"));
        }
        // Splice both updated counts into the status word at once.
        let new_status = old_status & !(arc::HARD_COUNT_MASK | arc::REF_COUNT_MASK);
        let new_status = new_status | new_hard_count | new_ref_count << arc::REF_COUNT_SHIFT;
        match (*header).status.compare_exchange_weak(old_status, new_status, Acquire, Relaxed) {
          Ok(_) => return Ok(Ref::from_raw(data)),
          Err(status) => old_status = status,
        }
      }
    }
  }
}
impl<'a, R: Resident> Clone for Ref<'a, R> {
  /// Clones the lease by bumping the shared reference counts; panics if
  /// either count would overflow.
  fn clone(&self) -> Ref<'a, R> {
    self.try_clone().unwrap()
  }
}
// `#[may_dangle]` asserts to dropck that this Drop impl does not access
// `R`-typed data that may already be dangling, beyond dropping it.
unsafe impl<'a, #[may_dangle] R: Resident> Drop for Ref<'a, R> {
  fn drop(&mut self) {
    unsafe {
      let data = self.data.as_ptr();
      // Recompute the arc layout from the resident's alignment.
      let align = mem::align_of_val(&*data);
      let offset = mem::size_of::<ArcHeader<R::Meta>>()
        .wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
      let header = (data as *mut u8).wrapping_sub(offset) as *mut ArcHeader<R::Meta>;
      let size = offset.wrapping_add(R::resident_size(data, &mut (*header).meta));
      let mut old_status = (*header).status.load(Relaxed);
      loop {
        // Decrement the hard count; underflow indicates a bug.
        let old_hard_count = old_status & arc::HARD_COUNT_MASK;
        let new_hard_count = match old_hard_count.checked_sub(1) {
          Some(hard_count) => hard_count,
          None => panic!("hard count underflow"),
        };
        // Decrement the shared (ref) count; underflow indicates a bug.
        let old_ref_count = (old_status & arc::REF_COUNT_MASK) >> arc::REF_COUNT_SHIFT;
        let new_ref_count = match old_ref_count.checked_sub(1) {
          Some(shared_count) => shared_count,
          None => panic!("ref count underflow"),
        };
        let new_status = old_status & !(arc::HARD_COUNT_MASK | arc::REF_COUNT_MASK);
        let new_status = new_status | new_hard_count | new_ref_count << arc::REF_COUNT_SHIFT;
        if new_hard_count != 0 {
          // Other hard references remain: just publish the new counts.
          match (*header).status.compare_exchange_weak(old_status, new_status, Release, Relaxed) {
            Ok(_) => return,
            Err(status) => old_status = status,
          }
        } else {
          let old_soft_count = (old_status & arc::SOFT_COUNT_MASK) >> arc::SOFT_COUNT_SHIFT;
          if old_soft_count == 0 {
            // Last reference of any kind: we own the arc outright, so a
            // plain store suffices; drop everything and free the block.
            (*header).status.store(new_status, Relaxed);
            R::resident_drop(data, &mut (*header).meta);
            (*header).drop::<R>(data);
            let block = Block::from_raw_parts(header as *mut u8, size);
            AllocTag::from_ptr(header as *mut u8).dealloc(block);
            return;
          } else {
            // Soft references remain: take a temporary soft reference to
            // keep the header alive while the resident is dropped.
            let new_soft_count = old_soft_count.wrapping_add(1);
            if new_soft_count > arc::SOFT_COUNT_MAX {
              panic!("soft count overflow");
            }
            let new_status = new_status & !arc::SOFT_COUNT_MASK;
            let new_status = new_status | new_soft_count << arc::SOFT_COUNT_SHIFT;
            match (*header).status.compare_exchange_weak(old_status, new_status, SeqCst, Relaxed) {
              Ok(_) => {
                // Drop the resident, then release the temporary soft
                // reference; if it was the last one, free the header too.
                R::resident_drop(data, &mut (*header).meta);
                old_status = new_status;
                loop {
                  let old_soft_count = (old_status & arc::SOFT_COUNT_MASK) >> arc::SOFT_COUNT_SHIFT;
                  let new_soft_count = match old_soft_count.checked_sub(1) {
                    Some(soft_count) => soft_count,
                    None => panic!("soft count underflow"),
                  };
                  let new_status = old_status & !arc::SOFT_COUNT_MASK;
                  let new_status = new_status | new_soft_count << arc::SOFT_COUNT_SHIFT;
                  match (*header).status.compare_exchange_weak(old_status, new_status, Release, Relaxed) {
                    Ok(_) => {
                      if new_soft_count == 0 {
                        (*header).drop::<R>(data);
                        let block = Block::from_raw_parts(header as *mut u8, size);
                        AllocTag::from_ptr(header as *mut u8).dealloc(block);
                      }
                      return;
                    },
                    Err(status) => old_status = status,
                  }
                }
              },
              Err(status) => old_status = status,
            }
          }
        }
      }
    }
  }
}