#![doc(
html_root_url = "https://docs.rs/arc-swap/0.3.11/arc-swap/",
test(attr(deny(warnings)))
)]
#![deny(missing_docs, warnings)]
#![allow(renamed_and_removed_lints)]
mod as_raw;
pub mod cache;
mod compile_fail_tests;
mod debt;
pub mod gen_lock;
mod ref_cnt;
use std::fmt::{Debug, Display, Formatter, Result as FmtResult};
use std::isize;
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::process;
use std::ptr;
use std::sync::atomic::{self, AtomicPtr, Ordering};
use std::sync::Arc;
use std::thread;
pub use as_raw::AsRaw;
use debt::Debt;
use gen_lock::{Global, LockStorage, PrivateUnsharded, GEN_CNT};
pub use ref_cnt::{NonNull, RefCnt};
/// Upper bound on concurrently held generation locks per counter.
///
/// Capped at `isize::MAX` so the counter can never wrap; `GenLock::new`
/// aborts the process if the bound is exceeded.
const MAX_GUARDS: usize = (isize::MAX) as usize;
/// A lock on one generation counter of the given lock storage.
///
/// While held, it marks an in-progress read so a writer in
/// `wait_for_readers` knows the pointer it replaced may still be in use.
/// It must be released explicitly through `unlock`; dropping it without
/// unlocking is a bug (caught by the debug-only `Drop` impl below).
struct GenLock<'a, S: LockStorage + 'a> {
    // Index of the shard whose counter was incremented.
    shard: usize,
    // Which generation (0..GEN_CNT) was incremented.
    gen: usize,
    lock_storage: &'a S,
}
impl<'a, S: LockStorage> GenLock<'a, S> {
    /// Registers a new reader by incrementing one generation counter.
    fn new(signal_safe: SignalSafety, lock_storage: &'a S) -> Self {
        let shard = match signal_safe {
            // Inside a signal handler always use shard 0 — choose_shard
            // may not be async-signal-safe.
            SignalSafety::Safe => 0,
            SignalSafety::Unsafe => lock_storage.choose_shard(),
        };
        let gen = lock_storage.gen_idx().load(Ordering::Relaxed) % GEN_CNT;
        // SeqCst: this increment must be globally ordered against the
        // writer's pointer swap so a writer can't miss an active reader.
        let old = lock_storage.shards().as_ref()[shard].0[gen].fetch_add(1, Ordering::SeqCst);
        // Abort rather than risk the counter overflowing/wrapping.
        if old > MAX_GUARDS {
            process::abort();
        }
        GenLock {
            shard,
            gen,
            lock_storage,
        }
    }
    /// Releases the lock taken in `new`.
    fn unlock(self) {
        self.lock_storage.shards().as_ref()[self.shard].0[self.gen].fetch_sub(1, Ordering::AcqRel);
        // Skip the debug-only Drop, which treats dropping as a forgotten
        // unlock.
        mem::forget(self);
    }
}
// Debug-build safety net: a GenLock must never be dropped, only unlocked.
#[cfg(debug_assertions)] impl<'a, S: LockStorage> Drop for GenLock<'a, S> {
    fn drop(&mut self) {
        unreachable!("Forgot to unlock generation");
    }
}
/// A short-term borrow of the value stored inside an [`ArcSwapAny`].
///
/// While the guard is alive the corresponding generation lock is held,
/// which prevents writers from releasing the pointed-to value, so the raw
/// pointer stays valid without touching the reference count. Keep guards
/// short-lived: they block writers.
pub struct Guard<'a, T: RefCnt + 'a, S: LockStorage + 'a = Global>
where
    T::Base: 'a,
{
    // Always Some while the guard lives; taken out in Drop to unlock.
    lock: Option<GenLock<'a, S>>,
    ptr: *const T::Base,
    _arc_swap: PhantomData<&'a T::Base>,
}
impl<'a, T: RefCnt, S: LockStorage> Guard<'a, T, S> {
    /// Upgrades the guard to a full reference-counted handle.
    pub fn upgrade(guard: &Self) -> T {
        let res = unsafe { T::from_ptr(guard.ptr) };
        // from_ptr does not add a reference; add the one we hand out.
        T::inc(&res);
        res
    }
    /// Converts the guard into a [`Lease`] backed by a full reference.
    #[deprecated(note = "Use upgrade instead")]
    pub fn lease(guard: &Self) -> Lease<T> {
        let res = unsafe { T::from_ptr(guard.ptr) };
        T::inc(&res);
        // Deliberately leak the extra reference: the Lease (debt: None)
        // now owns it and Lease::drop will release it.
        T::into_ptr(res);
        Lease {
            ptr: guard.ptr,
            debt: None,
            _data: PhantomData,
        }
    }
    /// Borrows the pointed-to value, or `None` if the stored pointer is
    /// null.
    #[cfg_attr(feature = "cargo-clippy", allow(needless_lifetimes))]
    pub fn get_ref<'g>(guard: &'g Self) -> Option<&'g T::Base> {
        unsafe { guard.ptr.as_ref() }
    }
}
impl<'a, T: NonNull, S: LockStorage> Deref for Guard<'a, T, S> {
    type Target = T::Base;
    fn deref(&self) -> &T::Base {
        // NonNull promises the stored pointer is never null, so the unwrap
        // can't fire in practice.
        unsafe { self.ptr.as_ref().unwrap() }
    }
}
impl<'a, T: RefCnt, S: LockStorage> Drop for Guard<'a, T, S> {
    fn drop(&mut self) {
        // lock is always Some until this single take in Drop.
        self.lock.take().unwrap().unlock();
    }
}
// SAFETY: a Guard is effectively a shared borrow of the pointed-to value
// plus a handle on the (thread-safe, atomic) generation counters; moving
// or sharing it across threads is sound whenever the underlying value is
// itself Send + Sync.
unsafe impl<'a, T, S> Send for Guard<'a, T, S>
where
    T: RefCnt + Send + Sync,
    S: LockStorage,
    T::Base: Send + Sync,
{
}
// SAFETY: see the Send impl above; &Guard only hands out &T::Base.
unsafe impl<'a, T, S> Sync for Guard<'a, T, S>
where
    T: RefCnt + Send + Sync,
    S: LockStorage,
    T::Base: Send + Sync,
{
}
/// A medium-term borrow of the stored value.
///
/// Unlike [`Guard`] it holds no generation lock. It is protected either by
/// a recorded "debt" (a slot a writer pays off by bumping the reference
/// count before releasing the value) or, failing that, by a full reference
/// of its own.
pub struct Lease<T: RefCnt> {
    ptr: *const T::Base,
    // Some(debt): no reference was taken yet; the debt slot must be paid
    // (by us or by a writer) before ptr may be released.
    // None: this lease owns one real reference.
    debt: Option<&'static Debt>,
    _data: PhantomData<T>,
}
impl<T: RefCnt> Lease<T> {
    /// Upgrades the lease to a full reference-counted handle, leaving the
    /// lease intact.
    pub fn upgrade(lease: &Self) -> T {
        let res = unsafe { T::from_ptr(lease.ptr) };
        // from_ptr adds no reference; add one for the returned handle.
        T::inc(&res);
        res
    }
    /// Consumes the lease, turning it into a full handle.
    ///
    /// Can reuse the reference the lease already owns instead of taking a
    /// fresh one.
    #[cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))]
    pub fn into_upgrade(lease: Self) -> T {
        let res = unsafe { T::from_ptr(lease.ptr) };
        if let Some(debt) = lease.debt {
            T::inc(&res);
            if !debt.pay::<T>(lease.ptr) {
                // A writer already paid the debt (incremented the count)
                // on our behalf, so we now hold one reference too many.
                unsafe { T::dec(lease.ptr) };
            }
        }
        // Skip Lease::drop — its accounting was handled above, or (debt:
        // None) the owned reference moves into `res`.
        mem::forget(lease);
        res
    }
    /// Borrows the pointed-to value, or `None` if the lease holds null.
    pub fn get_ref(lease: &Self) -> Option<&T::Base> {
        unsafe { lease.ptr.as_ref() }
    }
    /// Checks whether the lease points to null.
    pub fn is_null(lease: &Self) -> bool {
        lease.ptr.is_null()
    }
}
impl<T: NonNull> Lease<Option<T>> {
    /// Converts to a lease of the non-nullable type, panicking with `msg`
    /// if the lease is null.
    pub fn expect(self, msg: &str) -> Lease<T> {
        assert!(
            !Self::is_null(&self),
            "Expect on NULL arc-swap Lease: {}",
            msg
        );
        // Copy the fields out first, then forget self so its Drop doesn't
        // release the debt/reference being transferred to the new lease.
        let ptr = self.ptr;
        let debt = self.debt;
        mem::forget(self);
        Lease {
            ptr,
            debt,
            _data: PhantomData,
        }
    }
    /// Converts to a lease of the non-nullable type, panicking if null.
    pub fn unwrap(self) -> Lease<T> {
        // This assert fires first (with its own message); the expect("")
        // below then cannot trigger.
        assert!(!Self::is_null(&self), "Unwrap of NULL arc-swap Lease");
        self.expect("")
    }
    /// Converts into `Option<Lease<T>>`, mapping a null lease to `None`.
    pub fn into_option(self) -> Option<Lease<T>> {
        if Self::is_null(&self) {
            None
        } else {
            Some(self.unwrap())
        }
    }
}
/// Compares two pointer-like values for pointer identity (do they point at
/// the same object?).
#[cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
pub fn ptr_eq<Base, A, B>(a: A, b: B) -> bool
where
    A: AsRaw<Base>,
    B: AsRaw<Base>,
{
    ptr::eq(a.as_raw(), b.as_raw())
}
impl<T: NonNull> Deref for Lease<T> {
    type Target = T::Base;
    fn deref(&self) -> &T::Base {
        // NonNull promises the pointer is never null; the unwrap can't
        // fire in practice.
        unsafe { self.ptr.as_ref().unwrap() }
    }
}
impl<T> Debug for Lease<T>
where
    T: RefCnt,
    T::Base: Debug,
{
    /// Formats the leased value: `Option`-style for nullable pointer
    /// types, the inner value directly otherwise.
    fn fmt(&self, formatter: &mut Formatter) -> FmtResult {
        // `self` is already `&Self`; passing it directly avoids the
        // needless extra borrow (`&self` would create a `&&Self`).
        let l = Lease::get_ref(self);
        if T::can_null() {
            // Nullable type: show Some(..)/None.
            l.fmt(formatter)
        } else {
            // Non-null types guarantee a valid pointer.
            l.unwrap().fmt(formatter)
        }
    }
}
impl<T> Display for Lease<T>
where
    T: NonNull,
    T::Base: Display,
{
    /// Delegates to the `Display` of the pointed-to value (guaranteed
    /// non-null by the `NonNull` bound).
    fn fmt(&self, formatter: &mut Formatter) -> FmtResult {
        let inner: &T::Base = self;
        inner.fmt(formatter)
    }
}
impl<T: RefCnt> Drop for Lease<T> {
    fn drop(&mut self) {
        if let Some(debt) = self.debt {
            // If we manage to pay the debt off ourselves, no reference was
            // ever created for this lease — nothing more to do.
            if debt.pay::<T>(self.ptr) {
                return;
            }
            // Otherwise a writer paid it by incrementing the count on our
            // behalf; fall through and release that reference.
        }
        unsafe { T::dec(self.ptr) };
    }
}
// SAFETY: a Lease carries a shared pointer to the value plus an optional
// reference to a static Debt slot (accessed atomically); moving or sharing
// it across threads is sound whenever the underlying value is Send + Sync.
unsafe impl<T> Send for Lease<T>
where
    T: RefCnt + Send + Sync,
    T::Base: Send + Sync,
{
}
// SAFETY: see the Send impl above; &Lease only hands out &T::Base.
unsafe impl<T> Sync for Lease<T>
where
    T: RefCnt + Send + Sync,
    T::Base: Send + Sync,
{
}
/// Whether acquiring a generation lock must stay async-signal-safe.
#[derive(Copy, Clone)]
enum SignalSafety {
    // Restricted mode for use inside signal handlers (fixed shard 0).
    Safe,
    // Normal mode; may pick a shard freely.
    Unsafe,
}
/// How often the writer's spin loop yields to the OS scheduler.
const YIELD_EVERY: usize = 16;
/// An atomic storage for reference-counted smart pointers.
///
/// `T` is the pointer type stored (e.g. `Arc<U>` or `Option<Arc<U>>`);
/// `S` selects where the generation-lock state lives (shared global shards
/// by default).
pub struct ArcSwapAny<T: RefCnt, S: LockStorage = Global> {
    // The raw stored pointer; the storage logically owns one reference.
    ptr: AtomicPtr<T::Base>,
    // We effectively contain a T.
    _phantom_arc: PhantomData<T>,
    lock_storage: S,
}
impl<T: RefCnt, S: LockStorage> From<T> for ArcSwapAny<T, S> {
fn from(val: T) -> Self {
let ptr = T::into_ptr(val);
Self {
ptr: AtomicPtr::new(ptr),
_phantom_arc: PhantomData,
lock_storage: S::default(),
}
}
}
impl<T: RefCnt, S: LockStorage> Drop for ArcSwapAny<T, S> {
    fn drop(&mut self) {
        // &mut self guarantees exclusivity, so get_mut needs no ordering.
        let ptr = *self.ptr.get_mut();
        // Settle any outstanding debts/guards on the pointer first...
        self.wait_for_readers(ptr);
        // ...then release the reference the storage owns.
        unsafe { T::dec(ptr) };
    }
}
impl<T: RefCnt, S: LockStorage> Clone for ArcSwapAny<T, S> {
    /// Creates a new, independent storage holding the same value.
    fn clone(&self) -> Self {
        let current: T = self.load();
        Self::from(current)
    }
}
impl<T, S: LockStorage> Debug for ArcSwapAny<T, S>
where
    T: RefCnt,
    T::Base: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> FmtResult {
        // Peek rather than load: no refcount traffic just to format.
        let guard = self.peek();
        let r = Guard::get_ref(&guard);
        if T::can_null() {
            // Nullable storage: show Some(..)/None.
            r.fmt(formatter)
        } else {
            // Non-null storage guarantees a valid pointer.
            r.unwrap().fmt(formatter)
        }
    }
}
impl<T, S: LockStorage> Display for ArcSwapAny<T, S>
where
    T: NonNull,
    T::Base: Display,
{
    /// Delegates to the `Display` of the stored value, borrowed briefly
    /// through a guard.
    fn fmt(&self, formatter: &mut Formatter) -> FmtResult {
        let guard = self.peek();
        Display::fmt(&*guard, formatter)
    }
}
impl<T: RefCnt, S: LockStorage> ArcSwapAny<T, S> {
    /// Constructs a new storage holding `val`.
    pub fn new(val: T) -> Self {
        Self::from(val)
    }
    /// Consumes the storage and returns the held value.
    pub fn into_inner(mut self) -> T {
        // &mut self: no concurrent access, so a plain get_mut suffices.
        let ptr = *self.ptr.get_mut();
        // Settle outstanding debts so the reference count is accurate.
        self.wait_for_readers(ptr);
        // Skip Drop; the storage's single reference moves into the result.
        mem::forget(self);
        unsafe { T::from_ptr(ptr) }
    }
    /// Loads a full reference-counted copy of the stored value.
    pub fn load(&self) -> T {
        Guard::upgrade(&self.peek())
    }
    // Shared implementation of peek/peek_signal_safe.
    fn peek_inner(&self, signal_safe: SignalSafety) -> Guard<T, S> {
        // Take the generation lock *before* reading the pointer: once the
        // lock is held, a writer must wait for us before destroying
        // whatever pointer we read next.
        let gen = GenLock::new(signal_safe, &self.lock_storage);
        let ptr = self.ptr.load(Ordering::Acquire);
        Guard {
            lock: Some(gen),
            _arc_swap: PhantomData,
            ptr,
        }
    }
    /// Borrows the stored value without touching the reference count.
    ///
    /// Cheap, but the returned [`Guard`] blocks writers while it lives —
    /// keep it short-lived.
    pub fn peek(&self) -> Guard<T, S> {
        self.peek_inner(SignalSafety::Unsafe)
    }
    /// Like [`peek`](#method.peek), but safe to call from within a signal
    /// handler.
    pub fn peek_signal_safe(&self) -> Guard<T, S> {
        self.peek_inner(SignalSafety::Safe)
    }
    // Tries the debt-based lease fast path; None means "retry/fallback"
    // (either no free debt slot, or we lost a race and recovered).
    #[inline]
    fn lease_fallible(&self) -> Option<Lease<T>> {
        // Relaxed is enough for the candidate read: it is re-checked with
        // Acquire after the debt has been published.
        let ptr = self.ptr.load(Ordering::Relaxed);
        let debt = Debt::new(ptr as usize)?;
        let confirm = self.ptr.load(Ordering::Acquire);
        if ptr == confirm {
            // Unchanged in between: the debt genuinely protects ptr.
            Some(Lease {
                ptr,
                debt: Some(debt),
                _data: PhantomData,
            })
        } else if debt.pay::<T>(ptr) {
            // The pointer was swapped under us and we retracted the debt
            // ourselves; tell the caller to try again.
            None
        } else {
            // A writer paid the debt (bumped the refcount) before we could
            // retract it, so this lease owns a full reference after all.
            Some(Lease {
                ptr,
                debt: None,
                _data: PhantomData,
            })
        }
    }
    /// Provides a [`Lease`] of the stored value.
    ///
    /// Cheaper than [`load`](#method.load) and, unlike
    /// [`peek`](#method.peek), does not keep writers blocked.
    #[allow(deprecated)]
    #[inline]
    pub fn lease(&self) -> Lease<T> {
        // If every debt slot is busy, fall back to a full reference taken
        // under the generation lock.
        self.lease_fallible()
            .unwrap_or_else(|| Guard::lease(&self.peek()))
    }
    /// Replaces the stored value, dropping the previous one.
    pub fn store(&self, val: T) {
        drop(self.swap(val));
    }
    /// Replaces the stored value and returns the previous one.
    pub fn swap(&self, new: T) -> T {
        let new = T::into_ptr(new);
        // SeqCst: must be globally ordered with the readers' counter
        // increments in GenLock::new so neither side misses the other.
        let old = self.ptr.swap(new, Ordering::SeqCst);
        // Wait until no Guard/Lease may still refer to the old pointer.
        self.wait_for_readers(old);
        unsafe { T::from_ptr(old) }
    }
    /// Stores `new` only if the currently stored pointer equals `current`.
    ///
    /// Returns a lease of the previous value; compare it against `current`
    /// (e.g. with [`ptr_eq`]) to learn whether the swap took place.
    pub fn compare_and_swap<C: AsRaw<T::Base>>(&self, current: C, new: T) -> Lease<T> {
        let cur_ptr = current.as_raw();
        let new = T::into_ptr(new);
        // Hold a generation lock across the CAS so the previous pointer
        // stays alive until a debt or a reference has been secured for it.
        let gen = GenLock::new(SignalSafety::Unsafe, &self.lock_storage);
        let previous_ptr = self.ptr.compare_and_swap(cur_ptr, new, Ordering::SeqCst);
        let swapped = ptr::eq(cur_ptr, previous_ptr);
        // Drop `current` only after its raw pointer has been used.
        drop(current);
        let debt = if swapped {
            // Success: the storage gave up its reference to previous_ptr;
            // that reference moves into the returned lease, no debt needed.
            None
        } else {
            // Failure: the storage keeps its reference, so the lease needs
            // its own protection — a debt slot, or a fresh reference when
            // none is free.
            let debt = Debt::new(previous_ptr as usize);
            if debt.is_none() {
                let previous = unsafe { T::from_ptr(previous_ptr) };
                T::inc(&previous);
                T::into_ptr(previous);
            }
            debt
        };
        gen.unlock();
        if swapped {
            // Make sure no old reader still uses previous_ptr before its
            // ownership is handed to the caller.
            self.wait_for_readers(previous_ptr);
        } else {
            // `new` was not installed; give back the reference we took.
            unsafe { T::dec(new) };
        }
        Lease {
            ptr: previous_ptr,
            debt,
            _data: PhantomData,
        }
    }
    // Spins until every reader that might have seen `old` is gone, then
    // converts any debts still recorded against it into real references.
    fn wait_for_readers(&self, old: *const T::Base) {
        let mut seen_group = [false; GEN_CNT];
        let mut iter = 0usize;
        // Readers registered before our swap live in one of the GEN_CNT
        // generation groups; once every group has been observed empty at
        // least once, all such readers have finished.
        while !seen_group.iter().all(|seen| *seen) {
            let gen = self.lock_storage.gen_idx().load(Ordering::Relaxed);
            // Sum the per-shard counters for both generations.
            let groups = self
                .lock_storage
                .shards()
                .as_ref()
                .iter()
                .fold([0, 0], |[a1, a2], s| {
                    let [v1, v2] = s.snapshot();
                    [a1 + v1, a2 + v2]
                });
            // Rotate to the other generation if it is currently free, so
            // newly arriving readers stop prolonging the wait on this one.
            let next_gen = gen.wrapping_add(1);
            if groups[next_gen % GEN_CNT] == 0 {
                self.lock_storage
                    .gen_idx()
                    .compare_and_swap(gen, next_gen, Ordering::Relaxed);
            }
            for i in 0..GEN_CNT {
                seen_group[i] = seen_group[i] || (groups[i] == 0);
            }
            // Busy-wait, yielding to the OS scheduler now and then.
            iter = iter.wrapping_add(1);
            if iter % YIELD_EVERY == 0 {
                thread::yield_now();
            } else {
                atomic::spin_loop_hint();
            }
        }
        // Pay off any debts that still refer to the old pointer so the
        // caller may safely release its reference.
        Debt::pay_all::<T>(old);
    }
    /// Read-Copy-Update: repeatedly computes a replacement from the
    /// current value and installs it via compare-and-swap until no other
    /// writer interferes.
    ///
    /// `f` may run multiple times under contention, so keep it free of
    /// side effects. Returns the replaced value.
    pub fn rcu<R, F>(&self, mut f: F) -> T
    where
        F: FnMut(&Lease<T>) -> R,
        R: Into<T>,
    {
        let mut cur = self.lease();
        loop {
            let new = f(&cur).into();
            let prev = self.compare_and_swap(&cur, new);
            let swapped = ptr_eq(&cur, &prev);
            if swapped {
                return Lease::into_upgrade(prev);
            } else {
                // Lost a race against another writer; retry from the value
                // it installed.
                cur = prev;
            }
        }
    }
}
/// An atomic storage for `Arc<T>`.
pub type ArcSwap<T> = ArcSwapAny<Arc<T>>;
impl<T, S: LockStorage> ArcSwapAny<Arc<T>, S> {
    /// Creates the storage directly from a value, wrapping it in an `Arc`.
    pub fn from_pointee(val: T) -> Self {
        Self::from(Arc::new(val))
    }
    /// Like [`rcu`](#method.rcu), but unwraps the replaced `Arc` and
    /// returns the value itself.
    pub fn rcu_unwrap<R, F>(&self, mut f: F) -> T
    where
        F: FnMut(&T) -> R,
        R: Into<Arc<T>>,
    {
        let mut wrapped = self.rcu(|prev| f(&*prev));
        loop {
            match Arc::try_unwrap(wrapped) {
                Ok(val) => return val,
                Err(w) => {
                    // Someone else still holds a (likely transient)
                    // reference; yield and retry until it is released.
                    wrapped = w;
                    thread::yield_now();
                }
            }
        }
    }
}
impl<T, S: LockStorage> ArcSwapAny<Option<Arc<T>>, S> {
    /// Creates the storage directly from a value (anything convertible to
    /// `Option<T>`), wrapping it in an `Arc` when present.
    pub fn from_pointee<V: Into<Option<T>>>(val: V) -> Self {
        let inner: Option<T> = val.into();
        Self::new(inner.map(Arc::new))
    }
    /// Creates an empty (`None`) storage.
    pub fn empty() -> Self {
        Self::new(None)
    }
}
impl<T: RefCnt + Default, S: LockStorage> Default for ArcSwapAny<T, S> {
    /// Creates a storage holding `T::default()`.
    fn default() -> Self {
        // `new` simply delegates to `from`, so use `from` directly.
        Self::from(T::default())
    }
}
/// An atomic storage for `Option<Arc<T>>`, allowing an empty state.
pub type ArcSwapOption<T> = ArcSwapAny<Option<Arc<T>>>;
/// An `ArcSwap` with its own private generation-lock storage, so its
/// readers never contend with readers of other instances.
pub type IndependentArcSwap<T> = ArcSwapAny<Arc<T>, PrivateUnsharded>;
// Unit tests. Many are small stress tests run in a loop to tease out races.
#[cfg(test)]
mod tests {
    extern crate crossbeam_utils;
    use std::panic;
    use std::sync::atomic::AtomicUsize;
    use std::sync::Barrier;
    use self::crossbeam_utils::thread;
    use super::*;
    // Readers spin until a writer publishes; all of them must see the new
    // value and the refcounts must settle afterwards (storage + probe).
    #[test]
    fn publish() {
        for _ in 0..100 {
            let config = ArcSwap::<String>::default();
            let ended = AtomicUsize::new(0);
            thread::scope(|scope| {
                for _ in 0..20 {
                    scope.spawn(|_| loop {
                        let cfg = config.load();
                        if !cfg.is_empty() {
                            assert_eq!(*cfg, "New configuration");
                            ended.fetch_add(1, Ordering::Relaxed);
                            return;
                        }
                        atomic::spin_loop_hint();
                    });
                }
                scope.spawn(|_| {
                    let new_conf = Arc::new("New configuration".to_owned());
                    config.store(new_conf);
                });
            })
            .unwrap();
            assert_eq!(20, ended.load(Ordering::Relaxed));
            assert_eq!(2, Arc::strong_count(&config.load()));
            assert_eq!(0, Arc::weak_count(&config.load()));
        }
    }
    // Basic swap/load round trip with exact refcount bookkeeping.
    #[test]
    fn swap_load() {
        for _ in 0..100 {
            let arc = Arc::new(42);
            let arc_swap = ArcSwap::from(Arc::clone(&arc));
            assert_eq!(42, *arc_swap.load());
            assert_eq!(42, *arc_swap.load());
            let new_arc = Arc::new(0);
            assert_eq!(42, *arc_swap.swap(Arc::clone(&new_arc)));
            assert_eq!(0, *arc_swap.load());
            assert_eq!(3, Arc::strong_count(&arc_swap.load()));
            assert_eq!(0, Arc::weak_count(&arc_swap.load()));
            assert_eq!(1, Arc::strong_count(&arc));
            assert_eq!(0, Arc::weak_count(&arc));
        }
    }
    // Each writer stores an increasing sequence; readers must never see a
    // writer's sequence number go backwards.
    #[test]
    fn multi_writers() {
        let first_value = Arc::new((0, 0));
        let shared = ArcSwap::from(Arc::clone(&first_value));
        const WRITER_CNT: usize = 2;
        const READER_CNT: usize = 3;
        const ITERATIONS: usize = 100;
        const SEQ: usize = 50;
        let barrier = Barrier::new(READER_CNT + WRITER_CNT);
        thread::scope(|scope| {
            for w in 0..WRITER_CNT {
                let barrier = &barrier;
                let shared = &shared;
                let first_value = &first_value;
                scope.spawn(move |_| {
                    for _ in 0..ITERATIONS {
                        barrier.wait();
                        shared.store(Arc::clone(&first_value));
                        barrier.wait();
                        for i in 0..SEQ {
                            shared.store(Arc::new((w, i + 1)));
                        }
                    }
                });
            }
            for _ in 0..READER_CNT {
                scope.spawn(|_| {
                    for _ in 0..ITERATIONS {
                        barrier.wait();
                        barrier.wait();
                        let mut previous = [0; 2];
                        let mut last = Arc::clone(&first_value);
                        loop {
                            let cur = shared.load();
                            if Arc::ptr_eq(&last, &cur) {
                                atomic::spin_loop_hint();
                                continue;
                            }
                            let (w, s) = *cur;
                            assert!(previous[w] < s);
                            previous[w] = s;
                            last = cur;
                            if s == SEQ {
                                break;
                            }
                        }
                    }
                });
            }
        })
        .unwrap();
    }
    // compare_and_swap must keep exact reference counts whether or not
    // leases (debt slots) are in flight during the operation.
    #[test]
    fn cas_ref_cnt() {
        const ITERATIONS: usize = 50;
        let shared = ArcSwap::from(Arc::new(0));
        for i in 0..ITERATIONS {
            let orig = shared.load();
            assert_eq!(i, *orig);
            if i % 2 == 1 {
                assert_eq!(2, Arc::strong_count(&orig));
            }
            let n1 = Arc::new(i + 1);
            // On even iterations, saturate the debt slots with leases.
            let fillup = || {
                if i % 2 == 0 {
                    Some(
                        (0..50)
                            .into_iter()
                            .map(|_| shared.lease())
                            .collect::<Vec<_>>(),
                    )
                } else {
                    None
                }
            };
            let leases = fillup();
            // Successful CAS: orig is replaced by n1.
            let prev = shared.compare_and_swap(&orig, Arc::clone(&n1));
            assert!(ptr_eq(&orig, &prev));
            drop(leases);
            assert_eq!(2, Arc::strong_count(&orig));
            assert_eq!(2, Arc::strong_count(&n1));
            assert_eq!(i + 1, *shared.peek());
            let n2 = Arc::new(i);
            drop(prev);
            let leases = fillup();
            // Failed CAS: orig is no longer the stored value.
            let prev = Lease::into_upgrade(shared.compare_and_swap(&orig, Arc::clone(&n2)));
            drop(leases);
            assert!(ptr_eq(&n1, &prev));
            assert_eq!(1, Arc::strong_count(&orig));
            assert_eq!(3, Arc::strong_count(&n1));
            assert_eq!(1, Arc::strong_count(&n2));
            assert_eq!(i + 1, *shared.peek());
        }
        let a = shared.load();
        assert_eq!(2, Arc::strong_count(&a));
        drop(shared);
        assert_eq!(1, Arc::strong_count(&a));
    }
    // Concurrent rcu increments must not lose any update.
    #[test]
    fn rcu() {
        const ITERATIONS: usize = 50;
        const THREADS: usize = 10;
        let shared = ArcSwap::from(Arc::new(0));
        thread::scope(|scope| {
            for _ in 0..THREADS {
                scope.spawn(|_| {
                    for _ in 0..ITERATIONS {
                        shared.rcu(|old| **old + 1);
                    }
                });
            }
        })
        .unwrap();
        assert_eq!(THREADS * ITERATIONS, *shared.load());
    }
    // Same as `rcu`, exercised through the unwrapping variant.
    #[test]
    fn rcu_unwrap() {
        const ITERATIONS: usize = 50;
        const THREADS: usize = 10;
        let shared = ArcSwap::from(Arc::new(0));
        thread::scope(|scope| {
            for _ in 0..THREADS {
                scope.spawn(|_| {
                    for _ in 0..ITERATIONS {
                        shared.rcu_unwrap(|old| *old + 1);
                    }
                });
            }
        })
        .unwrap();
        assert_eq!(THREADS * ITERATIONS, *shared.load());
    }
    // Null (None) handling in ArcSwapOption, including CAS against null.
    #[test]
    fn nulls() {
        let shared = ArcSwapOption::from(Some(Arc::new(0)));
        let orig = shared.swap(None);
        assert_eq!(1, Arc::strong_count(&orig.unwrap()));
        let null = shared.load();
        assert!(null.is_none());
        let a = Arc::new(42);
        let orig = shared.compare_and_swap(ptr::null(), Some(Arc::clone(&a)));
        assert!(Lease::is_null(&orig));
        assert_eq!(2, Arc::strong_count(&a));
        let orig = Lease::into_upgrade(shared.compare_and_swap(&None::<Arc<_>>, None));
        assert_eq!(3, Arc::strong_count(&a));
        assert!(ptr_eq(&a, &orig));
    }
    // rcu called recursively from inside its own closure must not deadlock.
    #[test]
    fn recursive() {
        let shared = ArcSwap::from(Arc::new(0));
        shared.rcu(|i| {
            if **i < 10 {
                shared.rcu(|i| **i + 1);
            }
            **i
        });
        assert_eq!(10, *shared.peek());
        assert_eq!(2, Arc::strong_count(&shared.load()));
    }
    // A panicking rcu closure must leave the storage usable and leak-free.
    #[test]
    fn rcu_panic() {
        let shared = ArcSwap::from(Arc::new(0));
        assert!(panic::catch_unwind(|| shared.rcu(|_| -> usize { panic!() })).is_err());
        assert_eq!(1, Arc::strong_count(&shared.swap(Arc::new(42))));
    }
    // Leases only materialize a real reference when the value is replaced
    // while leased, or on explicit upgrade.
    #[test]
    fn lease_cnt() {
        let a = Arc::new(0);
        let shared = ArcSwap::from(Arc::clone(&a));
        assert_eq!(2, Arc::strong_count(&a));
        let lease = shared.lease();
        assert_eq!(0, *lease);
        // A plain lease does not bump the count.
        assert_eq!(2, Arc::strong_count(&a));
        let lease_2 = shared.lease();
        // Replacing the value pays the debts, bumping the count.
        shared.store(Arc::new(1));
        assert_eq!(3, Arc::strong_count(&a));
        drop(lease_2);
        assert_eq!(2, Arc::strong_count(&a));
        let _b = Lease::upgrade(&lease);
        assert_eq!(3, Arc::strong_count(&a));
        drop(lease);
        assert_eq!(2, Arc::strong_count(&a));
        let lease = shared.lease();
        assert_eq!(1, *lease);
        // A lease stays valid even after the storage itself is gone.
        drop(shared);
        assert_eq!(1, *lease);
        let ptr = Lease::upgrade(&lease);
        assert_eq!(2, Arc::strong_count(&ptr));
        drop(lease);
        assert_eq!(1, Arc::strong_count(&ptr));
    }
    // When debt slots run out, further leases fall back to full references.
    #[test]
    fn lease_overflow() {
        let a = Arc::new(0);
        let shared = ArcSwap::from(Arc::clone(&a));
        assert_eq!(2, Arc::strong_count(&a));
        let mut leases = (0..1000)
            .into_iter()
            .map(|_| shared.lease())
            .collect::<Vec<_>>();
        let count = Arc::strong_count(&a);
        assert!(count > 2);
        let lease = shared.lease();
        assert_eq!(count + 1, Arc::strong_count(&a));
        drop(lease);
        assert_eq!(count, Arc::strong_count(&a));
        // Freeing one lease frees a debt slot, so the next lease is cheap.
        leases.swap_remove(0);
        let _lease = shared.lease();
        assert_eq!(count, Arc::strong_count(&a));
    }
    // Leasing an empty storage yields a null lease.
    #[test]
    fn lease_null() {
        let shared = ArcSwapOption::<usize>::default();
        let lease = shared.lease();
        assert!(Lease::get_ref(&lease).is_none());
        shared.store(Some(Arc::new(42)));
        assert_eq!(42, *Lease::get_ref(&shared.lease()).unwrap());
    }
    // into_inner pays off the outstanding lease's debt, so afterwards both
    // the returned value and the lease each hold one reference.
    #[test]
    fn from_into() {
        let a = Arc::new(42);
        let shared = ArcSwap::new(a);
        let lease = shared.lease();
        let a = shared.into_inner();
        assert_eq!(42, *a);
        assert_eq!(2, Arc::strong_count(&a));
        drop(lease);
        assert_eq!(1, Arc::strong_count(&a));
    }
    // Counts drops of the payload through a shared counter.
    #[derive(Default)]
    struct ReportDrop(Arc<AtomicUsize>);
    impl Drop for ReportDrop {
        fn drop(&mut self) {
            self.0.fetch_add(1, Ordering::Relaxed);
        }
    }
    const ITERATIONS: usize = 50;
    // A value replaced while leased in another thread is dropped only once
    // that lease goes away (barriers enforce the interleaving).
    #[test]
    fn lease_drop_in_thread() {
        for _ in 0..ITERATIONS {
            let cnt = Arc::new(AtomicUsize::new(0));
            let shared = ArcSwap::from_pointee(ReportDrop(cnt.clone()));
            assert_eq!(cnt.load(Ordering::Relaxed), 0, "Dropped prematurely");
            let sync = Barrier::new(2);
            thread::scope(|scope| {
                scope.spawn(|_| {
                    let lease = shared.lease();
                    sync.wait();
                    sync.wait();
                    drop(lease);
                    assert_eq!(cnt.load(Ordering::Relaxed), 1, "Value not dropped");
                    sync.wait();
                });
                scope.spawn(|_| {
                    sync.wait();
                    shared.store(Default::default());
                    assert_eq!(cnt.load(Ordering::Relaxed), 0, "Dropped while still leased");
                    sync.wait();
                    sync.wait();
                    assert_eq!(cnt.load(Ordering::Relaxed), 1, "Value not dropped");
                });
            })
            .unwrap();
        }
    }
    // A lease may outlive the storage and be dropped in another thread.
    #[test]
    fn lease_drop_in_another_thread() {
        for _ in 0..ITERATIONS {
            let cnt = Arc::new(AtomicUsize::new(0));
            let shared = ArcSwap::from_pointee(ReportDrop(cnt.clone()));
            assert_eq!(cnt.load(Ordering::Relaxed), 0, "Dropped prematurely");
            let lease = shared.lease();
            drop(shared);
            assert_eq!(cnt.load(Ordering::Relaxed), 0, "Dropped prematurely");
            thread::scope(|scope| {
                scope.spawn(|_| {
                    drop(lease);
                });
            })
            .unwrap();
            assert_eq!(cnt.load(Ordering::Relaxed), 1, "Not dropped");
        }
    }
    // swap waits for a guard to be released even when the guard is dropped
    // in another thread.
    #[test]
    fn guard_drop_in_another_thread() {
        for _ in 0..ITERATIONS {
            let cnt = Arc::new(AtomicUsize::new(0));
            let shared = ArcSwap::from_pointee(ReportDrop(cnt.clone()));
            assert_eq!(cnt.load(Ordering::Relaxed), 0, "Dropped prematurely");
            let guard = shared.peek();
            thread::scope(|scope| {
                scope.spawn(|_| {
                    drop(guard);
                });
                assert_eq!(cnt.load(Ordering::Relaxed), 0, "Dropped prematurely");
                shared.swap(Default::default());
                assert_eq!(cnt.load(Ordering::Relaxed), 1, "Not dropped");
            })
            .unwrap();
        }
    }
    // unwrap/expect on a non-null optional lease succeed.
    #[test]
    fn lease_unwrap() {
        let shared = ArcSwapOption::from_pointee(42);
        assert_eq!(42, *shared.lease().unwrap());
        assert_eq!(42, *shared.lease().expect("Failed"));
    }
    // unwrap/expect on a null optional lease panic.
    #[test]
    fn lease_unwrap_none() {
        let shared: ArcSwapOption<usize> = ArcSwapOption::empty();
        panic::catch_unwind(|| shared.lease().unwrap()).unwrap_err();
        panic::catch_unwind(|| shared.lease().expect("Failed")).unwrap_err();
    }
    // into_option maps null to None and a value to Some.
    #[test]
    fn lease_option() {
        let shared = ArcSwapOption::from_pointee(42);
        let opt: Option<_> = shared.lease().into_option();
        assert_eq!(42, *opt.unwrap());
        shared.store(None);
        assert!(shared.lease().into_option().is_none());
    }
    // Compile-time check that the handles and references to them can be
    // moved into another thread (never actually run as a test).
    fn _check_stuff_is_send_sync() {
        let shared = ArcSwap::from_pointee(42);
        let moved = ArcSwap::from_pointee(42);
        let shared_ref = &shared;
        let lease = shared.lease();
        let lease_ref = &lease;
        let lease = shared.lease();
        let guard = shared.peek();
        let guard_ref = &guard;
        let guard = shared.peek();
        thread::scope(|s| {
            s.spawn(move |_| {
                drop(guard);
                drop(guard_ref);
                drop(lease);
                drop(lease_ref);
                drop(shared_ref);
                drop(moved);
            });
        })
        .unwrap();
    }
}