use core::cell::UnsafeCell;
use core::fmt;
use core::marker::PhantomData;
use core::mem;
use core::ops::{Deref, DerefMut};
#[cfg(feature = "owning_ref")]
use owning_ref::StableAddress;
#[cfg(feature = "serde")]
use serde::{Deserialize, Deserializer, Serialize, Serializer};
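/// Basic operations for a reader-writer lock.
///
/// Types implementing this trait can be used by `RwLock` to form a safe and
/// fully-functioning `RwLock` type.
///
/// # Safety
///
/// Implementations of this trait must ensure that the `RwLock` is actually
/// exclusive: an exclusive lock can't be acquired while an exclusive or shared
/// lock exists, and a shared lock can't be acquired while an exclusive lock
/// exists.
///
/// A minimal sketch of an implementation, spinning on an `AtomicUsize` state
/// word. The name `RawSpinRwLock` is hypothetical and not part of this crate,
/// and `GuardSend` is assumed to be the crate's marker type for guards that
/// may be sent between threads:
///
/// ```ignore
/// use core::sync::atomic::{AtomicUsize, Ordering};
///
/// // State: 0 = unlocked, usize::MAX = write-locked,
/// // any other value n = n active readers.
/// pub struct RawSpinRwLock {
///     state: AtomicUsize,
/// }
///
/// unsafe impl RawRwLock for RawSpinRwLock {
///     const INIT: Self = RawSpinRwLock {
///         state: AtomicUsize::new(0),
///     };
///     type GuardMarker = GuardSend;
///
///     fn lock_shared(&self) {
///         while !self.try_lock_shared() {
///             core::hint::spin_loop();
///         }
///     }
///
///     fn try_lock_shared(&self) -> bool {
///         let s = self.state.load(Ordering::Relaxed);
///         // Refuse if write-locked, or if one more reader would collide
///         // with the write-locked sentinel value.
///         s < usize::MAX - 1
///             && self
///                 .state
///                 .compare_exchange(s, s + 1, Ordering::Acquire, Ordering::Relaxed)
///                 .is_ok()
///     }
///
///     fn unlock_shared(&self) {
///         self.state.fetch_sub(1, Ordering::Release);
///     }
///
///     fn lock_exclusive(&self) {
///         while !self.try_lock_exclusive() {
///             core::hint::spin_loop();
///         }
///     }
///
///     fn try_lock_exclusive(&self) -> bool {
///         self.state
///             .compare_exchange(0, usize::MAX, Ordering::Acquire, Ordering::Relaxed)
///             .is_ok()
///     }
///
///     fn unlock_exclusive(&self) {
///         self.state.store(0, Ordering::Release);
///     }
/// }
/// ```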
pub unsafe trait RawRwLock {
    /// Initial value for an unlocked `RwLock`.
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self;
    /// Marker type which determines whether a lock guard should be `Send`.
    type GuardMarker;
    /// Acquires a shared lock, blocking the current thread until it is able to do so.
    fn lock_shared(&self);
    /// Attempts to acquire a shared lock without blocking.
    fn try_lock_shared(&self) -> bool;
    /// Releases a shared lock.
    fn unlock_shared(&self);
    /// Acquires an exclusive lock, blocking the current thread until it is able to do so.
    fn lock_exclusive(&self);
    /// Attempts to acquire an exclusive lock without blocking.
    fn try_lock_exclusive(&self) -> bool;
    /// Releases an exclusive lock.
    fn unlock_exclusive(&self);
}
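/// Additional methods for RwLocks which support fair unlocking.
///
/// Fair unlocking means that a lock is handed directly over to the next waiting
/// thread if there is one, without giving other threads the opportunity to
/// "steal" the lock in the meantime. This is typically slower than unfair
/// unlocking, but may be necessary in certain circumstances.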
pub unsafe trait RawRwLockFair: RawRwLock {
    /// Releases a shared lock using a fair unlock protocol.
    fn unlock_shared_fair(&self);
    /// Releases an exclusive lock using a fair unlock protocol.
    fn unlock_exclusive_fair(&self);
    /// Temporarily yields a shared lock to a waiting thread if there is one.
    ///
    /// This method is functionally equivalent to calling `unlock_shared_fair`
    /// followed by `lock_shared`, however it can be much more efficient in the
    /// case where there are no waiting threads.
    fn bump_shared(&self) {
        self.unlock_shared_fair();
        self.lock_shared();
    }
    /// Temporarily yields an exclusive lock to a waiting thread if there is one.
    ///
    /// This method is functionally equivalent to calling `unlock_exclusive_fair`
    /// followed by `lock_exclusive`, however it can be much more efficient in
    /// the case where there are no waiting threads.
    fn bump_exclusive(&self) {
        self.unlock_exclusive_fair();
        self.lock_exclusive();
    }
}
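/// Additional methods for RwLocks which support atomically downgrading an
/// exclusive lock to a shared lock.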
pub unsafe trait RawRwLockDowngrade: RawRwLock {
    /// Atomically downgrades an exclusive lock into a shared lock without
    /// allowing any thread to take an exclusive lock in the meantime.
    fn downgrade(&self);
}
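/// Additional methods for RwLocks which support locking with timeouts.
///
/// The `Duration` and `Instant` types are specified as associated types so
/// that this trait remains usable in `no_std` environments.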
pub unsafe trait RawRwLockTimed: RawRwLock {
    /// Duration type used for the `try_lock_*_for` methods.
    type Duration;
    /// Instant type used for the `try_lock_*_until` methods.
    type Instant;
    /// Attempts to acquire a shared lock until a timeout is reached.
    fn try_lock_shared_for(&self, timeout: Self::Duration) -> bool;
    /// Attempts to acquire a shared lock until a timeout instant is reached.
    fn try_lock_shared_until(&self, timeout: Self::Instant) -> bool;
    /// Attempts to acquire an exclusive lock until a timeout is reached.
    fn try_lock_exclusive_for(&self, timeout: Self::Duration) -> bool;
    /// Attempts to acquire an exclusive lock until a timeout instant is reached.
    fn try_lock_exclusive_until(&self, timeout: Self::Instant) -> bool;
}
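/// Additional methods for RwLocks which support recursive read locks.
///
/// These are guaranteed to succeed without blocking if another read lock is
/// already held by the current thread, even if there are writers waiting.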
pub unsafe trait RawRwLockRecursive: RawRwLock {
    /// Acquires a shared lock without deadlocking in case of a recursive lock.
    fn lock_shared_recursive(&self);
    /// Attempts to acquire a shared lock without deadlocking in case of a
    /// recursive lock.
    fn try_lock_shared_recursive(&self) -> bool;
}
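/// Additional methods for RwLocks which support recursive read locks and
/// timeouts.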
pub unsafe trait RawRwLockRecursiveTimed: RawRwLockRecursive + RawRwLockTimed {
    /// Attempts to acquire a shared lock until a timeout is reached, without
    /// deadlocking in case of a recursive lock.
    fn try_lock_shared_recursive_for(&self, timeout: Self::Duration) -> bool;
    /// Attempts to acquire a shared lock until a timeout instant is reached,
    /// without deadlocking in case of a recursive lock.
    fn try_lock_shared_recursive_until(&self, timeout: Self::Instant) -> bool;
}
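/// Additional methods for RwLocks which support atomically upgrading a shared
/// lock to an exclusive lock.
///
/// This requires acquiring a special "upgradable read lock" instead of a
/// normal shared lock. There may only be one upgradable lock at any time,
/// otherwise deadlocks could occur when upgrading.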
pub unsafe trait RawRwLockUpgrade: RawRwLock {
    /// Acquires an upgradable lock, blocking the current thread until it is
    /// able to do so.
    fn lock_upgradable(&self);
    /// Attempts to acquire an upgradable lock without blocking.
    fn try_lock_upgradable(&self) -> bool;
    /// Releases an upgradable lock.
    fn unlock_upgradable(&self);
    /// Upgrades an upgradable lock to an exclusive lock.
    fn upgrade(&self);
    /// Attempts to upgrade an upgradable lock to an exclusive lock without
    /// blocking.
    fn try_upgrade(&self) -> bool;
}
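/// Additional methods for RwLocks which support upgradable locks and fair
/// unlocking.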
pub unsafe trait RawRwLockUpgradeFair: RawRwLockUpgrade + RawRwLockFair {
    /// Releases an upgradable lock using a fair unlock protocol.
    fn unlock_upgradable_fair(&self);
    /// Temporarily yields an upgradable lock to a waiting thread if there is one.
    ///
    /// This method is functionally equivalent to calling `unlock_upgradable_fair`
    /// followed by `lock_upgradable`, however it can be much more efficient in
    /// the case where there are no waiting threads.
    fn bump_upgradable(&self) {
        self.unlock_upgradable_fair();
        self.lock_upgradable();
    }
}
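/// Additional methods for RwLocks which support upgradable locks and lock
/// downgrading.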
pub unsafe trait RawRwLockUpgradeDowngrade: RawRwLockUpgrade + RawRwLockDowngrade {
    /// Downgrades an upgradable lock to a shared lock.
    fn downgrade_upgradable(&self);
    /// Downgrades an exclusive lock to an upgradable lock.
    fn downgrade_to_upgradable(&self);
}
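/// Additional methods for RwLocks which support upgradable locks and locking
/// with timeouts.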
pub unsafe trait RawRwLockUpgradeTimed: RawRwLockUpgrade + RawRwLockTimed {
    /// Attempts to acquire an upgradable lock until a timeout is reached.
    fn try_lock_upgradable_for(&self, timeout: Self::Duration) -> bool;
    /// Attempts to acquire an upgradable lock until a timeout instant is
    /// reached.
    fn try_lock_upgradable_until(&self, timeout: Self::Instant) -> bool;
    /// Attempts to upgrade an upgradable lock to an exclusive lock until a
    /// timeout is reached.
    fn try_upgrade_for(&self, timeout: Self::Duration) -> bool;
    /// Attempts to upgrade an upgradable lock to an exclusive lock until a
    /// timeout instant is reached.
    fn try_upgrade_until(&self, timeout: Self::Instant) -> bool;
}
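/// A reader-writer lock.
///
/// This type of lock allows a number of readers or at most one writer at any
/// point in time. The write portion of this lock typically allows modification
/// of the underlying data (exclusive access) and the read portion of this lock
/// typically allows for read-only access (shared access).
///
/// A usage sketch, assuming a concrete raw lock type `RawSpinRwLock` is in
/// scope (that name is hypothetical; any type implementing `RawRwLock` works):
///
/// ```ignore
/// type SpinRwLock<T> = RwLock<RawSpinRwLock, T>;
///
/// let lock = SpinRwLock::new(5);
///
/// // Any number of shared (read) guards may coexist.
/// {
///     let r1 = lock.read();
///     let r2 = lock.read();
///     assert_eq!(*r1 + *r2, 10);
/// } // both read guards are dropped here, releasing the shared lock
///
/// // An exclusive (write) guard grants mutable access to the data.
/// {
///     let mut w = lock.write();
///     *w += 1;
///     assert_eq!(*w, 6);
/// }
/// ```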
pub struct RwLock<R: RawRwLock, T: ?Sized> {
raw: R,
data: UnsafeCell<T>,
}
#[cfg(feature = "serde")]
impl<R, T> Serialize for RwLock<R, T>
where
R: RawRwLock,
T: Serialize + ?Sized,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.read().serialize(serializer)
}
}
#[cfg(feature = "serde")]
impl<'de, R, T> Deserialize<'de> for RwLock<R, T>
where
R: RawRwLock,
T: Deserialize<'de> + ?Sized,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Deserialize::deserialize(deserializer).map(RwLock::new)
}
}
unsafe impl<R: RawRwLock + Send, T: ?Sized + Send> Send for RwLock<R, T> {}
unsafe impl<R: RawRwLock + Sync, T: ?Sized + Send + Sync> Sync for RwLock<R, T> {}
impl<R: RawRwLock, T> RwLock<R, T> {
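    /// Creates a new instance of an `RwLock<T>` which is unlocked.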
#[cfg(feature = "nightly")]
#[inline]
pub const fn new(val: T) -> RwLock<R, T> {
RwLock {
data: UnsafeCell::new(val),
raw: R::INIT,
}
}
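    /// Creates a new instance of an `RwLock<T>` which is unlocked.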
#[cfg(not(feature = "nightly"))]
#[inline]
pub fn new(val: T) -> RwLock<R, T> {
RwLock {
data: UnsafeCell::new(val),
raw: R::INIT,
}
}
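    /// Consumes this `RwLock`, returning the underlying data.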
#[inline]
#[allow(unused_unsafe)]
pub fn into_inner(self) -> T {
unsafe { self.data.into_inner() }
}
}
impl<R: RawRwLock, T: ?Sized> RwLock<R, T> {
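    /// Creates a new `RwLockReadGuard` without checking if the lock is held.
    ///
    /// # Safety
    ///
    /// The shared lock must be held before calling this method.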
#[inline]
unsafe fn read_guard(&self) -> RwLockReadGuard<'_, R, T> {
RwLockReadGuard {
rwlock: self,
marker: PhantomData,
}
}
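    /// Creates a new `RwLockWriteGuard` without checking if the lock is held.
    ///
    /// # Safety
    ///
    /// The exclusive lock must be held before calling this method.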
#[inline]
unsafe fn write_guard(&self) -> RwLockWriteGuard<'_, R, T> {
RwLockWriteGuard {
rwlock: self,
marker: PhantomData,
}
}
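    /// Locks this `RwLock` with shared read access, blocking the current
    /// thread until it can be acquired.
    ///
    /// Note that attempts to recursively acquire a read lock on a `RwLock`
    /// when the current thread already holds one may result in a deadlock.
    ///
    /// Returns an RAII guard which will release this thread's shared access
    /// once it is dropped.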
#[inline]
pub fn read(&self) -> RwLockReadGuard<'_, R, T> {
self.raw.lock_shared();
unsafe { self.read_guard() }
}
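    /// Attempts to acquire this `RwLock` with shared read access.
    ///
    /// If the access could not be granted at this time, then `None` is
    /// returned. Otherwise, an RAII guard is returned which will release the
    /// shared access when it is dropped. This function does not block.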
#[inline]
pub fn try_read(&self) -> Option<RwLockReadGuard<'_, R, T>> {
if self.raw.try_lock_shared() {
Some(unsafe { self.read_guard() })
} else {
None
}
}
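    /// Locks this `RwLock` with exclusive write access, blocking the current
    /// thread until it can be acquired.
    ///
    /// Returns an RAII guard which will release the write access of this
    /// `RwLock` when dropped.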
#[inline]
pub fn write(&self) -> RwLockWriteGuard<'_, R, T> {
self.raw.lock_exclusive();
unsafe { self.write_guard() }
}
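    /// Attempts to lock this `RwLock` with exclusive write access.
    ///
    /// If the lock could not be acquired at this time, then `None` is
    /// returned. Otherwise, an RAII guard is returned which will release the
    /// lock when it is dropped. This function does not block.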
#[inline]
pub fn try_write(&self) -> Option<RwLockWriteGuard<'_, R, T>> {
if self.raw.try_lock_exclusive() {
Some(unsafe { self.write_guard() })
} else {
None
}
}
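    /// Returns a mutable reference to the underlying data.
    ///
    /// Since this call borrows the `RwLock` mutably, no actual locking needs
    /// to take place: the mutable borrow statically guarantees no locks exist.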
#[inline]
pub fn get_mut(&mut self) -> &mut T {
unsafe { &mut *self.data.get() }
}
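    /// Forcibly unlocks a read lock.
    ///
    /// This is useful when combined with `mem::forget` to hold a lock without
    /// the need to maintain a `RwLockReadGuard` object alive, for example when
    /// dealing with FFI.
    ///
    /// # Safety
    ///
    /// This method must only be called if the current thread logically owns a
    /// `RwLockReadGuard` but that guard has been discarded using `mem::forget`.
    /// Behavior is undefined if a rwlock is read-unlocked when not read-locked.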
#[inline]
pub unsafe fn force_unlock_read(&self) {
self.raw.unlock_shared();
}
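    /// Forcibly unlocks a write lock.
    ///
    /// This is useful when combined with `mem::forget` to hold a lock without
    /// the need to maintain a `RwLockWriteGuard` object alive, for example when
    /// dealing with FFI.
    ///
    /// # Safety
    ///
    /// This method must only be called if the current thread logically owns a
    /// `RwLockWriteGuard` but that guard has been discarded using `mem::forget`.
    /// Behavior is undefined if a rwlock is write-unlocked when not write-locked.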
#[inline]
pub unsafe fn force_unlock_write(&self) {
self.raw.unlock_exclusive();
}
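    /// Returns the underlying raw reader-writer lock object.
    ///
    /// Note that you will most likely need to import the `RawRwLock` trait to
    /// be able to call functions on the raw lock.
    ///
    /// # Safety
    ///
    /// This method is unsafe because it allows unlocking the lock while still
    /// holding a reference to a lock guard.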
pub unsafe fn raw(&self) -> &R {
&self.raw
}
}
impl<R: RawRwLockFair, T: ?Sized> RwLock<R, T> {
#[inline]
pub unsafe fn force_unlock_read_fair(&self) {
self.raw.unlock_shared_fair();
}
#[inline]
pub unsafe fn force_unlock_write_fair(&self) {
self.raw.unlock_exclusive_fair();
}
}
impl<R: RawRwLockTimed, T: ?Sized> RwLock<R, T> {
#[inline]
pub fn try_read_for(&self, timeout: R::Duration) -> Option<RwLockReadGuard<'_, R, T>> {
if self.raw.try_lock_shared_for(timeout) {
Some(unsafe { self.read_guard() })
} else {
None
}
}
#[inline]
pub fn try_read_until(&self, timeout: R::Instant) -> Option<RwLockReadGuard<'_, R, T>> {
if self.raw.try_lock_shared_until(timeout) {
Some(unsafe { self.read_guard() })
} else {
None
}
}
#[inline]
pub fn try_write_for(&self, timeout: R::Duration) -> Option<RwLockWriteGuard<'_, R, T>> {
if self.raw.try_lock_exclusive_for(timeout) {
Some(unsafe { self.write_guard() })
} else {
None
}
}
#[inline]
pub fn try_write_until(&self, timeout: R::Instant) -> Option<RwLockWriteGuard<'_, R, T>> {
if self.raw.try_lock_exclusive_until(timeout) {
Some(unsafe { self.write_guard() })
} else {
None
}
}
}
impl<R: RawRwLockRecursive, T: ?Sized> RwLock<R, T> {
#[inline]
pub fn read_recursive(&self) -> RwLockReadGuard<'_, R, T> {
self.raw.lock_shared_recursive();
unsafe { self.read_guard() }
}
#[inline]
pub fn try_read_recursive(&self) -> Option<RwLockReadGuard<'_, R, T>> {
if self.raw.try_lock_shared_recursive() {
Some(unsafe { self.read_guard() })
} else {
None
}
}
}
impl<R: RawRwLockRecursiveTimed, T: ?Sized> RwLock<R, T> {
#[inline]
pub fn try_read_recursive_for(
&self,
timeout: R::Duration,
) -> Option<RwLockReadGuard<'_, R, T>> {
if self.raw.try_lock_shared_recursive_for(timeout) {
Some(unsafe { self.read_guard() })
} else {
None
}
}
#[inline]
pub fn try_read_recursive_until(
&self,
timeout: R::Instant,
) -> Option<RwLockReadGuard<'_, R, T>> {
if self.raw.try_lock_shared_recursive_until(timeout) {
Some(unsafe { self.read_guard() })
} else {
None
}
}
}
impl<R: RawRwLockUpgrade, T: ?Sized> RwLock<R, T> {
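    /// Creates a new `RwLockUpgradableReadGuard` without checking if the lock
    /// is held.
    ///
    /// # Safety
    ///
    /// The upgradable lock must be held before calling this method.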
#[inline]
unsafe fn upgradable_guard(&self) -> RwLockUpgradableReadGuard<'_, R, T> {
RwLockUpgradableReadGuard {
rwlock: self,
marker: PhantomData,
}
}
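    /// Locks this `RwLock` with upgradable read access, blocking the current
    /// thread until it can be acquired.
    ///
    /// The calling thread will be blocked until there are no more writers or
    /// other upgradable reads which hold the lock. There may be other readers
    /// currently inside the lock when this method returns.
    ///
    /// Returns an RAII guard which will release this thread's shared access
    /// once it is dropped.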
#[inline]
pub fn upgradable_read(&self) -> RwLockUpgradableReadGuard<'_, R, T> {
self.raw.lock_upgradable();
unsafe { self.upgradable_guard() }
}
#[inline]
pub fn try_upgradable_read(&self) -> Option<RwLockUpgradableReadGuard<'_, R, T>> {
if self.raw.try_lock_upgradable() {
Some(unsafe { self.upgradable_guard() })
} else {
None
}
}
}
impl<R: RawRwLockUpgradeTimed, T: ?Sized> RwLock<R, T> {
#[inline]
pub fn try_upgradable_read_for(
&self,
timeout: R::Duration,
) -> Option<RwLockUpgradableReadGuard<'_, R, T>> {
if self.raw.try_lock_upgradable_for(timeout) {
Some(unsafe { self.upgradable_guard() })
} else {
None
}
}
#[inline]
pub fn try_upgradable_read_until(
&self,
timeout: R::Instant,
) -> Option<RwLockUpgradableReadGuard<'_, R, T>> {
if self.raw.try_lock_upgradable_until(timeout) {
Some(unsafe { self.upgradable_guard() })
} else {
None
}
}
}
impl<R: RawRwLock, T: ?Sized + Default> Default for RwLock<R, T> {
#[inline]
fn default() -> RwLock<R, T> {
RwLock::new(Default::default())
}
}
impl<R: RawRwLock, T> From<T> for RwLock<R, T> {
#[inline]
fn from(t: T) -> RwLock<R, T> {
RwLock::new(t)
}
}
impl<R: RawRwLock, T: ?Sized + fmt::Debug> fmt::Debug for RwLock<R, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.try_read() {
Some(guard) => f.debug_struct("RwLock").field("data", &&*guard).finish(),
None => {
struct LockedPlaceholder;
impl fmt::Debug for LockedPlaceholder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("<locked>")
}
}
f.debug_struct("RwLock")
.field("data", &LockedPlaceholder)
.finish()
}
}
}
}
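/// RAII structure used to release the shared read access of a lock when
/// dropped.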
#[must_use = "if unused the RwLock will immediately unlock"]
pub struct RwLockReadGuard<'a, R: RawRwLock, T: ?Sized> {
rwlock: &'a RwLock<R, T>,
marker: PhantomData<(&'a T, R::GuardMarker)>,
}
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + Sync + 'a> Sync for RwLockReadGuard<'a, R, T> {}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> RwLockReadGuard<'a, R, T> {
pub fn rwlock(s: &Self) -> &'a RwLock<R, T> {
s.rwlock
}
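    /// Makes a new `MappedRwLockReadGuard` for a component of the locked data.
    ///
    /// This operation cannot fail as the `RwLockReadGuard` passed in already
    /// locked the data.
    ///
    /// This is an associated function that needs to be used as
    /// `RwLockReadGuard::map(...)`. A method would interfere with methods of
    /// the same name on the contents of the locked data.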
#[inline]
pub fn map<U: ?Sized, F>(s: Self, f: F) -> MappedRwLockReadGuard<'a, R, U>
where
F: FnOnce(&T) -> &U,
{
let raw = &s.rwlock.raw;
let data = f(unsafe { &*s.rwlock.data.get() });
mem::forget(s);
MappedRwLockReadGuard {
raw,
data,
marker: PhantomData,
}
}
#[inline]
pub fn try_map<U: ?Sized, F>(s: Self, f: F) -> Result<MappedRwLockReadGuard<'a, R, U>, Self>
where
F: FnOnce(&T) -> Option<&U>,
{
let raw = &s.rwlock.raw;
let data = match f(unsafe { &*s.rwlock.data.get() }) {
Some(data) => data,
None => return Err(s),
};
mem::forget(s);
Ok(MappedRwLockReadGuard {
raw,
data,
marker: PhantomData,
})
}
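    /// Temporarily unlocks the `RwLock` to execute the given function.
    ///
    /// The `RwLock` is unlocked for the duration of the closure and a fresh
    /// read lock is acquired before returning. This is safe because the guard
    /// is mutably borrowed for that duration and so cannot be used to access
    /// the data in the meantime.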
#[inline]
pub fn unlocked<F, U>(s: &mut Self, f: F) -> U
where
F: FnOnce() -> U,
{
s.rwlock.raw.unlock_shared();
defer!(s.rwlock.raw.lock_shared());
f()
}
}
impl<'a, R: RawRwLockFair + 'a, T: ?Sized + 'a> RwLockReadGuard<'a, R, T> {
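    /// Unlocks the `RwLock` using a fair unlock protocol.
    ///
    /// By default, when a lock is unlocked it may be "stolen" by a thread that
    /// happens to pass by, rather than being handed to the thread that has
    /// been blocked on it the longest; that default maximizes throughput. Fair
    /// unlocking instead hands the lock directly to a waiting thread.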
#[inline]
pub fn unlock_fair(s: Self) {
s.rwlock.raw.unlock_shared_fair();
mem::forget(s);
}
#[inline]
pub fn unlocked_fair<F, U>(s: &mut Self, f: F) -> U
where
F: FnOnce() -> U,
{
s.rwlock.raw.unlock_shared_fair();
defer!(s.rwlock.raw.lock_shared());
f()
}
#[inline]
pub fn bump(s: &mut Self) {
s.rwlock.raw.bump_shared();
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Deref for RwLockReadGuard<'a, R, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
unsafe { &*self.rwlock.data.get() }
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Drop for RwLockReadGuard<'a, R, T> {
#[inline]
fn drop(&mut self) {
self.rwlock.raw.unlock_shared();
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Debug + ?Sized + 'a> fmt::Debug for RwLockReadGuard<'a, R, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Display + ?Sized + 'a> fmt::Display
for RwLockReadGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(f)
}
}
#[cfg(feature = "owning_ref")]
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> StableAddress for RwLockReadGuard<'a, R, T> {}
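/// RAII structure used to release the exclusive write access of a lock when
/// dropped.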
#[must_use = "if unused the RwLock will immediately unlock"]
pub struct RwLockWriteGuard<'a, R: RawRwLock, T: ?Sized> {
rwlock: &'a RwLock<R, T>,
marker: PhantomData<(&'a mut T, R::GuardMarker)>,
}
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + Sync + 'a> Sync for RwLockWriteGuard<'a, R, T> {}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> RwLockWriteGuard<'a, R, T> {
pub fn rwlock(s: &Self) -> &'a RwLock<R, T> {
s.rwlock
}
#[inline]
pub fn map<U: ?Sized, F>(s: Self, f: F) -> MappedRwLockWriteGuard<'a, R, U>
where
F: FnOnce(&mut T) -> &mut U,
{
let raw = &s.rwlock.raw;
let data = f(unsafe { &mut *s.rwlock.data.get() });
mem::forget(s);
MappedRwLockWriteGuard {
raw,
data,
marker: PhantomData,
}
}
#[inline]
pub fn try_map<U: ?Sized, F>(s: Self, f: F) -> Result<MappedRwLockWriteGuard<'a, R, U>, Self>
where
F: FnOnce(&mut T) -> Option<&mut U>,
{
let raw = &s.rwlock.raw;
let data = match f(unsafe { &mut *s.rwlock.data.get() }) {
Some(data) => data,
None => return Err(s),
};
mem::forget(s);
Ok(MappedRwLockWriteGuard {
raw,
data,
marker: PhantomData,
})
}
#[inline]
pub fn unlocked<F, U>(s: &mut Self, f: F) -> U
where
F: FnOnce() -> U,
{
s.rwlock.raw.unlock_exclusive();
defer!(s.rwlock.raw.lock_exclusive());
f()
}
}
impl<'a, R: RawRwLockDowngrade + 'a, T: ?Sized + 'a> RwLockWriteGuard<'a, R, T> {
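    /// Atomically downgrades a write lock into a read lock without allowing
    /// any writers to take exclusive access of the lock in the meantime.
    ///
    /// Note that if there are any writers currently waiting to take the lock
    /// then other readers may not be able to acquire the lock even if it was
    /// downgraded.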
pub fn downgrade(s: Self) -> RwLockReadGuard<'a, R, T> {
s.rwlock.raw.downgrade();
let rwlock = s.rwlock;
mem::forget(s);
RwLockReadGuard {
rwlock,
marker: PhantomData,
}
}
}
impl<'a, R: RawRwLockUpgradeDowngrade + 'a, T: ?Sized + 'a> RwLockWriteGuard<'a, R, T> {
pub fn downgrade_to_upgradable(s: Self) -> RwLockUpgradableReadGuard<'a, R, T> {
s.rwlock.raw.downgrade_to_upgradable();
let rwlock = s.rwlock;
mem::forget(s);
RwLockUpgradableReadGuard {
rwlock,
marker: PhantomData,
}
}
}
impl<'a, R: RawRwLockFair + 'a, T: ?Sized + 'a> RwLockWriteGuard<'a, R, T> {
#[inline]
pub fn unlock_fair(s: Self) {
s.rwlock.raw.unlock_exclusive_fair();
mem::forget(s);
}
#[inline]
pub fn unlocked_fair<F, U>(s: &mut Self, f: F) -> U
where
F: FnOnce() -> U,
{
s.rwlock.raw.unlock_exclusive_fair();
defer!(s.rwlock.raw.lock_exclusive());
f()
}
#[inline]
pub fn bump(s: &mut Self) {
s.rwlock.raw.bump_exclusive();
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Deref for RwLockWriteGuard<'a, R, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
unsafe { &*self.rwlock.data.get() }
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> DerefMut for RwLockWriteGuard<'a, R, T> {
#[inline]
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.rwlock.data.get() }
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Drop for RwLockWriteGuard<'a, R, T> {
#[inline]
fn drop(&mut self) {
self.rwlock.raw.unlock_exclusive();
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Debug + ?Sized + 'a> fmt::Debug for RwLockWriteGuard<'a, R, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Display + ?Sized + 'a> fmt::Display
for RwLockWriteGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(f)
}
}
#[cfg(feature = "owning_ref")]
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> StableAddress for RwLockWriteGuard<'a, R, T> {}
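/// RAII structure used to release the upgradable read access of a lock when
/// dropped.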
#[must_use = "if unused the RwLock will immediately unlock"]
pub struct RwLockUpgradableReadGuard<'a, R: RawRwLockUpgrade, T: ?Sized> {
rwlock: &'a RwLock<R, T>,
marker: PhantomData<(&'a T, R::GuardMarker)>,
}
unsafe impl<'a, R: RawRwLockUpgrade + 'a, T: ?Sized + Sync + 'a> Sync
for RwLockUpgradableReadGuard<'a, R, T>
{
}
impl<'a, R: RawRwLockUpgrade + 'a, T: ?Sized + 'a> RwLockUpgradableReadGuard<'a, R, T> {
pub fn rwlock(s: &Self) -> &'a RwLock<R, T> {
s.rwlock
}
#[inline]
pub fn unlocked<F, U>(s: &mut Self, f: F) -> U
where
F: FnOnce() -> U,
{
s.rwlock.raw.unlock_upgradable();
defer!(s.rwlock.raw.lock_upgradable());
f()
}
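    /// Atomically upgrades an upgradable read lock into an exclusive write
    /// lock, blocking the current thread until it can be acquired.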
pub fn upgrade(s: Self) -> RwLockWriteGuard<'a, R, T> {
s.rwlock.raw.upgrade();
let rwlock = s.rwlock;
mem::forget(s);
RwLockWriteGuard {
rwlock,
marker: PhantomData,
}
}
pub fn try_upgrade(s: Self) -> Result<RwLockWriteGuard<'a, R, T>, Self> {
if s.rwlock.raw.try_upgrade() {
let rwlock = s.rwlock;
mem::forget(s);
Ok(RwLockWriteGuard {
rwlock,
marker: PhantomData,
})
} else {
Err(s)
}
}
}
impl<'a, R: RawRwLockUpgradeFair + 'a, T: ?Sized + 'a> RwLockUpgradableReadGuard<'a, R, T> {
#[inline]
pub fn unlock_fair(s: Self) {
s.rwlock.raw.unlock_upgradable_fair();
mem::forget(s);
}
#[inline]
pub fn unlocked_fair<F, U>(s: &mut Self, f: F) -> U
where
F: FnOnce() -> U,
{
s.rwlock.raw.unlock_upgradable_fair();
defer!(s.rwlock.raw.lock_upgradable());
f()
}
#[inline]
pub fn bump(s: &mut Self) {
s.rwlock.raw.bump_upgradable();
}
}
impl<'a, R: RawRwLockUpgradeDowngrade + 'a, T: ?Sized + 'a> RwLockUpgradableReadGuard<'a, R, T> {
pub fn downgrade(s: Self) -> RwLockReadGuard<'a, R, T> {
s.rwlock.raw.downgrade_upgradable();
let rwlock = s.rwlock;
mem::forget(s);
RwLockReadGuard {
rwlock,
marker: PhantomData,
}
}
}
impl<'a, R: RawRwLockUpgradeTimed + 'a, T: ?Sized + 'a> RwLockUpgradableReadGuard<'a, R, T> {
pub fn try_upgrade_for(
s: Self,
timeout: R::Duration,
) -> Result<RwLockWriteGuard<'a, R, T>, Self> {
if s.rwlock.raw.try_upgrade_for(timeout) {
let rwlock = s.rwlock;
mem::forget(s);
Ok(RwLockWriteGuard {
rwlock,
marker: PhantomData,
})
} else {
Err(s)
}
}
#[inline]
pub fn try_upgrade_until(
s: Self,
timeout: R::Instant,
) -> Result<RwLockWriteGuard<'a, R, T>, Self> {
if s.rwlock.raw.try_upgrade_until(timeout) {
let rwlock = s.rwlock;
mem::forget(s);
Ok(RwLockWriteGuard {
rwlock,
marker: PhantomData,
})
} else {
Err(s)
}
}
}
impl<'a, R: RawRwLockUpgrade + 'a, T: ?Sized + 'a> Deref for RwLockUpgradableReadGuard<'a, R, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
unsafe { &*self.rwlock.data.get() }
}
}
impl<'a, R: RawRwLockUpgrade + 'a, T: ?Sized + 'a> Drop for RwLockUpgradableReadGuard<'a, R, T> {
#[inline]
fn drop(&mut self) {
self.rwlock.raw.unlock_upgradable();
}
}
impl<'a, R: RawRwLockUpgrade + 'a, T: fmt::Debug + ?Sized + 'a> fmt::Debug
for RwLockUpgradableReadGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<'a, R: RawRwLockUpgrade + 'a, T: fmt::Display + ?Sized + 'a> fmt::Display
for RwLockUpgradableReadGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(f)
}
}
#[cfg(feature = "owning_ref")]
unsafe impl<'a, R: RawRwLockUpgrade + 'a, T: ?Sized + 'a> StableAddress
for RwLockUpgradableReadGuard<'a, R, T>
{
}
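/// An RAII read lock guard returned by `RwLockReadGuard::map`, which can point
/// to a subfield of the protected data.
///
/// The main difference between `MappedRwLockReadGuard` and `RwLockReadGuard`
/// is that the former doesn't support temporarily unlocking and re-locking,
/// since that could introduce soundness issues if the locked object is
/// modified by another thread.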
#[must_use = "if unused the RwLock will immediately unlock"]
pub struct MappedRwLockReadGuard<'a, R: RawRwLock, T: ?Sized> {
raw: &'a R,
data: *const T,
marker: PhantomData<&'a T>,
}
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + Sync + 'a> Sync for MappedRwLockReadGuard<'a, R, T> {}
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Send for MappedRwLockReadGuard<'a, R, T> where
R::GuardMarker: Send
{
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> MappedRwLockReadGuard<'a, R, T> {
#[inline]
pub fn map<U: ?Sized, F>(s: Self, f: F) -> MappedRwLockReadGuard<'a, R, U>
where
F: FnOnce(&T) -> &U,
{
let raw = s.raw;
let data = f(unsafe { &*s.data });
mem::forget(s);
MappedRwLockReadGuard {
raw,
data,
marker: PhantomData,
}
}
#[inline]
pub fn try_map<U: ?Sized, F>(s: Self, f: F) -> Result<MappedRwLockReadGuard<'a, R, U>, Self>
where
F: FnOnce(&T) -> Option<&U>,
{
let raw = s.raw;
let data = match f(unsafe { &*s.data }) {
Some(data) => data,
None => return Err(s),
};
mem::forget(s);
Ok(MappedRwLockReadGuard {
raw,
data,
marker: PhantomData,
})
}
}
impl<'a, R: RawRwLockFair + 'a, T: ?Sized + 'a> MappedRwLockReadGuard<'a, R, T> {
#[inline]
pub fn unlock_fair(s: Self) {
s.raw.unlock_shared_fair();
mem::forget(s);
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Deref for MappedRwLockReadGuard<'a, R, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
unsafe { &*self.data }
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Drop for MappedRwLockReadGuard<'a, R, T> {
#[inline]
fn drop(&mut self) {
self.raw.unlock_shared();
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Debug + ?Sized + 'a> fmt::Debug
for MappedRwLockReadGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Display + ?Sized + 'a> fmt::Display
for MappedRwLockReadGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(f)
}
}
#[cfg(feature = "owning_ref")]
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> StableAddress
for MappedRwLockReadGuard<'a, R, T>
{
}
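/// An RAII write lock guard returned by `RwLockWriteGuard::map`, which can
/// point to a subfield of the protected data.
///
/// The main difference between `MappedRwLockWriteGuard` and `RwLockWriteGuard`
/// is that the former doesn't support temporarily unlocking and re-locking,
/// since that could introduce soundness issues if the locked object is
/// modified by another thread.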
#[must_use = "if unused the RwLock will immediately unlock"]
pub struct MappedRwLockWriteGuard<'a, R: RawRwLock, T: ?Sized> {
raw: &'a R,
data: *mut T,
marker: PhantomData<&'a mut T>,
}
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + Sync + 'a> Sync
for MappedRwLockWriteGuard<'a, R, T>
{
}
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Send for MappedRwLockWriteGuard<'a, R, T> where
R::GuardMarker: Send
{
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> MappedRwLockWriteGuard<'a, R, T> {
#[inline]
pub fn map<U: ?Sized, F>(s: Self, f: F) -> MappedRwLockWriteGuard<'a, R, U>
where
F: FnOnce(&mut T) -> &mut U,
{
let raw = s.raw;
let data = f(unsafe { &mut *s.data });
mem::forget(s);
MappedRwLockWriteGuard {
raw,
data,
marker: PhantomData,
}
}
#[inline]
pub fn try_map<U: ?Sized, F>(s: Self, f: F) -> Result<MappedRwLockWriteGuard<'a, R, U>, Self>
where
F: FnOnce(&mut T) -> Option<&mut U>,
{
let raw = s.raw;
let data = match f(unsafe { &mut *s.data }) {
Some(data) => data,
None => return Err(s),
};
mem::forget(s);
Ok(MappedRwLockWriteGuard {
raw,
data,
marker: PhantomData,
})
}
}
impl<'a, R: RawRwLockDowngrade + 'a, T: ?Sized + 'a> MappedRwLockWriteGuard<'a, R, T> {
#[deprecated(
since = "0.3.3",
note = "This function is unsound and will be removed in the future, see issue #198"
)]
pub fn downgrade(s: Self) -> MappedRwLockReadGuard<'a, R, T> {
s.raw.downgrade();
let raw = s.raw;
let data = s.data;
mem::forget(s);
MappedRwLockReadGuard {
raw,
data,
marker: PhantomData,
}
}
}
impl<'a, R: RawRwLockFair + 'a, T: ?Sized + 'a> MappedRwLockWriteGuard<'a, R, T> {
#[inline]
pub fn unlock_fair(s: Self) {
s.raw.unlock_exclusive_fair();
mem::forget(s);
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Deref for MappedRwLockWriteGuard<'a, R, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
unsafe { &*self.data }
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> DerefMut for MappedRwLockWriteGuard<'a, R, T> {
#[inline]
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.data }
}
}
impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> Drop for MappedRwLockWriteGuard<'a, R, T> {
#[inline]
fn drop(&mut self) {
self.raw.unlock_exclusive();
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Debug + ?Sized + 'a> fmt::Debug
for MappedRwLockWriteGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<'a, R: RawRwLock + 'a, T: fmt::Display + ?Sized + 'a> fmt::Display
for MappedRwLockWriteGuard<'a, R, T>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(f)
}
}
#[cfg(feature = "owning_ref")]
unsafe impl<'a, R: RawRwLock + 'a, T: ?Sized + 'a> StableAddress
for MappedRwLockWriteGuard<'a, R, T>
{
}