use std::collections::hash_map::RandomState;
use std::fmt::{self, Debug};
use std::hash::{BuildHasher, Hash};
use std::mem::replace;
use std::ops::{Deref, DerefMut, RangeInclusive};
use std::pin::pin;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::Relaxed;
use sdd::{AtomicShared, Guard, Shared, Tag};
use super::Equivalent;
use super::hash_table::bucket::{CACHE, DoublyLinkedList, EntryPtr};
use super::hash_table::bucket_array::BucketArray;
use super::hash_table::{HashTable, LockedBucket};
use crate::async_helper::AsyncGuard;
/// Concurrent cache keyed by `K`, backed by a lock-free bucket array with a
/// per-bucket doubly-linked list used for LRU-style eviction (`CACHE` mode).
pub struct HashCache<K, V, H = RandomState>
where
    H: BuildHasher,
{
    /// Current bucket array; null until the first allocation.
    bucket_array: AtomicShared<BucketArray<K, V, DoublyLinkedList, CACHE>>,
    /// Lower bound on the number of slots, recorded at construction.
    minimum_capacity: AtomicUsize,
    /// Upper bound on the number of slots; rounded to a power of two by
    /// `with_capacity_and_hasher`.
    maximum_capacity: usize,
    /// Factory producing hashers for key hashing.
    build_hasher: H,
}
/// Default upper bound on the cache capacity when none is specified.
pub const DEFAULT_MAXIMUM_CAPACITY: usize = 256;

/// Entry displaced to make room for an insertion; `None` when nothing was
/// evicted.
pub type EvictedEntry<K, V> = Option<(K, V)>;
/// A view into a single cache slot, either occupied or vacant, obtained from
/// `entry_async`/`entry_sync`/`try_entry`.
pub enum Entry<'h, K, V, H = RandomState>
where
    H: BuildHasher,
{
    /// The key is present in the cache.
    Occupied(OccupiedEntry<'h, K, V, H>),
    /// The key is absent; the bucket lock is held so it can be filled.
    Vacant(VacantEntry<'h, K, V, H>),
}
/// A view into an occupied cache slot; holds the bucket lock for the entry's
/// bucket while it exists.
pub struct OccupiedEntry<'h, K, V, H = RandomState>
where
    H: BuildHasher,
{
    /// Owning cache, used for guard prolonging and removal bookkeeping.
    hashcache: &'h HashCache<K, V, H>,
    /// Exclusively locked bucket containing the entry.
    locked_bucket: LockedBucket<K, V, DoublyLinkedList, CACHE>,
    /// Pointer to the entry inside the locked bucket.
    entry_ptr: EntryPtr<'h, K, V, CACHE>,
}
/// A view into a vacant cache slot; holds the bucket lock so the slot can be
/// filled without re-acquisition.
pub struct VacantEntry<'h, K, V, H = RandomState>
where
    H: BuildHasher,
{
    /// Owning cache.
    hashcache: &'h HashCache<K, V, H>,
    /// Key that was looked up and not found; consumed by `put_entry`.
    key: K,
    /// Hash of `key`, cached to avoid rehashing on insertion.
    hash: u64,
    /// Exclusively locked bucket where the key would reside.
    locked_bucket: LockedBucket<K, V, DoublyLinkedList, CACHE>,
}
/// A mutable view of an entry handed to `iter_mut_*` callbacks; can be
/// consumed (removed) in place via [`ConsumableEntry::consume`].
pub struct ConsumableEntry<'b, 'g: 'b, K, V> {
    /// Locked bucket containing the entry.
    locked_bucket: &'b mut LockedBucket<K, V, DoublyLinkedList, CACHE>,
    /// Pointer to the current entry within the bucket.
    entry_ptr: &'b mut EntryPtr<'g, K, V, CACHE>,
    /// Set to `true` when an entry is consumed, signalling the iteration
    /// driver that a removal occurred.
    remove_probe: &'b mut bool,
    /// Guard keeping the bucket memory alive for `'g`.
    guard: &'g Guard,
}
/// Outcome of `replace_async`/`replace_sync`.
pub enum ReplaceResult<'h, K, V, H = RandomState>
where
    H: BuildHasher,
{
    /// The key existed; the stored key was swapped for the supplied one and
    /// the previous key is returned alongside the occupied entry.
    Replaced(OccupiedEntry<'h, K, V, H>, K),
    /// The key did not exist; the vacant entry retains the supplied key.
    NotReplaced(VacantEntry<'h, K, V, H>),
}
impl<K, V, H> HashCache<K, V, H>
where
    H: BuildHasher,
{
    /// Creates an empty cache with the supplied hasher, zero minimum capacity
    /// and [`DEFAULT_MAXIMUM_CAPACITY`]; the bucket array stays null until
    /// first use.
    #[cfg(not(feature = "loom"))]
    #[inline]
    pub const fn with_hasher(build_hasher: H) -> Self {
        HashCache {
            bucket_array: AtomicShared::null(),
            minimum_capacity: AtomicUsize::new(0),
            maximum_capacity: DEFAULT_MAXIMUM_CAPACITY,
            build_hasher,
        }
    }

    /// Non-`const` variant of [`Self::with_hasher`] for `loom` builds, where
    /// the atomics cannot be constructed in a `const` context.
    #[cfg(feature = "loom")]
    #[inline]
    pub fn with_hasher(build_hasher: H) -> Self {
        Self {
            bucket_array: AtomicShared::null(),
            minimum_capacity: AtomicUsize::new(0),
            maximum_capacity: DEFAULT_MAXIMUM_CAPACITY,
            build_hasher,
        }
    }

    /// Creates a cache with the given capacity bounds and hasher.
    ///
    /// A `minimum_capacity` of zero defers bucket-array allocation; otherwise
    /// the array is allocated eagerly and the recorded minimum is the actual
    /// slot count. The maximum is clamped to at least the minimum and the
    /// bucket array's smallest legal size, then rounded up to a power of two.
    #[inline]
    pub fn with_capacity_and_hasher(
        minimum_capacity: usize,
        maximum_capacity: usize,
        build_hasher: H,
    ) -> Self {
        let (array, minimum_capacity) = if minimum_capacity == 0 {
            (AtomicShared::null(), AtomicUsize::new(0))
        } else {
            // SAFETY: presumably `new_unchecked` only requires a valid value;
            // NOTE(review): confirm against `sdd::Shared::new_unchecked`'s
            // documented contract.
            let array = unsafe {
                Shared::new_unchecked(BucketArray::<K, V, DoublyLinkedList, CACHE>::new(
                    minimum_capacity,
                    AtomicShared::null(),
                ))
            };
            // Record the real slot count, which may exceed the request.
            let minimum_capacity = array.num_slots();
            (
                AtomicShared::from(array),
                AtomicUsize::new(minimum_capacity),
            )
        };
        // `min(1 << (BITS - 1))` keeps `next_power_of_two` from overflowing.
        let maximum_capacity = maximum_capacity
            .max(minimum_capacity.load(Relaxed))
            .max(BucketArray::<K, V, DoublyLinkedList, CACHE>::minimum_capacity())
            .min(1_usize << (usize::BITS - 1))
            .next_power_of_two();
        HashCache {
            bucket_array: array,
            minimum_capacity,
            maximum_capacity,
            build_hasher,
        }
    }
}
impl<K, V, H> HashCache<K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    /// Gets the entry for `key` for in-place manipulation, asynchronously
    /// waiting until the bucket lock is acquired.
    #[inline]
    pub async fn entry_async(&self, key: K) -> Entry<'_, K, V, H> {
        let hash = self.hash(&key);
        let async_guard = pin!(AsyncGuard::default());
        let locked_bucket = self.writer_async(hash, &async_guard).await;
        // Extend the guard's lifetime to `'_` so the returned entry can hold
        // references derived from it.
        let prolonged_guard = self.prolonged_guard_ref(async_guard.guard());
        let entry_ptr = locked_bucket.search(&key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            Entry::Occupied(OccupiedEntry {
                hashcache: self,
                locked_bucket,
                entry_ptr,
            })
        } else {
            let vacant_entry = VacantEntry {
                hashcache: self,
                key,
                hash,
                locked_bucket,
            };
            Entry::Vacant(vacant_entry)
        }
    }

    /// Synchronous counterpart of [`Self::entry_async`]; blocks until the
    /// bucket lock is acquired.
    #[inline]
    pub fn entry_sync(&self, key: K) -> Entry<'_, K, V, H> {
        let hash = self.hash(&key);
        let guard = Guard::new();
        let prolonged_guard = self.prolonged_guard_ref(&guard);
        let locked_bucket = self.writer_sync(hash, prolonged_guard);
        let entry_ptr = locked_bucket.search(&key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            Entry::Occupied(OccupiedEntry {
                hashcache: self,
                locked_bucket,
                entry_ptr,
            })
        } else {
            let vacant_entry = VacantEntry {
                hashcache: self,
                key,
                hash,
                locked_bucket,
            };
            Entry::Vacant(vacant_entry)
        }
    }

    /// Non-blocking variant of [`Self::entry_sync`]: returns `None` when the
    /// bucket lock cannot be acquired immediately.
    #[inline]
    pub fn try_entry(&self, key: K) -> Option<Entry<'_, K, V, H>> {
        let hash = self.hash(&key);
        let guard = Guard::new();
        let prolonged_guard = self.prolonged_guard_ref(&guard);
        let locked_bucket = self.try_reserve_bucket(hash, prolonged_guard)?;
        let entry_ptr = locked_bucket.search(&key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            Some(Entry::Occupied(OccupiedEntry {
                hashcache: self,
                locked_bucket,
                entry_ptr,
            }))
        } else {
            Some(Entry::Vacant(VacantEntry {
                hashcache: self,
                key,
                hash,
                locked_bucket,
            }))
        }
    }

    /// Puts `(key, val)` into the cache, returning any evicted entry, or
    /// `Err((key, val))` if the key is already present. Asynchronously waits
    /// for the bucket lock.
    #[inline]
    pub async fn put_async(&self, key: K, val: V) -> Result<EvictedEntry<K, V>, (K, V)> {
        let hash = self.hash(&key);
        let async_guard = pin!(AsyncGuard::default());
        let locked_bucket = self.writer_async(hash, &async_guard).await;
        let guard = async_guard.guard();
        if locked_bucket.search(&key, hash, guard).is_valid() {
            Err((key, val))
        } else {
            // Evict the least-recently-used entry first if the bucket is
            // full, then insert and mark the new entry most-recently-used.
            let evicted = locked_bucket.evict_lru_head(locked_bucket.data_block);
            let entry_ptr = locked_bucket.insert(hash, (key, val), guard);
            locked_bucket.update_lru_tail(&entry_ptr);
            Ok(evicted)
        }
    }

    /// Synchronous counterpart of [`Self::put_async`].
    #[inline]
    pub fn put_sync(&self, key: K, val: V) -> Result<EvictedEntry<K, V>, (K, V)> {
        let hash = self.hash(&key);
        let guard = Guard::new();
        let locked_bucket = self.writer_sync(hash, &guard);
        let entry_ptr = locked_bucket.search(&key, hash, &guard);
        if entry_ptr.is_valid() {
            Err((key, val))
        } else {
            let evicted = locked_bucket
                .writer
                .evict_lru_head(locked_bucket.data_block);
            let entry_ptr = locked_bucket.insert(hash, (key, val), &guard);
            locked_bucket.writer.update_lru_tail(&entry_ptr);
            Ok(evicted)
        }
    }

    /// If `key` exists, swaps the stored key for the supplied one and returns
    /// the previous key with the occupied entry; otherwise returns a vacant
    /// entry retaining the key. Asynchronously waits for the bucket lock.
    #[inline]
    pub async fn replace_async(&self, key: K) -> ReplaceResult<'_, K, V, H> {
        let hash = self.hash(&key);
        let async_guard = pin!(AsyncGuard::default());
        let locked_bucket = self.writer_async(hash, &async_guard).await;
        let prolonged_guard = self.prolonged_guard_ref(async_guard.guard());
        let mut entry_ptr = locked_bucket.search(&key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            // Keys compare equal, so swapping them does not move the entry.
            let prev_key = replace(
                &mut entry_ptr
                    .get_mut(locked_bucket.data_block, &locked_bucket.writer)
                    .0,
                key,
            );
            ReplaceResult::Replaced(
                OccupiedEntry {
                    hashcache: self,
                    locked_bucket,
                    entry_ptr,
                },
                prev_key,
            )
        } else {
            ReplaceResult::NotReplaced(VacantEntry {
                hashcache: self,
                key,
                hash,
                locked_bucket,
            })
        }
    }

    /// Synchronous counterpart of [`Self::replace_async`].
    #[inline]
    pub fn replace_sync(&self, key: K) -> ReplaceResult<'_, K, V, H> {
        let hash = self.hash(&key);
        let guard = Guard::new();
        let locked_bucket = self.writer_sync(hash, &guard);
        let prolonged_guard = self.prolonged_guard_ref(&guard);
        let mut entry_ptr = locked_bucket.search(&key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            let prev_key = replace(
                &mut entry_ptr
                    .get_mut(locked_bucket.data_block, &locked_bucket.writer)
                    .0,
                key,
            );
            ReplaceResult::Replaced(
                OccupiedEntry {
                    hashcache: self,
                    locked_bucket,
                    entry_ptr,
                },
                prev_key,
            )
        } else {
            ReplaceResult::NotReplaced(VacantEntry {
                hashcache: self,
                key,
                hash,
                locked_bucket,
            })
        }
    }

    /// Removes the entry for `key`, returning it if present. Asynchronous.
    #[inline]
    pub async fn remove_async<Q>(&self, key: &Q) -> Option<(K, V)>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        self.remove_if_async(key, |_| true).await
    }

    /// Removes the entry for `key`, returning it if present. Synchronous.
    #[inline]
    pub fn remove_sync<Q>(&self, key: &Q) -> Option<(K, V)>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        self.remove_if_sync(key, |_| true)
    }

    /// Gets an [`OccupiedEntry`] for `key` and marks it most-recently-used.
    /// Asynchronously waits for the bucket lock; `None` if absent.
    #[inline]
    pub async fn get_async<Q>(&self, key: &Q) -> Option<OccupiedEntry<'_, K, V, H>>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        let hash = self.hash(key);
        let async_guard = pin!(AsyncGuard::default());
        // `optional_writer_*` avoids allocating a bucket array on lookup.
        let locked_bucket = self.optional_writer_async(hash, &async_guard).await?;
        let prolonged_guard = self.prolonged_guard_ref(async_guard.guard());
        let entry_ptr = locked_bucket.search(key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            locked_bucket.writer.update_lru_tail(&entry_ptr);
            return Some(OccupiedEntry {
                hashcache: self,
                locked_bucket,
                entry_ptr,
            });
        }
        None
    }

    /// Synchronous counterpart of [`Self::get_async`].
    #[inline]
    pub fn get_sync<Q>(&self, key: &Q) -> Option<OccupiedEntry<'_, K, V, H>>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        let hash = self.hash(key);
        let guard = Guard::new();
        let prolonged_guard = self.prolonged_guard_ref(&guard);
        let locked_bucket = self.optional_writer_sync(hash, prolonged_guard)?;
        let entry_ptr = locked_bucket.search(key, hash, prolonged_guard);
        if entry_ptr.is_valid() {
            locked_bucket.writer.update_lru_tail(&entry_ptr);
            return Some(OccupiedEntry {
                hashcache: self,
                locked_bucket,
                entry_ptr,
            });
        }
        None
    }

    /// Applies `reader` to the entry for `key` without taking an exclusive
    /// lock; `None` if absent. Asynchronous.
    #[inline]
    pub async fn read_async<Q, R, F: FnOnce(&K, &V) -> R>(&self, key: &Q, reader: F) -> Option<R>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        let async_guard = pin!(AsyncGuard::default());
        let hash = self.hash(key);
        self.reader_async(key, hash, reader, &async_guard).await
    }

    /// Synchronous counterpart of [`Self::read_async`].
    #[inline]
    pub fn read_sync<Q, R, F: FnOnce(&K, &V) -> R>(&self, key: &Q, reader: F) -> Option<R>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        let hash = self.hash(key);
        let guard = Guard::new();
        self.reader_sync(key, hash, reader, &guard)
    }

    /// Returns `true` if `key` is present. Asynchronous.
    #[inline]
    pub async fn contains_async<Q>(&self, key: &Q) -> bool
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        self.read_async(key, |_, _| ()).await.is_some()
    }

    /// Returns `true` if `key` is present. Synchronous.
    #[inline]
    pub fn contains_sync<Q>(&self, key: &Q) -> bool
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        self.read_sync(key, |_, _| ()).is_some()
    }

    /// Removes the entry for `key` if `condition` approves of its value,
    /// returning the removed pair. Asynchronous.
    #[inline]
    pub async fn remove_if_async<Q, F: FnOnce(&mut V) -> bool>(
        &self,
        key: &Q,
        condition: F,
    ) -> Option<(K, V)>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        let hash = self.hash(key);
        let async_guard = pin!(AsyncGuard::default());
        let mut locked_bucket = self.optional_writer_async(hash, &async_guard).await?;
        let mut entry_ptr = locked_bucket.search(key, hash, async_guard.guard());
        if entry_ptr.is_valid() && condition(&mut locked_bucket.entry_mut(&mut entry_ptr).1) {
            Some(locked_bucket.remove(self, &mut entry_ptr, async_guard.guard()))
        } else {
            None
        }
    }

    /// Synchronous counterpart of [`Self::remove_if_async`].
    #[inline]
    pub fn remove_if_sync<Q, F: FnOnce(&mut V) -> bool>(
        &self,
        key: &Q,
        condition: F,
    ) -> Option<(K, V)>
    where
        Q: Equivalent<K> + Hash + ?Sized,
    {
        let hash = self.hash(key);
        let guard = Guard::default();
        let mut locked_bucket = self.optional_writer_sync(hash, &guard)?;
        let mut entry_ptr = locked_bucket.search(key, hash, &guard);
        if entry_ptr.is_valid() && condition(&mut locked_bucket.entry_mut(&mut entry_ptr).1) {
            Some(locked_bucket.remove(self, &mut entry_ptr, &guard))
        } else {
            None
        }
    }

    /// Visits every entry with `f`; stops early and returns `false` when `f`
    /// returns `false`. Asynchronous.
    #[inline]
    pub async fn iter_async<F: FnMut(&K, &V) -> bool>(&self, mut f: F) -> bool {
        let async_guard = pin!(AsyncGuard::default());
        let mut result = true;
        self.for_each_reader_async(&async_guard, |reader, data_block| {
            let guard = async_guard.guard();
            let mut entry_ptr = EntryPtr::new(guard);
            while entry_ptr.move_to_next(&reader, guard) {
                let (k, v) = entry_ptr.get(data_block);
                if !f(k, v) {
                    result = false;
                    // Returning `false` stops the bucket traversal.
                    return false;
                }
            }
            true
        })
        .await;
        result
    }

    /// Synchronous counterpart of [`Self::iter_async`].
    #[inline]
    pub fn iter_sync<F: FnMut(&K, &V) -> bool>(&self, mut f: F) -> bool {
        let mut result = true;
        let guard = Guard::new();
        self.for_each_reader_sync(&guard, |reader, data_block| {
            let mut entry_ptr = EntryPtr::new(&guard);
            while entry_ptr.move_to_next(&reader, &guard) {
                let (k, v) = entry_ptr.get(data_block);
                if !f(k, v) {
                    result = false;
                    return false;
                }
            }
            true
        });
        result
    }

    /// Visits every entry mutably as a [`ConsumableEntry`]; stops early and
    /// returns `false` when `f` returns `false`. Asynchronous.
    #[inline]
    pub async fn iter_mut_async<F: FnMut(ConsumableEntry<'_, '_, K, V>) -> bool>(
        &self,
        mut f: F,
    ) -> bool {
        let async_guard = pin!(AsyncGuard::default());
        let mut result = true;
        self.for_each_writer_async(0, 0, &async_guard, |mut locked_bucket, removed| {
            let guard = async_guard.guard();
            let mut entry_ptr = EntryPtr::new(guard);
            while entry_ptr.move_to_next(&locked_bucket.writer, guard) {
                let consumable_entry = ConsumableEntry {
                    locked_bucket: &mut locked_bucket,
                    entry_ptr: &mut entry_ptr,
                    remove_probe: removed,
                    guard,
                };
                if !f(consumable_entry) {
                    result = false;
                    // NOTE(review): `true` here appears to signal early
                    // termination to `for_each_writer_async` — the inverse of
                    // the reader-callback convention above; confirm against
                    // the `for_each_writer_*` contract.
                    return true;
                }
            }
            false
        })
        .await;
        result
    }

    /// Synchronous counterpart of [`Self::iter_mut_async`].
    #[inline]
    pub fn iter_mut_sync<F: FnMut(ConsumableEntry<'_, '_, K, V>) -> bool>(&self, mut f: F) -> bool {
        let mut result = true;
        let guard = Guard::new();
        self.for_each_writer_sync(0, 0, &guard, |mut locked_bucket, removed| {
            let mut entry_ptr = EntryPtr::new(&guard);
            while entry_ptr.move_to_next(&locked_bucket.writer, &guard) {
                let consumable_entry = ConsumableEntry {
                    locked_bucket: &mut locked_bucket,
                    entry_ptr: &mut entry_ptr,
                    remove_probe: removed,
                    guard: &guard,
                };
                if !f(consumable_entry) {
                    result = false;
                    return true;
                }
            }
            false
        });
        result
    }

    /// Retains only the entries for which `pred` returns `true`; the rest are
    /// consumed (removed). Asynchronous.
    #[inline]
    pub async fn retain_async<F: FnMut(&K, &mut V) -> bool>(&self, mut pred: F) {
        self.iter_mut_async(|mut e| {
            let (k, v) = &mut *e;
            if !pred(k, v) {
                drop(e.consume());
            }
            true
        })
        .await;
    }

    /// Synchronous counterpart of [`Self::retain_async`].
    #[inline]
    pub fn retain_sync<F: FnMut(&K, &mut V) -> bool>(&self, mut pred: F) {
        self.iter_mut_sync(|mut e| {
            let (k, v) = &mut *e;
            if !pred(k, v) {
                drop(e.consume());
            }
            true
        });
    }

    /// Removes every entry. Asynchronous.
    #[inline]
    pub async fn clear_async(&self) {
        self.retain_async(|_, _| false).await;
    }

    /// Removes every entry. Synchronous.
    #[inline]
    pub fn clear_sync(&self) {
        self.retain_sync(|_, _| false);
    }

    /// Returns the number of entries; requires a full scan.
    #[inline]
    pub fn len(&self) -> usize {
        self.num_entries(&Guard::new())
    }

    /// Returns `true` if the cache holds no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        !self.has_entry(&Guard::new())
    }

    /// Returns the current number of slots in the bucket array.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.num_slots(&Guard::new())
    }

    /// Returns the configured `minimum..=maximum` capacity bounds.
    #[inline]
    pub fn capacity_range(&self) -> RangeInclusive<usize> {
        self.minimum_capacity.load(Relaxed)..=self.maximum_capacity()
    }
}
impl<K, V> HashCache<K, V, RandomState> {
    /// Creates an empty cache with a random hasher state and the default
    /// maximum capacity.
    #[inline]
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Creates a cache with the given capacity bounds and a random hasher
    /// state.
    #[inline]
    #[must_use]
    pub fn with_capacity(minimum_capacity: usize, maximum_capacity: usize) -> Self {
        Self::with_capacity_and_hasher(minimum_capacity, maximum_capacity, RandomState::new())
    }
}
impl<K, V, H> Default for HashCache<K, V, H>
where
    H: BuildHasher + Default,
{
    /// Creates an empty cache from the hasher's default state, with the
    /// default maximum capacity.
    #[inline]
    fn default() -> Self {
        Self::with_hasher(Default::default())
    }
}
impl<K, V, H> Debug for HashCache<K, V, H>
where
    K: Debug + Eq + Hash,
    V: Debug,
    H: BuildHasher,
{
    /// Formats the cache as a map of its current key-value pairs.
    ///
    /// Iterates with `iter_sync`, so bucket locks are taken while formatting.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut map_builder = f.debug_map();
        self.iter_sync(|key, val| {
            map_builder.entry(key, val);
            true
        });
        map_builder.finish()
    }
}
impl<K, V, H> Drop for HashCache<K, V, H>
where
    H: BuildHasher,
{
    /// Drops the cache and reclaims its bucket array eagerly.
    #[inline]
    fn drop(&mut self) {
        // `if let` instead of `Option::map` for the side effect (clippy
        // `option_map_unit_fn`); the discarded `Option<()>` added nothing.
        if let Some(array) = self.bucket_array.swap((None, Tag::None), Relaxed).0 {
            // SAFETY: `&mut self` in `drop` guarantees exclusive access, and
            // the array was just detached, so no other thread can reach it.
            // NOTE(review): confirm against `sdd::Shared::drop_in_place`'s
            // documented contract.
            unsafe {
                array.drop_in_place();
            }
        }
    }
}
impl<K, V, H> FromIterator<(K, V)> for HashCache<K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher + Default,
{
    /// Builds a cache from a `(key, value)` iterator, sizing the capacity
    /// bounds from the iterator's size hint.
    #[inline]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let entries = iter.into_iter();
        let size_hint = entries.size_hint();
        let hashcache = Self::with_capacity_and_hasher(
            size_hint.0,
            Self::capacity_from_size_hint(size_hint),
            H::default(),
        );
        for (key, val) in entries {
            // Insertion fails for duplicate keys; the outcome is ignored.
            let _result = hashcache.put_sync(key, val);
        }
        hashcache
    }
}
impl<K, V, H> HashTable<K, V, H, DoublyLinkedList, CACHE> for HashCache<K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    /// Returns the hasher factory.
    #[inline]
    fn hasher(&self) -> &H {
        &self.build_hasher
    }

    /// Returns the atomic handle to the current bucket array.
    #[inline]
    fn bucket_array(&self) -> &AtomicShared<BucketArray<K, V, DoublyLinkedList, CACHE>> {
        &self.bucket_array
    }

    /// Returns the recorded minimum capacity.
    #[inline]
    fn minimum_capacity(&self) -> &AtomicUsize {
        &self.minimum_capacity
    }

    /// Returns the fixed maximum capacity.
    #[inline]
    fn maximum_capacity(&self) -> usize {
        self.maximum_capacity
    }
}
impl<K, V, H> PartialEq for HashCache<K, V, H>
where
    K: Eq + Hash,
    V: PartialEq,
    H: BuildHasher,
{
    /// Returns `true` when both caches contain exactly the same key-value
    /// pairs.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Mutual-subset check; `&&` short-circuits exactly like the nested
        // early-return form would.
        self.iter_sync(|k, v| other.read_sync(k, |_, ov| v == ov) == Some(true))
            && other.iter_sync(|k, v| self.read_sync(k, |_, sv| v == sv) == Some(true))
    }
}
impl<'h, K, V, H> Entry<'h, K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    /// Fills the slot with `val` if vacant; returns any evicted entry
    /// alongside the occupied entry.
    #[inline]
    pub fn or_put(self, val: V) -> (EvictedEntry<K, V>, OccupiedEntry<'h, K, V, H>) {
        self.or_put_with(move || val)
    }

    /// Fills the slot with `constructor()` if vacant; the constructor is only
    /// invoked when the slot is actually vacant.
    #[inline]
    pub fn or_put_with<F: FnOnce() -> V>(
        self,
        constructor: F,
    ) -> (EvictedEntry<K, V>, OccupiedEntry<'h, K, V, H>) {
        self.or_put_with_key(move |_| constructor())
    }

    /// Fills the slot with `constructor(&key)` if vacant; the constructor is
    /// only invoked when the slot is actually vacant.
    #[inline]
    pub fn or_put_with_key<F: FnOnce(&K) -> V>(
        self,
        constructor: F,
    ) -> (EvictedEntry<K, V>, OccupiedEntry<'h, K, V, H>) {
        match self {
            Self::Vacant(vacant) => {
                let val = constructor(vacant.key());
                vacant.put_entry(val)
            }
            Self::Occupied(occupied) => (None, occupied),
        }
    }

    /// Returns a reference to the entry's key.
    #[inline]
    pub fn key(&self) -> &K {
        match self {
            Self::Occupied(occupied) => occupied.key(),
            Self::Vacant(vacant) => vacant.key(),
        }
    }

    /// Applies `f` to the value if the slot is occupied, then returns the
    /// entry unchanged otherwise.
    #[inline]
    #[must_use]
    pub fn and_modify<F>(mut self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        if let Self::Occupied(occupied) = &mut self {
            f(occupied.get_mut());
        }
        self
    }

    /// Stores `val` in the slot, overwriting an existing value or filling a
    /// vacancy; returns any evicted entry alongside the occupied entry.
    #[inline]
    pub fn put_entry(self, val: V) -> (EvictedEntry<K, V>, OccupiedEntry<'h, K, V, H>) {
        match self {
            Self::Vacant(vacant) => vacant.put_entry(val),
            Self::Occupied(mut occupied) => {
                occupied.put(val);
                (None, occupied)
            }
        }
    }
}
impl<'h, K, V, H> Entry<'h, K, V, H>
where
    K: Eq + Hash,
    V: Default,
    H: BuildHasher,
{
    /// Fills the slot with `V::default()` if vacant; the default value is
    /// only constructed when the slot is actually vacant.
    #[inline]
    pub fn or_default(self) -> (EvictedEntry<K, V>, OccupiedEntry<'h, K, V, H>) {
        self.or_put_with(V::default)
    }
}
impl<K, V, H> Debug for Entry<'_, K, V, H>
where
    K: Debug + Eq + Hash,
    V: Debug,
    H: BuildHasher,
{
    /// Formats the entry as `Entry(...)` wrapping the active variant.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let inner: &dyn Debug = match self {
            Self::Occupied(occupied) => occupied,
            Self::Vacant(vacant) => vacant,
        };
        f.debug_tuple("Entry").field(inner).finish()
    }
}
impl<K, V, H> OccupiedEntry<'_, K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    /// Returns a reference to the entry's key.
    #[inline]
    #[must_use]
    pub fn key(&self) -> &K {
        &self.locked_bucket.entry(&self.entry_ptr).0
    }

    /// Removes the entry and returns the owned `(key, value)` pair.
    #[inline]
    #[must_use]
    pub fn remove_entry(mut self) -> (K, V) {
        let guard = Guard::new();
        // Prolong the local guard so its lifetime matches `entry_ptr`'s.
        let prolonged_guard = self.hashcache.prolonged_guard_ref(&guard);
        self.locked_bucket
            .remove(self.hashcache, &mut self.entry_ptr, prolonged_guard)
    }

    /// Returns a reference to the entry's value.
    #[inline]
    #[must_use]
    pub fn get(&self) -> &V {
        &self.locked_bucket.entry(&self.entry_ptr).1
    }

    /// Returns a mutable reference to the entry's value.
    #[inline]
    pub fn get_mut(&mut self) -> &mut V {
        &mut self.locked_bucket.entry_mut(&mut self.entry_ptr).1
    }

    /// Replaces the entry's value with `val`, returning the old value.
    #[inline]
    pub fn put(&mut self, val: V) -> V {
        replace(self.get_mut(), val)
    }

    /// Removes the entry and returns the owned value, discarding the key.
    #[inline]
    #[must_use]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
}
impl<K, V, H> Debug for OccupiedEntry<'_, K, V, H>
where
    K: Debug + Eq + Hash,
    V: Debug,
    H: BuildHasher,
{
    /// Formats the entry's key and value; internal fields are elided.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("OccupiedEntry");
        builder.field("key", self.key());
        builder.field("value", self.get());
        builder.finish_non_exhaustive()
    }
}
impl<K, V, H> Deref for OccupiedEntry<'_, K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    type Target = V;

    /// Dereferences to the entry's value, equivalent to `get`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.locked_bucket.entry(&self.entry_ptr).1
    }
}
impl<K, V, H> DerefMut for OccupiedEntry<'_, K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    /// Mutably dereferences to the entry's value, equivalent to `get_mut`.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.locked_bucket.entry_mut(&mut self.entry_ptr).1
    }
}
impl<'h, K, V, H> VacantEntry<'h, K, V, H>
where
    K: Eq + Hash,
    H: BuildHasher,
{
    /// Returns a reference to the key that would be inserted.
    #[inline]
    pub fn key(&self) -> &K {
        &self.key
    }

    /// Consumes the entry, returning ownership of the key.
    #[inline]
    pub fn into_key(self) -> K {
        self.key
    }

    /// Inserts `(key, val)` into the slot, evicting the least-recently-used
    /// entry first if the bucket is full; returns the evicted entry alongside
    /// the now-occupied entry.
    #[inline]
    pub fn put_entry(self, val: V) -> (EvictedEntry<K, V>, OccupiedEntry<'h, K, V, H>) {
        let guard = Guard::new();
        // Prolong the local guard so `entry_ptr` can outlive this frame
        // inside the returned `OccupiedEntry`.
        let prolonged_guard = self.hashcache.prolonged_guard_ref(&guard);
        // Evict before inserting so the bucket has room; uses the cached
        // hash computed at lookup time.
        let evicted = self
            .locked_bucket
            .writer
            .evict_lru_head(self.locked_bucket.data_block);
        let entry_ptr = self
            .locked_bucket
            .insert(self.hash, (self.key, val), prolonged_guard);
        // Mark the fresh entry most-recently-used.
        self.locked_bucket.writer.update_lru_tail(&entry_ptr);
        let occupied = OccupiedEntry {
            hashcache: self.hashcache,
            locked_bucket: self.locked_bucket,
            entry_ptr,
        };
        (evicted, occupied)
    }
}
impl<K, V, H> Debug for VacantEntry<'_, K, V, H>
where
    K: Debug + Eq + Hash,
    V: Debug,
    H: BuildHasher,
{
    /// Formats the vacant entry as `VacantEntry(key)`.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_tuple("VacantEntry");
        builder.field(self.key());
        builder.finish()
    }
}
impl<K, V> ConsumableEntry<'_, '_, K, V> {
    /// Removes the entry from the bucket and returns the owned pair.
    ///
    /// Also flags the iteration driver (via `remove_probe`) that a removal
    /// happened, so it can consider shrinking/cleanup afterwards.
    #[inline]
    #[must_use]
    pub fn consume(self) -> (K, V) {
        // Plain assignment: `|= true` always yields `true`, so the bitwise-or
        // was redundant.
        *self.remove_probe = true;
        self.locked_bucket
            .writer
            .remove(self.locked_bucket.data_block, self.entry_ptr, self.guard)
    }
}
impl<K, V> Deref for ConsumableEntry<'_, '_, K, V> {
    type Target = (K, V);

    /// Dereferences to the `(key, value)` pair of the current entry.
    #[inline]
    fn deref(&self) -> &Self::Target {
        // Reborrow the held mutable references immutably for the lookup.
        (*self.locked_bucket).entry(&*self.entry_ptr)
    }
}
impl<K, V> DerefMut for ConsumableEntry<'_, '_, K, V> {
    /// Mutably dereferences to the `(key, value)` pair of the current entry.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        (*self.locked_bucket).entry_mut(&mut *self.entry_ptr)
    }
}