#![cfg_attr(
feature = "document-features",
cfg_attr(doc, doc = ::document_features::document_features!())
)]
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
#![deny(missing_docs, rust_2018_idioms, unsafe_code)]
use std::{
io,
marker::PhantomData,
path::{Path, PathBuf},
sync::atomic::AtomicUsize,
};
use once_cell::sync::Lazy;
// With the `hp-hashmap` feature enabled, back the tempfile registry with `DashMap`,
// a sharded concurrent map, for higher parallelism under contention.
#[cfg(feature = "hp-hashmap")]
type HashMap<K, V> = dashmap::DashMap<K, V>;
#[cfg(not(feature = "hp-hashmap"))]
mod hashmap {
    use parking_lot::Mutex;
    use std::collections::HashMap;

    /// A minimal mutex-protected map exposing only the operations the tempfile
    /// registry needs, mirroring the subset of the `DashMap` API used by this crate.
    pub struct Concurrent<K, V> {
        inner: Mutex<HashMap<K, V>>,
    }

    impl<K, V> Default for Concurrent<K, V>
    where
        K: Eq + std::hash::Hash,
    {
        fn default() -> Self {
            Self {
                inner: Default::default(),
            }
        }
    }

    impl<K, V> Concurrent<K, V>
    where
        K: Eq + std::hash::Hash + Clone,
    {
        /// Store `value` under `key`, returning the previously stored value, if any.
        pub fn insert(&self, key: K, value: V) -> Option<V> {
            let mut map = self.inner.lock();
            map.insert(key, value)
        }

        /// Remove the entry for `key`, returning the owned key along with its value
        /// if the entry existed.
        pub fn remove(&self, key: &K) -> Option<(K, V)> {
            let mut map = self.inner.lock();
            let value = map.remove(key)?;
            Some((key.clone(), value))
        }

        /// Invoke `cb` on each stored value, but only if the lock can be acquired
        /// without blocking. NOTE(review): the non-blocking `try_lock` looks deliberate —
        /// presumably so signal-time callers can never deadlock on a held lock; if the
        /// lock is contended the traversal is silently skipped.
        pub fn for_each<F>(&self, cb: F)
        where
            Self: Sized,
            F: FnMut(&mut V),
        {
            match self.inner.try_lock() {
                Some(mut guard) => guard.values_mut().for_each(cb),
                None => {}
            }
        }
    }
}
// Without the `hp-hashmap` feature, fall back to the simple mutex-guarded map above.
#[cfg(not(feature = "hp-hashmap"))]
type HashMap<K, V> = hashmap::Concurrent<K, V>;
mod fs;
pub use fs::{create_dir, remove_dir};
#[cfg(feature = "signals")]
pub mod signal;
mod forksafe;
use forksafe::ForksafeTempfile;
pub mod handle;
use crate::handle::{Closed, Writable};
pub mod registry;
static NEXT_MAP_INDEX: AtomicUsize = AtomicUsize::new(0);
// The process-wide registry of tempfiles, keyed by the id stored in each `Handle`.
// NOTE(review): the value is `Option<ForksafeTempfile>` — presumably `None` marks a
// slot whose tempfile is temporarily taken out of the registry; confirm against the
// `handle`/`registry` modules.
static REGISTRY: Lazy<HashMap<usize, Option<ForksafeTempfile>>> = Lazy::new(|| {
    // On first access, install cleanup handlers for all termination signals,
    // but only if a signal-handler mode other than `None` was configured beforehand.
    #[cfg(feature = "signals")]
    if signal::handler::MODE.load(std::sync::atomic::Ordering::SeqCst) != signal::handler::Mode::None as usize {
        for sig in signal_hook::consts::TERM_SIGNALS {
            // SAFETY: registering a signal handler requires the handler to be
            // async-signal-safe; the cleanup functions are supplied by this crate's
            // `signal` module for exactly this purpose.
            #[allow(unsafe_code)]
            unsafe {
                // On unix, register via sigaction so the handler receives signal info.
                #[cfg(not(windows))]
                {
                    signal_hook_registry::register_sigaction(*sig, signal::handler::cleanup_tempfiles_nix)
                }
                // On windows, only the plain low-level registration is available.
                #[cfg(windows)]
                {
                    signal_hook::low_level::register(*sig, signal::handler::cleanup_tempfiles_windows)
                }
            }
            .expect("signals can always be installed");
        }
    }
    HashMap::default()
});
/// A way to specify how the directory containing a tempfile should be handled.
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq)]
pub enum ContainingDirectory {
    /// Assume the directory exists already; no attempt is made to create it.
    Exists,
    /// Create the directory and its parents, with the given retry configuration
    /// to deal with races against concurrent directory removal.
    CreateAllRaceProof(create_dir::Retries),
}
/// A way to specify what should be cleaned up automatically on behalf of a tempfile.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum AutoRemove {
    /// Clean up only the tempfile itself.
    Tempfile,
    /// Clean up the tempfile and additionally delete its now-empty parent directories,
    /// walking upward until (but not including) the given boundary directory.
    TempfileAndEmptyParentDirectoriesUntil {
        /// The directory at which upward deletion of empty parent directories stops.
        boundary_directory: PathBuf,
    },
}
impl AutoRemove {
    /// Perform the directory cleanup encoded in this variant, best-effort:
    /// errors while removing empty parent directories are ignored.
    /// Returns the boundary directory if one was configured, `None` otherwise.
    fn execute_best_effort(self, directory_to_potentially_delete: &Path) -> Option<PathBuf> {
        if let AutoRemove::TempfileAndEmptyParentDirectoriesUntil { boundary_directory } = self {
            // Ignore failures — cleanup here is strictly best-effort.
            remove_dir::empty_upward_until_boundary(directory_to_potentially_delete, &boundary_directory).ok();
            return Some(boundary_directory);
        }
        None
    }
}
/// A handle to a registered tempfile, identified by the `id` under which it is
/// stored in the global `REGISTRY`. The `Marker` type parameter encodes the
/// handle's state at the type level (e.g. writable vs. closed) without storing data.
#[derive(Debug)]
#[must_use = "A handle that is immediately dropped doesn't lock a resource meaningfully"]
pub struct Handle<Marker: std::fmt::Debug> {
    // Key into `REGISTRY` identifying this handle's tempfile.
    id: usize,
    // Zero-sized state marker; see the `handle` module for the available states.
    _marker: PhantomData<Marker>,
}
/// Create a registered tempfile inside `containing_directory`, open for writing.
///
/// The containing directory is handled according to `directory`, and `cleanup`
/// determines what is removed automatically on behalf of the tempfile.
/// Delegates to `Handle::<Writable>::new`.
///
/// # Errors
/// Returns any `io::Error` produced while creating the directory or the tempfile.
pub fn new(
    containing_directory: impl AsRef<Path>,
    directory: ContainingDirectory,
    cleanup: AutoRemove,
) -> io::Result<Handle<Writable>> {
    Handle::<Writable>::new(containing_directory, directory, cleanup)
}
/// Create a registered tempfile at exactly `path`, open for writing.
///
/// The containing directory is handled according to `directory`, and `cleanup`
/// determines what is removed automatically on behalf of the tempfile.
/// Delegates to `Handle::<Writable>::at`.
///
/// # Errors
/// Returns any `io::Error` produced while creating the directory or the tempfile.
pub fn writable_at(
    path: impl AsRef<Path>,
    directory: ContainingDirectory,
    cleanup: AutoRemove,
) -> io::Result<Handle<Writable>> {
    Handle::<Writable>::at(path, directory, cleanup)
}
/// Register a tempfile at `path` in closed state, acting as a marker
/// rather than a file open for writing.
///
/// The containing directory is handled according to `directory`, and `cleanup`
/// determines what is removed automatically on behalf of the tempfile.
/// Delegates to `Handle::<Closed>::at`.
///
/// # Errors
/// Returns any `io::Error` produced while creating the directory or the tempfile.
pub fn mark_at(
    path: impl AsRef<Path>,
    directory: ContainingDirectory,
    cleanup: AutoRemove,
) -> io::Result<Handle<Closed>> {
    Handle::<Closed>::at(path, directory, cleanup)
}