use std::cell::UnsafeCell;
use std::fmt::{self, Debug, Formatter};
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::ptr;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering;
use std::sync::Arc;
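/// A lock-free cell holding at most one owned pointer (`Box<T>`, `Arc<T>`, or
/// any type convertible to and from a raw pointer). Ownership is moved in and
/// out atomically via swap/take/compare-and-swap operations.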
pub struct Atom<P>
where
    P: IntoRawPtr + FromRawPtr,
{
    inner: AtomicPtr<()>,
    data: PhantomData<UnsafeCell<P>>,
}

impl<P> Debug for Atom<P>
where
    P: IntoRawPtr + FromRawPtr,
{
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        write!(f, "atom({:?})", self.inner.load(Ordering::Relaxed))
    }
}

impl<P> Atom<P>
where
    P: IntoRawPtr + FromRawPtr,
{
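    /// Create an empty (null) `Atom`.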
    pub fn empty() -> Atom<P> {
        Atom {
            inner: AtomicPtr::new(ptr::null_mut()),
            data: PhantomData,
        }
    }
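    /// Create a new `Atom` that initially owns `value`.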
    pub fn new(value: P) -> Atom<P> {
        Atom {
            inner: AtomicPtr::new(value.into_raw()),
            data: PhantomData,
        }
    }
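    /// Atomically store `v`, returning the previous value (if any).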
    pub fn swap(&self, v: P, order: Ordering) -> Option<P> {
        let new = v.into_raw();
        let old = self.inner.swap(new, order);
        unsafe { Self::inner_from_raw(old) }
    }
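    /// Atomically take the current value out, leaving the `Atom` empty.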
    pub fn take(&self, order: Ordering) -> Option<P> {
        let old = self.inner.swap(ptr::null_mut(), order);
        unsafe { Self::inner_from_raw(old) }
    }
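    /// Store `v` only if the `Atom` is currently empty. Returns `None` if the
    /// write succeeded, or `Some(v)` handing the rejected value back to the
    /// caller if the `Atom` was already occupied.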
    pub fn set_if_none(&self, v: P, order: Ordering) -> Option<P> {
        let new = v.into_raw();
        let old = self.inner.compare_and_swap(ptr::null_mut(), new, order);
        if !old.is_null() {
            Some(unsafe { FromRawPtr::from_raw(new) })
        } else {
            None
        }
    }
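    /// Store `value` and splice the previous contents into `value`'s "next"
    /// slot (see [`GetNextMut`]), retrying until the compare-and-swap
    /// succeeds. This builds a LIFO chain. Returns `true` if the `Atom` was
    /// empty before this call.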
    pub fn replace_and_set_next(
        &self,
        mut value: P,
        load_order: Ordering,
        cas_order: Ordering,
    ) -> bool
    where
        P: GetNextMut<NextPtr = Option<P>>,
    {
        let next = value.get_next() as *mut Option<P>;
        let raw = value.into_raw();
        // Drop whatever `next` currently holds; it is re-initialised with
        // `ptr::write` on every loop iteration below.
        unsafe { ptr::drop_in_place(next) };
        loop {
            let pcurrent = self.inner.load(load_order);
            let current = unsafe { Self::inner_from_raw(pcurrent) };
            unsafe { ptr::write(next, current) };
            let last = self.inner.compare_and_swap(pcurrent, raw, cas_order);
            if last == pcurrent {
                return last.is_null();
            }
        }
    }
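    /// Returns `true` if the `Atom` held no value at the moment of the load.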
    pub fn is_none(&self, order: Ordering) -> bool {
        self.inner.load(order).is_null()
    }

    #[inline]
    fn inner_into_raw(val: Option<P>) -> *mut () {
        match val {
            Some(val) => val.into_raw(),
            None => ptr::null_mut(),
        }
    }

    #[inline]
    unsafe fn inner_from_raw(ptr: *mut ()) -> Option<P> {
        if !ptr.is_null() {
            Some(FromRawPtr::from_raw(ptr))
        } else {
            None
        }
    }
}

impl<P, T> Atom<P>
where
    P: IntoRawPtr + FromRawPtr + Deref<Target = T>,
{
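    /// Compare-and-swap: store `new` only if the `Atom` currently points at
    /// the same allocation as `current`. On success the previous value is
    /// returned; on failure the rejected `new` is handed back together with
    /// a raw pointer to the value actually observed.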
    pub fn compare_and_swap(
        &self,
        current: Option<&P>,
        new: Option<P>,
        order: Ordering,
    ) -> Result<Option<P>, (Option<P>, *mut P)> {
        let pcurrent = Self::inner_as_ptr(current);
        let pnew = Self::inner_into_raw(new);
        let pprev = self.inner.compare_and_swap(pcurrent, pnew, order);
        if pprev == pcurrent {
            Ok(unsafe { Self::inner_from_raw(pprev) })
        } else {
            Err((unsafe { Self::inner_from_raw(pnew) }, pprev as *mut P))
        }
    }
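    /// Like [`Atom::compare_and_swap`], but with separate memory orderings
    /// for the success and failure cases.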
    pub fn compare_exchange(
        &self,
        current: Option<&P>,
        new: Option<P>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<Option<P>, (Option<P>, *mut P)> {
        let pnew = Self::inner_into_raw(new);
        self.inner
            .compare_exchange(Self::inner_as_ptr(current), pnew, success, failure)
            .map(|pprev| unsafe { Self::inner_from_raw(pprev) })
            .map_err(|pprev| (unsafe { Self::inner_from_raw(pnew) }, pprev as *mut P))
    }
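    /// Weak variant of [`Atom::compare_exchange`]; it may fail spuriously
    /// even when the comparison would succeed, so it is intended for loops.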
    pub fn compare_exchange_weak(
        &self,
        current: Option<&P>,
        new: Option<P>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<Option<P>, (Option<P>, *mut P)> {
        let pnew = Self::inner_into_raw(new);
        self.inner
            .compare_exchange_weak(Self::inner_as_ptr(current), pnew, success, failure)
            .map(|pprev| unsafe { Self::inner_from_raw(pprev) })
            .map_err(|pprev| (unsafe { Self::inner_from_raw(pnew) }, pprev as *mut P))
    }

    #[inline]
    fn inner_as_ptr(val: Option<&P>) -> *mut () {
        match val {
            Some(val) => &**val as *const _ as *mut (),
            None => ptr::null_mut(),
        }
    }
}

impl<P> Drop for Atom<P>
where
    P: IntoRawPtr + FromRawPtr,
{
    fn drop(&mut self) {
        self.take(Ordering::Relaxed);
    }
}

unsafe impl<P> Send for Atom<P>
where
    P: IntoRawPtr + FromRawPtr + Send,
{
}
unsafe impl<P> Sync for Atom<P>
where
    P: IntoRawPtr + FromRawPtr + Send,
{
}
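/// Conversion of an owned smart pointer into a type-erased raw pointer,
/// transferring ownership to the caller of `into_raw`.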
pub trait IntoRawPtr {
    fn into_raw(self) -> *mut ();
}
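/// The inverse of [`IntoRawPtr`]: reclaim ownership from a raw pointer
/// previously produced by `into_raw`.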
pub trait FromRawPtr {
    unsafe fn from_raw(ptr: *mut ()) -> Self;
}

impl<T> IntoRawPtr for Box<T> {
    #[inline]
    fn into_raw(self) -> *mut () {
        Box::into_raw(self) as *mut ()
    }
}

impl<T> FromRawPtr for Box<T> {
    #[inline]
    unsafe fn from_raw(ptr: *mut ()) -> Box<T> {
        Box::from_raw(ptr as *mut T)
    }
}

impl<T> IntoRawPtr for Arc<T> {
    #[inline]
    fn into_raw(self) -> *mut () {
        Arc::into_raw(self) as *mut T as *mut ()
    }
}

impl<T> FromRawPtr for Arc<T> {
    #[inline]
    unsafe fn from_raw(ptr: *mut ()) -> Arc<T> {
        Arc::from_raw(ptr as *const () as *const T)
    }
}

impl<'a, T> IntoRawPtr for &'a T {
    #[inline]
    fn into_raw(self) -> *mut () {
        self as *const _ as *mut ()
    }
}

impl<'a, T> FromRawPtr for &'a T {
    #[inline]
    unsafe fn from_raw(ptr: *mut ()) -> &'a T {
        &*(ptr as *mut T)
    }
}
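// Unsafely extend a borrow's lifetime to match `_ptr`'s; callers must
// guarantee the referent really does live at least that long.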
#[inline]
unsafe fn copy_lifetime<'a, S: ?Sized, T: ?Sized + 'a>(_ptr: &'a S, ptr: &T) -> &'a T {
    &*(ptr as *const T)
}
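// Mutable counterpart of `copy_lifetime`, with the same safety contract.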
#[inline]
#[allow(unknown_lints, mut_from_ref)]
unsafe fn copy_mut_lifetime<'a, S: ?Sized, T: ?Sized + 'a>(_ptr: &'a S, ptr: &mut T) -> &'a mut T {
    &mut *(ptr as *mut T)
}
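/// A write-once variant of [`Atom`]: through a shared reference the value can
/// only be set while empty (`set_if_none`), so borrows handed out by `get`
/// remain valid for as long as the `AtomSetOnce` itself is alive.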
#[derive(Debug)]
pub struct AtomSetOnce<P>
where
    P: IntoRawPtr + FromRawPtr,
{
    inner: Atom<P>,
}

impl<P> AtomSetOnce<P>
where
    P: IntoRawPtr + FromRawPtr,
{
    pub fn empty() -> AtomSetOnce<P> {
        AtomSetOnce {
            inner: Atom::empty(),
        }
    }

    pub fn new(value: P) -> AtomSetOnce<P> {
        AtomSetOnce {
            inner: Atom::new(value),
        }
    }

    pub fn set_if_none(&self, v: P, order: Ordering) -> Option<P> {
        self.inner.set_if_none(v, order)
    }

    pub fn into_atom(self) -> Atom<P> {
        self.inner
    }

    pub fn atom(&mut self) -> &mut Atom<P> {
        &mut self.inner
    }

    pub fn is_none(&self, order: Ordering) -> bool {
        self.inner.is_none(order)
    }
}

impl<T, P> AtomSetOnce<P>
where
    P: IntoRawPtr + FromRawPtr + Deref<Target = T>,
{
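    /// Borrow the stored value, if any. The contents of an `AtomSetOnce` are
    /// never removed through a shared reference, so the borrow stays valid.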
    pub fn get(&self, order: Ordering) -> Option<&T> {
        let ptr = self.inner.inner.load(order);
        let val = unsafe { Atom::inner_from_raw(ptr) };
        val.map(|v: P| {
            // Re-borrow with the lifetime of `self`, then forget the owned
            // handle so the stored value is not dropped here.
            let out = unsafe { copy_lifetime(self, &*v) };
            mem::forget(v);
            out
        })
    }
}

impl<T> AtomSetOnce<Box<T>> {
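    /// Mutably borrow the stored value, if any. Requires `&mut self`, so no
    /// other reference to the contents can exist at the same time.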
    pub fn get_mut(&mut self, order: Ordering) -> Option<&mut T> {
        let ptr = self.inner.inner.load(order);
        let val = unsafe { Atom::inner_from_raw(ptr) };
        val.map(move |mut v: Box<T>| {
            let out = unsafe { copy_mut_lifetime(self, &mut *v) };
            mem::forget(v);
            out
        })
    }
}

impl<T> AtomSetOnce<T>
where
    T: Clone + IntoRawPtr + FromRawPtr,
{
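    /// Clone the stored value out of the cell, if any, leaving the original
    /// in place.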
    pub fn dup(&self, order: Ordering) -> Option<T> {
        let ptr = self.inner.inner.load(order);
        let val = unsafe { Atom::inner_from_raw(ptr) };
        val.map(|v: T| {
            let out = v.clone();
            mem::forget(v);
            out
        })
    }
}
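/// Implemented by pointer types that expose a "next" slot, which lets
/// [`Atom::replace_and_set_next`] chain values into a linked list.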
pub trait GetNextMut {
    type NextPtr;
    fn get_next(&mut self) -> &mut Self::NextPtr;
}
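
#[cfg(test)]
mod tests {
    // A few usage sketches, kept as tests so they stay self-contained and
    // compilable. The `Node` type below is purely illustrative (not part of
    // this module's public API) and exists only to exercise
    // `replace_and_set_next` via `GetNextMut`.
    use super::*;
    use std::sync::atomic::Ordering;

    struct Node {
        value: usize,
        next: Option<Box<Node>>,
    }

    impl GetNextMut for Box<Node> {
        type NextPtr = Option<Box<Node>>;
        fn get_next(&mut self) -> &mut Option<Box<Node>> {
            &mut self.next
        }
    }

    #[test]
    fn swap_and_take() {
        let atom = Atom::empty();
        assert!(atom.is_none(Ordering::Relaxed));
        // Swapping into an empty atom returns no previous value.
        assert_eq!(atom.swap(Box::new(1usize), Ordering::AcqRel), None);
        // Taking empties the atom again and hands the box back.
        assert_eq!(atom.take(Ordering::Acquire), Some(Box::new(1usize)));
        assert!(atom.is_none(Ordering::Relaxed));
    }

    #[test]
    fn set_once_semantics() {
        let once = AtomSetOnce::empty();
        // The first store succeeds and returns None.
        assert!(once.set_if_none(Box::new(7usize), Ordering::AcqRel).is_none());
        // A second store is rejected; the value comes back to the caller.
        assert!(once.set_if_none(Box::new(8usize), Ordering::AcqRel).is_some());
        assert_eq!(once.get(Ordering::Acquire), Some(&7usize));
    }

    #[test]
    fn replace_and_set_next_builds_a_lifo() {
        let head = Atom::empty();
        let node = |value| Box::new(Node { value, next: None });
        // The first push reports that the atom was previously empty.
        assert!(head.replace_and_set_next(node(1), Ordering::Relaxed, Ordering::AcqRel));
        assert!(!head.replace_and_set_next(node(2), Ordering::Relaxed, Ordering::AcqRel));
        // The most recently pushed node is on top, linked to the older one.
        let top = head.take(Ordering::Acquire).unwrap();
        assert_eq!(top.value, 2);
        assert_eq!(top.next.as_ref().map(|n| n.value), Some(1));
    }
}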