pub(crate) mod background;
pub(crate) mod registration;
mod sharded_rwlock;
use self::background::Background;
pub use crate::raw::PollEvented;
use self::sharded_rwlock::RwLock;
use std::cell::RefCell;
use std::io;
use std::mem;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Relaxed, SeqCst};
use std::sync::{Arc, Weak};
use std::time::{Duration, Instant};
use std::{fmt, usize};
use futures::task::{AtomicWaker, Context};
use log::{debug, log_enabled, trace, Level};
use mio::event::Evented;
use slab::Slab;
/// The event loop: polls `mio` for I/O events and dispatches readiness to
/// registered sources.
struct Reactor {
// Reusable buffer of events filled by each call to `mio::Poll::poll`.
events: mio::Events,
// Shared state; handles hold `Weak` references to this.
inner: Arc<Inner>,
// Keeps the wakeup registration alive; its paired `SetReadiness` lives in
// `Inner::wakeup`.
_wakeup_registration: mio::Registration,
}
/// Cloneable handle to a reactor; `inner == None` means the handle is not
/// bound to any specific reactor (see the `Default` impl).
#[derive(Clone)]
struct Handle {
inner: Option<HandlePriv>,
}
/// Internal reactor handle holding a weak reference to the shared state, so
/// outstanding handles do not keep a dropped reactor alive.
#[derive(Clone)]
struct HandlePriv {
inner: Weak<Inner>,
}
/// Opaque result of `Reactor::turn`; the private field prevents construction
/// outside this module.
#[derive(Debug)]
struct Turn {
_priv: (),
}
#[test]
fn test_handle_size() {
// Sanity check: wrapping `HandlePriv` in `Option` must not grow `Handle`
// (presumably relying on the pointer niche in `Weak` — the assert is the
// actual check). The module-level `use std::mem;` is already in scope, so
// the redundant function-local import is dropped.
assert_eq!(mem::size_of::<Handle>(), mem::size_of::<HandlePriv>());
}
/// Reactor state shared between the event loop and its handles.
struct Inner {
// The underlying OS event queue.
io: mio::Poll,
// Counter used to mint ABA guards; advances in steps of `1 << TOKEN_SHIFT`
// so the guard occupies the high bits of a token.
next_aba_guard: AtomicUsize,
// All registered I/O sources, keyed by the low bits of their token.
io_dispatch: RwLock<Slab<ScheduledIo>>,
// Setting this readable wakes the reactor out of `poll`.
wakeup: mio::SetReadiness,
}
/// Per-source reactor bookkeeping stored in the dispatch slab.
struct ScheduledIo {
// Guard matched against the high bits of incoming tokens to reject stale
// events after a slab slot has been reused.
aba_guard: usize,
// Accumulated readiness bits (`mio::Ready` stored as `usize`).
readiness: AtomicUsize,
// Task waiting for read readiness.
reader: AtomicWaker,
// Task waiting for write readiness.
writer: AtomicWaker,
}
/// Which half (read or write) of an I/O resource an operation concerns.
#[derive(Debug, Eq, PartialEq, Clone, Copy)]
pub(crate) enum Direction {
Read,
Write,
}
// Global fallback reactor handle, stored as the `usize` encoding of a
// `Weak<Inner>` (see `HandlePriv::into_usize`); `0` means "not initialized".
static HANDLE_FALLBACK: AtomicUsize = AtomicUsize::new(0);
// Reactor handle associated with the current thread, if one has been set.
thread_local!(static CURRENT_REACTOR: RefCell<Option<HandlePriv>> = RefCell::new(None));
// Number of low bits of a `mio::Token` that carry the slab key; the remaining
// high bits hold the ABA guard (see `Inner::add_source`).
const TOKEN_SHIFT: usize = 22;
// Maximum number of concurrently registered I/O sources; the all-ones key is
// reserved for the wakeup token below.
const MAX_SOURCES: usize = (1 << TOKEN_SHIFT) - 1;
// Token used for the reactor's own wakeup registration.
const TOKEN_WAKEUP: mio::Token = mio::Token(MAX_SOURCES);
/// Compile-time assertion that `Handle` is `Send + Sync`.
fn _assert_kinds() {
fn _is_send_and_sync<T: Send + Sync>() {}
_is_send_and_sync::<Handle>();
}
impl Reactor {
    /// Creates a new reactor backed by a fresh `mio::Poll`, with an internal
    /// registration used to wake the reactor from other threads.
    fn new() -> io::Result<Reactor> {
        let io = mio::Poll::new()?;
        let (wakeup_registration, wakeup_readiness) = mio::Registration::new2();
        // Level-triggered so a pending wakeup keeps being delivered until it
        // is explicitly cleared in `poll`.
        io.register(
            &wakeup_registration,
            TOKEN_WAKEUP,
            mio::Ready::readable(),
            mio::PollOpt::level(),
        )?;

        Ok(Reactor {
            events: mio::Events::with_capacity(1024),
            _wakeup_registration: wakeup_registration,
            inner: Arc::new(Inner {
                io,
                next_aba_guard: AtomicUsize::new(0),
                io_dispatch: RwLock::new(Slab::with_capacity(1)),
                wakeup: wakeup_readiness,
            }),
        })
    }

    /// Returns a handle that can register I/O resources with this reactor.
    fn handle(&self) -> Handle {
        Handle {
            inner: Some(HandlePriv {
                inner: Arc::downgrade(&self.inner),
            }),
        }
    }

    /// Performs one iteration of the event loop, blocking for at most
    /// `max_wait` (indefinitely when `None`).
    fn turn(&mut self, max_wait: Option<Duration>) -> io::Result<Turn> {
        self.poll(max_wait)?;
        Ok(Turn { _priv: () })
    }

    /// Returns `true` when no I/O sources are currently registered.
    fn is_idle(&self) -> bool {
        self.inner.io_dispatch.read().is_empty()
    }

    /// Moves the reactor onto a background thread, returning a handle to it.
    fn background(self) -> io::Result<Background> {
        Background::new(self)
    }

    fn poll(&mut self, max_wait: Option<Duration>) -> io::Result<()> {
        // Block waiting for events; `?` replaces the original's manual
        // `match` that only re-wrapped the error.
        self.inner.io.poll(&mut self.events, max_wait)?;

        // Only pay for `Instant::now()` when the summary line below can be
        // emitted. The original gated on `Level::Debug` but logged at trace,
        // so the timestamp was taken without ever being printed when debug
        // (but not trace) was enabled; the gate now matches the emission.
        let start = if log_enabled!(Level::Trace) {
            Some(Instant::now())
        } else {
            None
        };
        let mut events = 0;

        for event in self.events.iter() {
            events += 1;
            let token = event.token();
            trace!("event {:?} {:?}", event.readiness(), event.token());

            if token == TOKEN_WAKEUP {
                // Drain the level-triggered wakeup registration; readiness
                // must be cleared explicitly or `poll` would spin.
                self.inner
                    .wakeup
                    .set_readiness(mio::Ready::empty())
                    .unwrap();
            } else {
                self.dispatch(token, event.readiness());
            }
        }

        if let Some(start) = start {
            let dur = start.elapsed();
            trace!(
                "loop process - {} events, {}.{:03}s",
                events,
                dur.as_secs(),
                dur.subsec_nanos() / 1_000_000
            );
        }

        Ok(())
    }

    /// Routes one readiness event to the `ScheduledIo` it belongs to, waking
    /// any parked reader/writer tasks.
    fn dispatch(&self, token: mio::Token, ready: mio::Ready) {
        // Tokens pack an ABA guard in the high bits and the slab key in the
        // low bits (see `Inner::add_source`).
        let aba_guard = token.0 & !MAX_SOURCES;
        let token = token.0 & MAX_SOURCES;

        let mut rd = None;
        let mut wr = None;

        // Scope the read lock so the wakeups below run without holding it.
        {
            let io_dispatch = self.inner.io_dispatch.read();

            let io = match io_dispatch.get(token) {
                Some(io) => io,
                // Source was dropped between the event firing and now.
                None => return,
            };

            // Stale event for a reused slab slot; ignore it.
            if aba_guard != io.aba_guard {
                return;
            }

            io.readiness.fetch_or(ready.as_usize(), Relaxed);

            if ready.is_writable() || platform::is_hup(&ready) {
                wr = io.writer.take();
            }

            // Any readiness other than pure writability counts as read
            // readiness (including HUP/error bits).
            if !(ready & (!mio::Ready::writable())).is_empty() {
                rd = io.reader.take();
            }
        }

        if let Some(task) = rd {
            task.wake();
        }

        if let Some(task) = wr {
            task.wake();
        }
    }
}
impl fmt::Debug for Reactor {
    /// Opaque debug representation; internal state is intentionally hidden.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Reactor")
    }
}
impl Handle {
fn as_priv(&self) -> Option<&HandlePriv> {
self.inner.as_ref()
}
fn into_priv(self) -> Option<HandlePriv> {
self.inner
}
fn wakeup(&self) {
if let Some(handle) = self.as_priv() {
handle.wakeup();
}
}
}
impl Default for Handle {
    /// Returns a handle that is not bound to any specific reactor.
    fn default() -> Handle {
        Handle { inner: None }
    }
}
impl fmt::Debug for Handle {
    /// Opaque debug representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Handle")
    }
}
/// Installs `handle` as the global fallback reactor handle.
///
/// Returns `Err(())` when another thread already installed a fallback; in
/// that case `handle` is dropped here rather than leaked.
fn set_fallback(handle: HandlePriv) -> Result<(), ()> {
unsafe {
// Encode the handle's `Weak<Inner>` as a `usize` so it can live inside the
// `HANDLE_FALLBACK` atomic. On success, ownership of the weak count
// transfers to the static.
let val = handle.into_usize();
match HANDLE_FALLBACK.compare_exchange(0, val, SeqCst, SeqCst) {
Ok(_) => Ok(()),
Err(_) => {
// Lost the race: reconstruct the handle from `val` so the `Weak`
// is properly dropped instead of leaked.
drop(HandlePriv::from_usize(val));
Err(())
}
}
}
}
impl HandlePriv {
    /// Returns a handle to the current reactor: the thread-local one when
    /// set, otherwise the lazily-created global fallback reactor.
    pub(crate) fn try_current() -> io::Result<HandlePriv> {
        CURRENT_REACTOR.with(|current| match *current.borrow() {
            Some(ref handle) => Ok(handle.clone()),
            None => HandlePriv::fallback(),
        })
    }

    /// Returns a handle to the global fallback reactor, creating it (and
    /// spawning it on a background thread) on first use.
    fn fallback() -> io::Result<HandlePriv> {
        let mut fallback = HANDLE_FALLBACK.load(SeqCst);

        // If the fallback has not been initialized, try to do so here.
        // Multiple threads may race; only the winner of `set_fallback` keeps
        // its reactor, losers fall through and read the winner's handle.
        if fallback == 0 {
            let reactor = match Reactor::new() {
                Ok(reactor) => reactor,
                Err(_) => {
                    return Err(io::Error::new(
                        io::ErrorKind::Other,
                        "failed to create reactor",
                    ))
                }
            };

            if set_fallback(reactor.handle().into_priv().unwrap()).is_ok() {
                let ret = reactor.handle().into_priv().unwrap();
                // Run the fallback reactor in the background for the rest of
                // the process lifetime; spawning is best-effort.
                if let Ok(bg) = reactor.background() {
                    bg.forget();
                }
                return Ok(ret);
            }

            // Lost the race; load the handle the winner installed.
            fallback = HANDLE_FALLBACK.load(SeqCst);
        }

        // Either this thread or another must have installed the fallback by
        // now, so the slot cannot still be zero.
        assert!(fallback != 0);

        // SAFETY: `fallback` was produced by `into_usize`, so it is a valid
        // encoded `Weak<Inner>`. Clone it for the caller, then convert the
        // temporary back to `usize` (dropping a `usize` is a no-op) so the
        // `Weak` owned by `HANDLE_FALLBACK` is not dropped here.
        let ret = unsafe {
            let handle = HandlePriv::from_usize(fallback);
            let ret = handle.clone();
            drop(handle.into_usize());
            ret
        };

        Ok(ret)
    }

    /// Forces the reactor out of a blocking `poll` call, if it still exists.
    fn wakeup(&self) {
        if let Some(inner) = self.inner() {
            inner.wakeup.set_readiness(mio::Ready::readable()).unwrap();
        }
    }

    /// Converts the handle into its raw `usize` encoding, transferring
    /// ownership of the weak count to the caller (the `Weak` is not dropped).
    fn into_usize(self) -> usize {
        unsafe { mem::transmute::<Weak<Inner>, usize>(self.inner) }
    }

    /// Reconstructs a handle from `into_usize` output.
    ///
    /// # Safety
    ///
    /// `val` must have come from `into_usize`, and each encoded value may be
    /// reconstructed at most once (it owns one weak count).
    unsafe fn from_usize(val: usize) -> HandlePriv {
        let inner = mem::transmute::<usize, Weak<Inner>>(val);
        HandlePriv { inner }
    }

    /// Upgrades the weak reference, returning `None` when the reactor has
    /// been dropped.
    fn inner(&self) -> Option<Arc<Inner>> {
        self.inner.upgrade()
    }
}
impl fmt::Debug for HandlePriv {
    /// Opaque debug representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("HandlePriv")
    }
}
impl Inner {
    /// Registers an I/O resource with the reactor, returning the slab key
    /// that identifies it.
    ///
    /// The key occupies the low `TOKEN_SHIFT` bits of the `mio::Token`; an
    /// ABA guard in the high bits lets `Reactor::dispatch` reject stale
    /// events for reused slots.
    fn add_source(&self, source: &dyn Evented) -> io::Result<usize> {
        // Guard lives in the bits above TOKEN_SHIFT; wrapping is harmless.
        let aba_guard = self.next_aba_guard.fetch_add(1 << TOKEN_SHIFT, Relaxed);

        let mut io_dispatch = self.io_dispatch.write();

        if io_dispatch.len() == MAX_SOURCES {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                "reactor at max \
                 registered I/O resources",
            ));
        }

        let key = io_dispatch.insert(ScheduledIo {
            aba_guard,
            readiness: AtomicUsize::new(0),
            reader: AtomicWaker::new(),
            writer: AtomicWaker::new(),
        });

        // Register edge-triggered with all interest; actual readiness is
        // tracked in `ScheduledIo::readiness`.
        let res = self.io.register(
            source,
            mio::Token(aba_guard | key),
            mio::Ready::all(),
            mio::PollOpt::edge(),
        );

        match res {
            Ok(()) => Ok(key),
            Err(e) => {
                // The original leaked the slab entry when registration
                // failed; remove it so the slot can be reused.
                io_dispatch.remove(key);
                Err(e)
            }
        }
    }

    /// Deregisters the resource from the underlying `mio::Poll`.
    fn deregister_source(&self, source: &dyn Evented) -> io::Result<()> {
        self.io.deregister(source)
    }

    /// Removes a source's bookkeeping entry, freeing its slab slot.
    fn drop_source(&self, token: usize) {
        debug!("dropping I/O source: {}", token);
        self.io_dispatch.write().remove(token);
    }

    /// Registers the current task's waker to be notified when the source
    /// identified by `token` becomes ready in direction `dir`.
    ///
    /// Panics if `token` does not name a live source.
    fn register(&self, cx: &mut Context<'_>, token: usize, dir: Direction) {
        debug!("scheduling direction for: {}", token);
        let io_dispatch = self.io_dispatch.read();
        let sched = io_dispatch.get(token).unwrap();

        let (atomic_waker, ready) = match dir {
            // Reads are interested in every bit except writability.
            Direction::Read => (&sched.reader, !mio::Ready::writable()),
            Direction::Write => (&sched.writer, mio::Ready::writable()),
        };

        // `cx.waker()` already yields `&Waker`; the original's extra `&`
        // was a redundant borrow.
        atomic_waker.register(cx.waker());

        // If readiness arrived before the waker was stored, wake now so the
        // task re-polls and observes it.
        if sched.readiness.load(SeqCst) & ready.as_usize() != 0 {
            atomic_waker.wake();
        }
    }
}
impl Drop for Inner {
    /// Wakes every task still parked on a registered source, so nothing
    /// stays blocked on a reactor that no longer exists.
    fn drop(&mut self) {
        let registered = self.io_dispatch.read();
        for (_, scheduled) in registered.iter() {
            scheduled.writer.wake();
            scheduled.reader.wake();
        }
    }
}
impl Direction {
    /// Readiness bits that correspond to this direction: writes map to the
    /// writable bit plus platform HUP, reads map to every other bit.
    fn mask(&self) -> mio::Ready {
        match self {
            Direction::Read => mio::Ready::all() - mio::Ready::writable(),
            Direction::Write => mio::Ready::writable() | platform::hup(),
        }
    }
}
#[cfg(unix)]
pub(crate) mod platform {
    use mio::unix::UnixReady;
    use mio::Ready;

    /// Readiness bits representing hang-up on Unix platforms.
    pub fn hup() -> Ready {
        Ready::from(UnixReady::hup())
    }

    /// Returns `true` if `ready` includes the Unix HUP readiness bit.
    pub fn is_hup(ready: &Ready) -> bool {
        UnixReady::from(*ready).is_hup()
    }
}
#[cfg(windows)]
pub(crate) mod platform {
use mio::Ready;
/// Windows has no HUP readiness concept; returns the empty set.
pub fn hup() -> Ready {
Ready::empty()
}
/// HUP never occurs on Windows, so this is always `false`.
pub fn is_hup(_: &Ready) -> bool {
false
}
}