//! Interaction with Python's global interpreter lock (GIL).

use crate::{ffi, internal_tricks::Unsendable, Python};
use parking_lot::{const_mutex, Mutex, Once};
use std::cell::{Cell, RefCell};
use std::{
mem::{self, ManuallyDrop},
ptr::NonNull,
sync::atomic,
};
/// Ensures the interpreter initialization logic below runs at most once per process.
static START: Once = Once::new();

thread_local! {
    /// Counts how many `GILPool`s (and direct GIL acquisitions) are active on this thread.
    /// If it is greater than zero, this thread currently holds the GIL.
    ///
    /// `pub(crate)` because it is temporarily modified by `Python::allow_threads`.
    pub(crate) static GIL_COUNT: Cell<usize> = Cell::new(0);

    /// Temporarily holds owned objects that will be released when the current `GILPool` drops.
    static OWNED_OBJECTS: RefCell<Vec<NonNull<ffi::PyObject>>> = RefCell::new(Vec::with_capacity(256));
}
/// Checks whether the GIL is acquired, according to PyO3's own thread-local count.
pub(crate) fn gil_is_acquired() -> bool {
    GIL_COUNT.try_with(|c| c.get() > 0).unwrap_or(false)
}
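/// Prepares the use of Python in a free-threaded context.
///
/// If the interpreter is not already initialized, this initializes it via `Py_InitializeEx(0)`
/// (skipping installation of signal handlers), makes sure threading support is enabled on
/// Python < 3.7, and then releases the GIL so it can later be acquired from any thread. If the
/// interpreter is already initialized, this only asserts that threading has been initialized.
///
/// A minimal usage sketch, assuming Python is being embedded in a Rust binary (the script body
/// is illustrative only):
///
/// ```rust,no_run
/// pyo3::prepare_freethreaded_python();
/// let gil = pyo3::Python::acquire_gil();
/// let py = gil.python();
/// py.run("print('Hello from Python')", None, None).unwrap();
/// ```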
#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
#[allow(clippy::collapsible_if)]
pub fn prepare_freethreaded_python() {
    // Protects against race conditions when multiple threads call this function concurrently
    // while Python is not yet initialized. It does not protect against concurrent initialization
    // of the interpreter through the C API by other code.
    START.call_once_force(|_| unsafe {
        if ffi::Py_IsInitialized() != 0 {
            // If Python is already initialized, threading must also be initialized, since PyO3
            // cannot make the existing Python main thread acquire the GIL.
            assert_ne!(ffi::PyEval_ThreadsInitialized(), 0);
        } else {
            // Initialize Python with signal handlers disabled (initsigs = 0).
            ffi::Py_InitializeEx(0);

            // On Python < 3.7, threading must be initialized explicitly.
            if cfg!(not(Py_3_7)) {
                if ffi::PyEval_ThreadsInitialized() == 0 {
                    ffi::PyEval_InitThreads();
                }
            }

            // Py_InitializeEx acquires the GIL, so release it here to allow other threads to
            // acquire it later.
            ffi::PyEval_SaveThread();
        }
});
}
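/// Executes the provided closure with an embedded Python interpreter.
///
/// This initializes the interpreter (with signal handlers disabled), imports the `threading`
/// module, runs the closure, and then calls `Py_Finalize`.
///
/// # Safety
/// The interpreter must not already be initialized, and no Python objects or APIs may be used
/// after this function returns, since the interpreter is finalized.
///
/// A hedged usage sketch, assuming this function is re-exported at the crate root (the closure
/// body is illustrative only):
///
/// ```rust,ignore
/// unsafe {
///     pyo3::with_embedded_python_interpreter(|py| {
///         py.run("print('Hello from an embedded interpreter')", None, None).unwrap();
///     });
/// }
/// ```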
#[cfg(not(PyPy))]
#[cfg_attr(docsrs, doc(cfg(not(PyPy))))]
#[allow(clippy::collapsible_if)]
pub unsafe fn with_embedded_python_interpreter<F, R>(f: F) -> R
where
F: for<'p> FnOnce(Python<'p>) -> R,
{
assert_eq!(
ffi::Py_IsInitialized(),
0,
"called `with_embedded_python_interpreter` but a Python interpreter is already running."
);
    // Initialize Python with signal handlers disabled (initsigs = 0).
    ffi::Py_InitializeEx(0);

    // On Python < 3.7, threading must be initialized explicitly.
    if cfg!(not(Py_3_7)) {
        if ffi::PyEval_ThreadsInitialized() == 0 {
            ffi::PyEval_InitThreads();
        }
    }

    // Safe: the GIL is held because of the Py_InitializeEx call above.
    let pool = GILPool::new();

    // Import the threading module - this registers this thread as the "main" thread, which
    // avoids an error during finalization.
    pool.python().import("threading").unwrap();

    // Execute the closure, then release the pool's owned objects before finalizing.
    let result = f(pool.python());
    drop(pool);

    ffi::Py_Finalize();

    result
}
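/// RAII type that represents holding Python's Global Interpreter Lock (GIL).
///
/// The GIL is acquired when the guard is created (via `Python::acquire_gil`) and released when
/// the guard is dropped. The first guard acquired on a thread must be the last one dropped.
///
/// A brief sketch of typical use, mirroring the tests at the bottom of this file:
///
/// ```rust,no_run
/// let gil = pyo3::Python::acquire_gil();
/// let py = gil.python();
/// let _obj = py.eval("1 + 1", None, None).unwrap();
/// // The GIL is released when `gil` goes out of scope.
/// ```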
#[allow(clippy::upper_case_acronyms)]
#[must_use]
pub struct GILGuard {
gstate: ffi::PyGILState_STATE,
pool: ManuallyDrop<Option<GILPool>>,
}
impl GILGuard {
    /// Retrieves the marker type that proves that the GIL was acquired.
    #[inline]
    pub fn python(&self) -> Python {
        unsafe { Python::assume_gil_acquired() }
    }
    /// Acquires the global interpreter lock, returning a `GILGuard`.
    ///
    /// If the `auto-initialize` feature is enabled (and not on PyPy), this initializes the
    /// Python interpreter if necessary; otherwise it panics when the interpreter or its
    /// threading support has not yet been initialized.
    pub(crate) fn acquire() -> GILGuard {
cfg_if::cfg_if! {
if #[cfg(all(feature = "auto-initialize", not(PyPy)))] {
prepare_freethreaded_python();
} else {
START.call_once_force(|_| unsafe {
assert_ne!(
ffi::Py_IsInitialized(),
0,
"The Python interpreter is not initalized and the `auto-initialize` \
feature is not enabled.\n\n\
Consider calling `pyo3::prepare_freethreaded_python()` before attempting \
to use Python APIs."
);
assert_ne!(
ffi::PyEval_ThreadsInitialized(),
0,
"Python threading is not initalized and the `auto-initialize` feature is \
not enabled.\n\n\
Consider calling `pyo3::prepare_freethreaded_python()` before attempting \
to use Python APIs."
);
});
}
}
Self::acquire_unchecked()
}
    /// Acquires the GIL without performing any check of the interpreter state.
    pub(crate) fn acquire_unchecked() -> GILGuard {
        let gstate = unsafe { ffi::PyGILState_Ensure() };

        // If the GIL was not already held by PyO3, create a new pool to track owned objects;
        // the pool's constructor increments the GIL count. Otherwise just bump the count.
        let pool = if !gil_is_acquired() {
            Some(unsafe { GILPool::new() })
        } else {
            increment_gil_count();
            None
        };

        GILGuard {
            gstate,
            pool: ManuallyDrop::new(pool),
        }
    }
}
impl Drop for GILGuard {
fn drop(&mut self) {
        // If this GILGuard is the one that actually acquired the GIL (the previous state was
        // unlocked), it must also be the last guard or pool released on this thread.
        let _ = GIL_COUNT.try_with(|c| {
            if self.gstate == ffi::PyGILState_STATE::PyGILState_UNLOCKED && c.get() != 1 {
                panic!("The first GILGuard acquired must be the last one dropped.");
            }
        });

        // If this guard created a GILPool, dropping the pool decrements the GIL count; otherwise
        // the count was incremented directly in `acquire_unchecked` and is decremented below.
        let should_decrement = self.pool.is_none();
unsafe {
ManuallyDrop::drop(&mut self.pool);
}
if should_decrement {
decrement_gil_count();
}
unsafe {
ffi::PyGILState_Release(self.gstate);
}
}
}
/// A list of raw Python object pointers.
type PyObjVec = Vec<NonNull<ffi::PyObject>>;

/// Thread-safe storage for reference-count operations requested while the GIL was not held.
/// Pending operations are applied by `update_counts` the next time the GIL is acquired.
struct ReferencePool {
    /// Set whenever an operation is registered; checked cheaply before taking the lock.
    dirty: atomic::AtomicBool,
    /// Pending (increfs, decrefs).
    pointer_ops: Mutex<(PyObjVec, PyObjVec)>,
}
impl ReferencePool {
const fn new() -> Self {
Self {
dirty: atomic::AtomicBool::new(false),
pointer_ops: const_mutex((Vec::new(), Vec::new())),
}
}
fn register_incref(&self, obj: NonNull<ffi::PyObject>) {
self.pointer_ops.lock().0.push(obj);
self.dirty.store(true, atomic::Ordering::Release);
}
fn register_decref(&self, obj: NonNull<ffi::PyObject>) {
self.pointer_ops.lock().1.push(obj);
self.dirty.store(true, atomic::Ordering::Release);
}
    fn update_counts(&self, _py: Python) {
        let prev = self.dirty.swap(false, atomic::Ordering::Acquire);
        if !prev {
            return;
        }

        // Take the pending operations and release the lock before touching reference counts:
        // Py_DECREF can run arbitrary Python code, which may re-enter this function.
        let mut ops = self.pointer_ops.lock();
        let (increfs, decrefs) = mem::take(&mut *ops);
        drop(ops);

        // Apply increfs first, so that objects whose total pending change is zero are never
        // dropped prematurely by Python during this update.
        for ptr in increfs {
            unsafe { ffi::Py_INCREF(ptr.as_ptr()) };
        }

        for ptr in decrefs {
            unsafe { ffi::Py_DECREF(ptr.as_ptr()) };
        }
    }
}
unsafe impl Sync for ReferencePool {}
static POOL: ReferencePool = ReferencePool::new();
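/// A RAII pool which PyO3 uses to store owned Python references.
///
/// Owned references registered while the pool is alive (see `register_owned`) are released when
/// the pool is dropped. Creating a pool also applies any reference-count updates that were
/// deferred while the GIL was not held.
///
/// A hedged sketch of how a pool can bound object lifetimes inside a long-running GIL scope,
/// assuming `Python::new_pool` as exercised by the tests below (the loop body is illustrative
/// only):
///
/// ```rust,no_run
/// let gil = pyo3::Python::acquire_gil();
/// let py = gil.python();
/// for _ in 0..10 {
///     // SAFETY: objects created from the pool's token must not outlive the pool.
///     let pool = unsafe { py.new_pool() };
///     let py = pool.python();
///     let _obj = py.eval("object()", None, None).unwrap();
///     // `_obj` is released here, when `pool` is dropped.
/// }
/// ```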
#[allow(clippy::upper_case_acronyms)]
pub struct GILPool {
    /// Length of `OWNED_OBJECTS` at the time this pool was created, or `None` if the
    /// thread-local storage was already torn down (e.g. during thread shutdown).
    start: Option<usize>,
    no_send: Unsendable,
}
impl GILPool {
    /// Creates a new `GILPool`. This also applies any reference-count updates that were
    /// deferred while the GIL was not held.
    ///
    /// # Safety
    /// The GIL must be held, and references created while this pool is alive must not be used
    /// after the pool is dropped.
    #[inline]
    pub unsafe fn new() -> GILPool {
        increment_gil_count();
        // Apply any deferred reference-count updates now that the GIL is held.
        POOL.update_counts(Python::assume_gil_acquired());
        GILPool {
            start: OWNED_OBJECTS.try_with(|o| o.borrow().len()).ok(),
            no_send: Unsendable::default(),
        }
    }

    /// Retrieves the marker type that proves that the GIL was acquired.
    pub fn python(&self) -> Python {
        unsafe { Python::assume_gil_acquired() }
    }
}
impl Drop for GILPool {
fn drop(&mut self) {
if let Some(obj_len_start) = self.start {
let dropping_obj = OWNED_OBJECTS.with(|holder| {
let mut holder = holder.borrow_mut();
if obj_len_start < holder.len() {
holder.split_off(obj_len_start)
} else {
Vec::new()
}
});
for obj in dropping_obj {
unsafe {
ffi::Py_DECREF(obj.as_ptr());
}
}
}
decrement_gil_count();
}
}
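/// Registers an increment of the object's reference count.
///
/// If the GIL is held, the count is increased immediately; otherwise the operation is deferred
/// via the global `ReferencePool` and applied the next time the GIL is acquired.
///
/// # Safety
/// `obj` must point to a valid Python object.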
pub unsafe fn register_incref(obj: NonNull<ffi::PyObject>) {
if gil_is_acquired() {
ffi::Py_INCREF(obj.as_ptr())
} else {
POOL.register_incref(obj);
}
}
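/// Registers a decrement of the object's reference count.
///
/// If the GIL is held, the count is decreased immediately; otherwise the operation is deferred
/// via the global `ReferencePool` and applied the next time the GIL is acquired.
///
/// # Safety
/// `obj` must point to a valid Python object whose reference the caller owns.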
pub unsafe fn register_decref(obj: NonNull<ffi::PyObject>) {
if gil_is_acquired() {
ffi::Py_DECREF(obj.as_ptr())
} else {
POOL.register_decref(obj);
}
}
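/// Registers an owned object to be released when the current `GILPool` is dropped.
///
/// # Safety
/// The GIL must be held, and the caller must transfer ownership of one reference to `obj`.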
pub unsafe fn register_owned(_py: Python, obj: NonNull<ffi::PyObject>) {
debug_assert!(gil_is_acquired());
let _ = OWNED_OBJECTS.try_with(|holder| holder.borrow_mut().push(obj));
}
#[inline(always)]
fn increment_gil_count() {
let _ = GIL_COUNT.try_with(|c| c.set(c.get() + 1));
}
#[inline(always)]
fn decrement_gil_count() {
let _ = GIL_COUNT.try_with(|c| {
let current = c.get();
debug_assert!(
current > 0,
"Negative GIL count detected. Please report this error to the PyO3 repo as a bug."
);
c.set(current - 1);
});
}
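/// Ensures the GIL is held, acquiring it via `GILGuard::acquire` if necessary (which may
/// auto-initialize the interpreter, depending on features).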
pub(crate) fn ensure_gil() -> EnsureGIL {
if gil_is_acquired() {
EnsureGIL(None)
} else {
EnsureGIL(Some(GILGuard::acquire()))
}
}
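/// Ensures the GIL is held, without checking or auto-initializing the interpreter state.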
pub(crate) fn ensure_gil_unchecked() -> EnsureGIL {
if gil_is_acquired() {
EnsureGIL(None)
} else {
EnsureGIL(Some(GILGuard::acquire_unchecked()))
}
}
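/// A guard which can be used to ensure the GIL is held. Holds either a full `GILGuard`, or
/// nothing if the GIL was already held when it was created.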
#[allow(clippy::upper_case_acronyms)]
pub(crate) struct EnsureGIL(Option<GILGuard>);
impl EnsureGIL {
    /// Gets the Python token.
    ///
    /// # Safety
    /// If `self.0` is `None`, this falls back to `Python::assume_gil_acquired`, so the caller
    /// must guarantee that the GIL is actually held.
    pub unsafe fn python(&self) -> Python {
match &self.0 {
Some(gil) => gil.python(),
None => Python::assume_gil_acquired(),
}
}
}
#[cfg(test)]
mod tests {
use super::{gil_is_acquired, GILPool, GIL_COUNT, OWNED_OBJECTS, POOL};
use crate::{ffi, gil, AsPyPointer, IntoPyPointer, PyObject, Python, ToPyObject};
use std::ptr::NonNull;
    fn get_object(py: Python) -> PyObject {
        // Convenience function for getting a single unique object, created in a temporary pool
        // so that the caller's pool state is left unchanged.
        let pool = unsafe { py.new_pool() };
let py = pool.python();
let obj = py.eval("object()", None, None).unwrap();
obj.to_object(py)
}
fn owned_object_count() -> usize {
OWNED_OBJECTS.with(|holder| holder.borrow().len())
}
#[test]
fn test_owned() {
let gil = Python::acquire_gil();
let py = gil.python();
let obj = get_object(py);
let obj_ptr = obj.as_ptr();
let _ref = obj.clone_ref(py);
unsafe {
{
let pool = py.new_pool();
gil::register_owned(pool.python(), NonNull::new_unchecked(obj.into_ptr()));
assert_eq!(owned_object_count(), 1);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 2);
}
{
let _pool = py.new_pool();
assert_eq!(owned_object_count(), 0);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 1);
}
}
}
#[test]
fn test_owned_nested() {
let gil = Python::acquire_gil();
let py = gil.python();
let obj = get_object(py);
let _ref = obj.clone_ref(py);
let obj_ptr = obj.as_ptr();
unsafe {
{
let _pool = py.new_pool();
assert_eq!(owned_object_count(), 0);
gil::register_owned(py, NonNull::new_unchecked(obj.into_ptr()));
assert_eq!(owned_object_count(), 1);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 2);
{
let _pool = py.new_pool();
let obj = get_object(py);
gil::register_owned(py, NonNull::new_unchecked(obj.into_ptr()));
assert_eq!(owned_object_count(), 2);
}
assert_eq!(owned_object_count(), 1);
}
{
assert_eq!(owned_object_count(), 0);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 1);
}
}
}
#[test]
fn test_pyobject_drop_with_gil_decreases_refcnt() {
let gil = Python::acquire_gil();
let py = gil.python();
let obj = get_object(py);
let _ref = obj.clone_ref(py);
let obj_ptr = obj.as_ptr();
unsafe {
{
assert_eq!(owned_object_count(), 0);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 2);
}
drop(obj);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 1);
}
}
#[test]
fn test_pyobject_drop_without_gil_doesnt_decrease_refcnt() {
let gil = Python::acquire_gil();
let py = gil.python();
let obj = get_object(py);
let _ref = obj.clone_ref(py);
let obj_ptr = obj.as_ptr();
unsafe {
{
assert_eq!(owned_object_count(), 0);
assert_eq!(ffi::Py_REFCNT(obj_ptr), 2);
}
            // Dropping the object without the GIL registers a deferred decref, so the count is
            // unchanged for now.
            drop(gil);
            drop(obj);
            assert_eq!(ffi::Py_REFCNT(obj_ptr), 2);

            // Reacquiring the GIL applies the pending decref.
            {
                let _gil = Python::acquire_gil();
                assert_eq!(ffi::Py_REFCNT(obj_ptr), 1);
            }
}
}
    #[test]
    fn test_gil_counts() {
        // Check that GILGuard and GILPool both increase and decrease the GIL count correctly.
        let get_gil_count = || GIL_COUNT.with(|c| c.get());

        assert_eq!(get_gil_count(), 0);
        let gil = Python::acquire_gil();
        assert_eq!(get_gil_count(), 1);
let pool = unsafe { GILPool::new() };
assert_eq!(get_gil_count(), 2);
let pool2 = unsafe { GILPool::new() };
assert_eq!(get_gil_count(), 3);
drop(pool);
assert_eq!(get_gil_count(), 2);
let gil2 = Python::acquire_gil();
assert_eq!(get_gil_count(), 3);
drop(gil2);
assert_eq!(get_gil_count(), 2);
drop(pool2);
assert_eq!(get_gil_count(), 1);
drop(gil);
assert_eq!(get_gil_count(), 0);
}
#[test]
fn test_allow_threads() {
let gil = Python::acquire_gil();
let py = gil.python();
assert!(gil_is_acquired());
py.allow_threads(move || {
assert!(!gil_is_acquired());
let gil = Python::acquire_gil();
assert!(gil_is_acquired());
drop(gil);
assert!(!gil_is_acquired());
});
assert!(gil_is_acquired());
}
    #[test]
    fn dropping_gil_does_not_invalidate_references() {
        // Acquiring the GIL a second time and dropping that guard must not release objects that
        // were registered with the outer guard's pool.
        let gil = Python::acquire_gil();
        let py = gil.python();
        let obj;

        let gil2 = Python::acquire_gil();
        obj = py.eval("object()", None, None).unwrap();
        drop(gil2);

        // After `gil2` drops, `obj` should still be valid with a reference count of one.
        assert_eq!(obj.get_refcnt(), 1);
    }
#[test]
fn test_clone_with_gil() {
let gil = Python::acquire_gil();
let py = gil.python();
let obj = get_object(py);
let count = obj.get_refcnt(py);
#[allow(clippy::redundant_clone)]
let c = obj.clone();
assert_eq!(count + 1, c.get_refcnt(py));
}
#[test]
fn test_clone_without_gil() {
let gil = Python::acquire_gil();
let py = gil.python();
let obj = get_object(py);
let count = obj.get_refcnt(py);
drop(gil);
let c = obj.clone();
assert_eq!(
count,
obj.get_refcnt(unsafe { Python::assume_gil_acquired() })
);
let gil = Python::acquire_gil();
let py = gil.python();
assert_eq!(count + 1, obj.get_refcnt(py));
drop(c);
assert_eq!(count, obj.get_refcnt(py));
}
    #[test]
    fn test_clone_in_other_thread() {
        let gil = Python::acquire_gil();
        let py = gil.python();
        let obj = get_object(py);
        let count = obj.get_refcnt(py);

        // Clone and drop the object in another thread, which does not hold the GIL: both the
        // incref and the decref are deferred to the global pool.
        let t = std::thread::spawn(move || {
            #[allow(clippy::redundant_clone)]
            let _ = obj.clone();
            assert_eq!(
                count,
                obj.get_refcnt(unsafe { Python::assume_gil_acquired() })
            );
            obj
        });
        let obj = t.join().unwrap();
        let ptr = NonNull::new(obj.as_ptr()).unwrap();

        // The pending operations are stored in POOL until the GIL is next acquired.
        assert_eq!(&POOL.pointer_ops.lock().0, &vec![ptr]);
        assert_eq!(&POOL.pointer_ops.lock().1, &vec![ptr]);

        // Reacquiring the GIL applies both pending operations, leaving the count unchanged.
        drop(gil);
        let gil = Python::acquire_gil();
        assert!(POOL.pointer_ops.lock().0.is_empty());
        assert!(POOL.pointer_ops.lock().1.is_empty());
        assert_eq!(count, obj.get_refcnt(gil.python()));
    }
    #[test]
    fn test_update_counts_does_not_deadlock() {
        // `update_counts` can run arbitrary Python code during Py_DECREF (here, a capsule
        // destructor that itself creates a GILPool). If the pointer_ops lock were held across
        // the DECREF calls, this would deadlock.
        let gil = Python::acquire_gil();
        let obj = get_object(gil.python());

        unsafe {
unsafe extern "C" fn capsule_drop(capsule: *mut ffi::PyObject) {
let pool = GILPool::new();
PyObject::from_owned_ptr(
pool.python(),
ffi::PyCapsule_GetPointer(capsule, std::ptr::null()) as _,
);
}
let ptr = obj.into_ptr();
let capsule = ffi::PyCapsule_New(ptr as _, std::ptr::null(), Some(capsule_drop));
POOL.register_decref(NonNull::new(capsule).unwrap());
POOL.update_counts(gil.python())
}
}
}