use super::{
PyAtomicRef,
ext::{AsObject, PyRefExact, PyResult},
payload::PyPayload,
};
use crate::object::traverse_object::PyObjVTable;
use crate::{
builtins::{PyDictRef, PyType, PyTypeRef},
common::{
atomic::{Ordering, PyAtomic, Radium},
linked_list::{Link, Pointers},
lock::PyRwLock,
refcount::RefCount,
},
vm::VirtualMachine,
};
use crate::{
class::StaticType,
object::traverse::{MaybeTraverse, Traverse, TraverseFn},
};
use itertools::Itertools;
use alloc::fmt;
use core::{
any::TypeId,
borrow::Borrow,
cell::UnsafeCell,
marker::PhantomData,
mem::ManuallyDrop,
num::NonZeroUsize,
ops::Deref,
ptr::{self, NonNull},
};
/// Type-erased payload marker: `PyInner<Erased>` is the common header view
/// shared by all objects regardless of their concrete payload type.
#[derive(Debug)]
pub(super) struct Erased;
/// "Trashcan" machinery: bounds the recursion depth of object deallocation.
/// Destroying deeply nested containers would otherwise add one stack frame per
/// nesting level; past `TRASHCAN_LIMIT` levels, objects are queued and
/// destroyed iteratively once the stack unwinds back to depth zero.
mod trashcan {
    use core::cell::Cell;

    /// Maximum nested dealloc depth before deferring to the queue.
    const TRASHCAN_LIMIT: usize = 50;

    type DeallocFn = unsafe fn(*mut super::PyObject);
    type DeallocQueue = Vec<(*mut super::PyObject, DeallocFn)>;

    thread_local! {
        // Current nesting depth of in-progress deallocations on this thread.
        static DEALLOC_DEPTH: Cell<usize> = const { Cell::new(0) };
        // Objects whose destruction was deferred because the limit was hit.
        static DEALLOC_QUEUE: Cell<DeallocQueue> = const { Cell::new(Vec::new()) };
    }

    /// Try to enter a dealloc scope for `obj`.
    ///
    /// Returns `true` if the caller should destroy the object now (and must
    /// later call [`end`]); returns `false` if the object was queued for
    /// deferred destruction instead (caller must NOT call `end`).
    #[inline]
    pub(super) unsafe fn begin(
        obj: *mut super::PyObject,
        dealloc: unsafe fn(*mut super::PyObject),
    ) -> bool {
        DEALLOC_DEPTH.with(|d| {
            let depth = d.get();
            if depth >= TRASHCAN_LIMIT {
                DEALLOC_QUEUE.with(|q| {
                    // `Cell::take`/`set` dance avoids RefCell borrow overhead.
                    let mut queue = q.take();
                    queue.push((obj, dealloc));
                    q.set(queue);
                });
                false
            } else {
                d.set(depth + 1);
                true
            }
        })
    }

    /// Leave a dealloc scope begun by a successful [`begin`].
    ///
    /// When the outermost scope exits (depth returns to zero), drain the
    /// deferred queue. Each queued dealloc may itself re-enter begin/end and
    /// push more items, so entries are popped one at a time rather than
    /// swapping the whole queue out.
    #[inline]
    pub(super) unsafe fn end() {
        let depth = DEALLOC_DEPTH.with(|d| {
            let depth = d.get();
            debug_assert!(depth > 0, "trashcan::end called without matching begin");
            let depth = depth - 1;
            d.set(depth);
            depth
        });
        if depth == 0 {
            loop {
                let next = DEALLOC_QUEUE.with(|q| {
                    let mut queue = q.take();
                    let item = queue.pop();
                    q.set(queue);
                    item
                });
                if let Some((obj, dealloc)) = next {
                    unsafe { dealloc(obj) };
                } else {
                    break;
                }
            }
        }
    }
}
/// Default vtable `dealloc` implementation for payload type `T`.
///
/// Order of operations: run finalization (`drop_slow_inner`: `__del__` and
/// weakref teardown), enter the trashcan (which may defer the rest), untrack
/// from the GC, try the per-type freelist, run the vtable `clear` hook, then
/// free the allocation.
///
/// # Safety
/// `obj` must be an object of payload type `T` whose refcount reached zero.
pub(super) unsafe fn default_dealloc<T: PyPayload>(obj: *mut PyObject) {
    let obj_ref = unsafe { &*(obj as *const PyObject) };
    // Err(()) means __del__ resurrected the object; abort destruction.
    if let Err(()) = obj_ref.drop_slow_inner() {
        return;
    }
    // false => destruction was queued by the trashcan and will be re-run
    // later from the outermost `end()`; don't call `end` here.
    if !unsafe { trashcan::begin(obj, default_dealloc::<T>) } {
        return;
    }
    let vtable = obj_ref.0.vtable;
    if obj_ref.is_gc_tracked() {
        let ptr = unsafe { NonNull::new_unchecked(obj) };
        unsafe {
            crate::gc_state::gc_state().untrack_object(ptr);
        }
        debug_assert!(
            !obj_ref.is_gc_tracked(),
            "object still tracked after untrack_object"
        );
        debug_assert_eq!(
            obj_ref.gc_generation(),
            crate::object::GC_UNTRACKED,
            "gc_generation not reset after untrack_object"
        );
    }
    let typ = obj_ref.class();
    // Only exact instances of T's static class (no heap-type extras) are
    // eligible for the freelist; a successful push skips deallocation.
    let pushed = if T::HAS_FREELIST
        && typ.heaptype_ext.is_none()
        && core::ptr::eq(typ, T::class(crate::vm::Context::genesis()))
    {
        unsafe { T::freelist_push(obj) }
    } else {
        false
    };
    // `clear` moves the payload's outgoing references into `edges` so they
    // are dropped only after the object itself is gone (below), still inside
    // the trashcan scope.
    let mut edges = Vec::new();
    if let Some(clear_fn) = vtable.clear {
        unsafe { clear_fn(obj, &mut edges) };
    }
    if !pushed {
        unsafe { PyInner::dealloc(obj as *mut PyInner<T>) };
    }
    drop(edges);
    unsafe { trashcan::end() };
}
/// Vtable `debug` implementation: reinterpret the header as `PyInner<T>` and
/// format via the payload's `Debug` impl.
///
/// # Safety
/// `x`'s payload must actually be a `T`.
pub(super) unsafe fn debug_obj<T: PyPayload + core::fmt::Debug>(
    x: &PyObject,
    f: &mut fmt::Formatter<'_>,
) -> fmt::Result {
    let x = unsafe { &*(x as *const PyObject as *const PyInner<T>) };
    fmt::Debug::fmt(x, f)
}
/// Vtable `traverse` implementation: visit the payload's outgoing references.
///
/// # Safety
/// `x`'s payload must actually be a `T`.
pub(super) unsafe fn try_traverse_obj<T: PyPayload>(x: &PyObject, tracer_fn: &mut TraverseFn<'_>) {
    let x = unsafe { &*(x as *const PyObject as *const PyInner<T>) };
    let payload = &x.payload;
    payload.try_traverse(tracer_fn)
}
/// Vtable `clear` implementation: move the payload's outgoing references
/// into `out` so the caller decides when to drop them.
///
/// # Safety
/// `x`'s payload must actually be a `T`, and the `&mut` access must not alias.
pub(super) unsafe fn try_clear_obj<T: PyPayload>(x: *mut PyObject, out: &mut Vec<PyObjectRef>) {
    let x = unsafe { &mut *(x as *mut PyInner<T>) };
    x.payload.try_clear(out);
}
bitflags::bitflags! {
    /// Per-object GC state bits stored in `PyInner::gc_bits`.
    #[derive(Copy, Clone, Debug, Default)]
    pub(crate) struct GcBits: u8 {
        /// Object is currently tracked by the cycle collector.
        const TRACKED = 1 << 0;
        /// `__del__` has already been run; must not run again.
        const FINALIZED = 1 << 1;
        // The bits below are manipulated by the collector itself; they are
        // not referenced in this file (confirm semantics at their use sites).
        const UNREACHABLE = 1 << 2;
        const FROZEN = 1 << 3;
        const SHARED = 1 << 4;
        const SHARED_INLINE = 1 << 5;
        const DEFERRED = 1 << 6;
    }
}
/// Sentinel value of `gc_generation` for objects not tracked by the GC
/// (new objects start with this; `untrack_object` restores it).
pub(crate) const GC_UNTRACKED: u8 = 0xFF;
/// NOTE(review): presumably the generation index for permanent (never
/// collected) objects — not referenced in this file; confirm at use sites.
pub(crate) const GC_PERMANENT: u8 = 3;
/// Intrusive-list adapter used by the GC to thread tracked objects through
/// the `gc_pointers` field embedded in every object header.
pub(crate) struct GcLink;
unsafe impl Link for GcLink {
    // Handles are plain non-owning pointers; the GC manages lifetimes itself.
    type Handle = NonNull<PyObject>;
    type Target = PyObject;
    fn as_raw(handle: &NonNull<PyObject>) -> NonNull<PyObject> {
        *handle
    }
    unsafe fn from_raw(ptr: NonNull<PyObject>) -> NonNull<PyObject> {
        ptr
    }
    unsafe fn pointers(target: NonNull<PyObject>) -> NonNull<Pointers<PyObject>> {
        // `PyObject` is a transparent wrapper over `PyInner<Erased>`, so this
        // cast gives access to the embedded `gc_pointers` field.
        let inner_ptr = target.as_ptr() as *mut PyInner<Erased>;
        unsafe { NonNull::new_unchecked(&raw mut (*inner_ptr).gc_pointers) }
    }
}
/// Optional per-instance extension header holding `__dict__` and `__slots__`
/// storage. When present it is allocated immediately *before* the `PyInner`
/// within the same allocation (see `PyInner::new` / `PyInner::ext_ref`).
#[repr(C, align(8))]
pub(super) struct ObjExt {
    pub(super) dict: Option<InstanceDict>,
    pub(super) slots: Box<[PyRwLock<Option<PyObjectRef>>]>,
}
impl ObjExt {
    /// Create the extension header: an optional instance `__dict__` plus
    /// `member_count` empty `__slots__` cells.
    fn new(dict: Option<PyDictRef>, member_count: usize) -> Self {
        Self {
            dict: dict.map(InstanceDict::new),
            // Collect straight into `Box<[_]>` (FromIterator for Box<[T]>);
            // no need for itertools' `collect_vec()` + `into_boxed_slice()`.
            slots: core::iter::repeat_with(|| PyRwLock::new(None))
                .take(member_count)
                .collect(),
        }
    }
}
impl fmt::Debug for ObjExt {
    /// Opaque debug rendering; dict/slot contents are deliberately not dumped.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("[ObjExt]")
    }
}
// Byte offsets from a `PyInner` back to the headers allocated in front of it.
// They are valid only if `Layout::extend` inserts no padding between the
// headers and the object; the compile-time asserts below guarantee exactly
// that (each header's size is a multiple of its alignment, and each header's
// alignment is at least the object header's).
const EXT_OFFSET: usize = core::mem::size_of::<ObjExt>();
const WEAKREF_OFFSET: usize = core::mem::size_of::<WeakRefList>();
const _: () =
    assert!(core::mem::size_of::<ObjExt>().is_multiple_of(core::mem::align_of::<ObjExt>()));
const _: () = assert!(core::mem::align_of::<ObjExt>() >= core::mem::align_of::<PyInner<()>>());
const _: () = assert!(
    core::mem::size_of::<WeakRefList>().is_multiple_of(core::mem::align_of::<WeakRefList>())
);
const _: () = assert!(core::mem::align_of::<WeakRefList>() >= core::mem::align_of::<PyInner<()>>());
/// The in-memory header + payload of every Python object.
///
/// `#[repr(C)]` keeps the header fields at the same offsets for every `T`,
/// allowing type-erased access through `PyInner<Erased>`.
#[repr(C)]
pub(super) struct PyInner<T> {
    pub(super) ref_count: RefCount,
    /// Per-payload-type function table (dealloc/debug/traverse/clear).
    pub(super) vtable: &'static PyObjVTable,
    /// `GcBits` flags (atomic u8).
    pub(super) gc_bits: PyAtomic<u8>,
    /// GC generation index, or `GC_UNTRACKED` (0xFF) when untracked.
    pub(super) gc_generation: PyAtomic<u8>,
    /// Intrusive list node used by the GC (see `GcLink`).
    pub(super) gc_pointers: Pointers<PyObject>,
    /// The object's Python class.
    pub(super) typ: PyAtomicRef<PyType>,
    pub(super) payload: T,
}
/// Size of the common object header (a `PyInner` with zero-sized payload).
pub(crate) const SIZEOF_PYOBJECT_HEAD: usize = core::mem::size_of::<PyInner<()>>();
impl<T> PyInner<T> {
    /// Read this object's type flags and `__slots__` member count directly
    /// through raw pointers, without materializing a `&PyType` reference.
    #[inline(always)]
    fn read_type_flags(&self) -> (crate::types::PyTypeFlags, usize) {
        let typ_ptr = self.typ.load_raw();
        let slots = unsafe { core::ptr::addr_of!((*typ_ptr).0.payload.slots) };
        let flags = unsafe { core::ptr::addr_of!((*slots).flags).read() };
        let member_count = unsafe { core::ptr::addr_of!((*slots).member_count).read() };
        (flags, member_count)
    }

    /// Locate the `ObjExt` header allocated in front of this object, if the
    /// type requested one (HAS_DICT or a nonzero member count).
    #[inline(always)]
    pub(super) fn ext_ref(&self) -> Option<&ObjExt> {
        let (flags, member_count) = self.read_type_flags();
        let has_ext = flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
        if !has_ext {
            return None;
        }
        // Allocation order is [ObjExt][WeakRefList][PyInner], so the ext
        // header sits further back when a weakref list is also present.
        let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        let offset = if has_weakref {
            WEAKREF_OFFSET + EXT_OFFSET
        } else {
            EXT_OFFSET
        };
        // Provenance was exposed at allocation time (see `PyInner::new`),
        // so reconstructing the pointer from a raw address is sound.
        let self_addr = (self as *const Self as *const u8).addr();
        let ext_ptr = core::ptr::with_exposed_provenance::<ObjExt>(self_addr.wrapping_sub(offset));
        Some(unsafe { &*ext_ptr })
    }

    /// Locate the `WeakRefList` header allocated immediately in front of this
    /// object, if the type supports weak references.
    #[inline(always)]
    pub(super) fn weakref_list_ref(&self) -> Option<&WeakRefList> {
        let (flags, _) = self.read_type_flags();
        if !flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF) {
            return None;
        }
        let self_addr = (self as *const Self as *const u8).addr();
        let ptr = core::ptr::with_exposed_provenance::<WeakRefList>(
            self_addr.wrapping_sub(WEAKREF_OFFSET),
        );
        Some(unsafe { &*ptr })
    }
}
impl<T: fmt::Debug> fmt::Debug for PyInner<T> {
    /// Debug-print only the payload; header fields are omitted.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[PyObject {:?}]", &self.payload)
    }
}
// Traversal on public wrappers delegates to the header implementation.
unsafe impl<T: MaybeTraverse> Traverse for Py<T> {
    fn traverse(&self, tracer_fn: &mut TraverseFn<'_>) {
        self.0.traverse(tracer_fn)
    }
}
unsafe impl Traverse for PyObject {
    fn traverse(&self, tracer_fn: &mut TraverseFn<'_>) {
        self.0.traverse(tracer_fn)
    }
}
/// Striped spin locks protecting per-object weakref lists (threading build).
/// An object's stripe is chosen from its address, so unrelated objects may
/// share a stripe, but a given object always maps to the same one.
#[cfg(feature = "threading")]
mod weakref_lock {
    use core::sync::atomic::{AtomicU8, Ordering};

    const NUM_WEAKREF_LOCKS: usize = 64;
    // 0 = unlocked, 1 = locked.
    static LOCKS: [AtomicU8; NUM_WEAKREF_LOCKS] = [const { AtomicU8::new(0) }; NUM_WEAKREF_LOCKS];

    /// RAII guard; releases its stripe on drop.
    pub(super) struct WeakrefLockGuard {
        idx: usize,
    }
    impl Drop for WeakrefLockGuard {
        fn drop(&mut self) {
            LOCKS[self.idx].store(0, Ordering::Release);
        }
    }

    /// Spin until the stripe for `addr` is acquired. Not reentrant: locking
    /// the same stripe twice on one thread would deadlock.
    pub(super) fn lock(addr: usize) -> WeakrefLockGuard {
        // Shift off low alignment bits so nearby objects spread over stripes.
        let idx = (addr >> 4) % NUM_WEAKREF_LOCKS;
        loop {
            if LOCKS[idx]
                .compare_exchange_weak(0, 1, Ordering::Acquire, Ordering::Relaxed)
                .is_ok()
            {
                return WeakrefLockGuard { idx };
            }
            core::hint::spin_loop();
        }
    }

    /// Force-release every stripe after `fork()`: the child inherits the lock
    /// words but not the threads that held them.
    #[cfg(unix)]
    pub(crate) fn reset_all_after_fork() {
        for lock in &LOCKS {
            lock.store(0, Ordering::Release);
        }
    }
}
/// Single-threaded build: locking is a no-op and the guard is a unit type,
/// keeping the call sites identical across both configurations.
#[cfg(not(feature = "threading"))]
mod weakref_lock {
    pub(super) struct WeakrefLockGuard;
    impl Drop for WeakrefLockGuard {
        fn drop(&mut self) {}
    }
    pub(super) fn lock(_addr: usize) -> WeakrefLockGuard {
        WeakrefLockGuard
    }
}
/// Crate-visible re-export of the post-fork lock reset (unix + threading only).
#[cfg(all(unix, feature = "threading"))]
pub(crate) fn reset_weakref_locks_after_fork() {
    weakref_lock::reset_all_after_fork();
}
/// Header holding an object's weak references, allocated in front of the
/// `PyInner` for types with HAS_WEAKREF. `head` starts a doubly linked list
/// of all weakref nodes; `generic` caches the shared callback-less exact
/// `weakref` instance so repeated requests can return the same object.
#[repr(C)]
pub(super) struct WeakRefList {
    head: PyAtomic<*mut Py<PyWeak>>,
    generic: PyAtomic<*mut Py<PyWeak>>,
}
impl fmt::Debug for WeakRefList {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("WeakRefList").finish_non_exhaustive()
    }
}
/// Unlink `node` from `wrl`'s doubly linked list and clear its link pointers.
///
/// # Safety
/// The caller must hold the weakref stripe lock for the referent, and `node`
/// must currently be a member of `wrl`'s list.
unsafe fn unlink_weakref(wrl: &WeakRefList, node: NonNull<Py<PyWeak>>) {
    unsafe {
        let mut ptrs = WeakLink::pointers(node);
        let prev = ptrs.as_ref().get_prev();
        let next = ptrs.as_ref().get_next();
        if let Some(prev) = prev {
            WeakLink::pointers(prev).as_mut().set_next(next);
        } else {
            // Node was the head; advance the head to its successor.
            wrl.head.store(
                next.map_or(ptr::null_mut(), |p| p.as_ptr()),
                Ordering::Relaxed,
            );
        }
        if let Some(next) = next {
            WeakLink::pointers(next).as_mut().set_prev(prev);
        }
        ptrs.as_mut().set_prev(None);
        ptrs.as_mut().set_next(None);
    }
}
impl WeakRefList {
    /// An empty list (both pointers null).
    pub fn new() -> Self {
        Self {
            head: Radium::new(ptr::null_mut()),
            generic: Radium::new(ptr::null_mut()),
        }
    }

    /// Create (or reuse) a weak reference to `obj`.
    ///
    /// A "generic" weakref — exact `weakref` class with no callback — is
    /// shared: if a live one already exists it is returned instead of
    /// allocating a new node. Non-generic refs are inserted right after the
    /// generic node (or at the head) so the generic node stays findable.
    fn add(
        &self,
        obj: &PyObject,
        cls: PyTypeRef,
        cls_is_weakref: bool,
        callback: Option<PyObjectRef>,
        dict: Option<PyDictRef>,
    ) -> PyRef<PyWeak> {
        let is_generic = cls_is_weakref && callback.is_none();
        {
            // Fast path: an existing generic ref that is still alive.
            let _lock = weakref_lock::lock(obj as *const PyObject as usize);
            if is_generic {
                let generic_ptr = self.generic.load(Ordering::Relaxed);
                if !generic_ptr.is_null() {
                    let generic = unsafe { &*generic_ptr };
                    if generic.0.ref_count.safe_inc() {
                        return unsafe { PyRef::from_raw(generic_ptr) };
                    }
                }
            }
        }
        // Allocate the new weakref object outside the stripe lock.
        let weak_payload = PyWeak {
            pointers: Pointers::new(),
            wr_object: Radium::new(obj as *const PyObject as *mut PyObject),
            callback: UnsafeCell::new(callback),
            hash: Radium::new(crate::common::hash::SENTINEL),
        };
        let weak = PyRef::new_ref(weak_payload, cls, dict);
        let _lock = weakref_lock::lock(obj as *const PyObject as usize);
        if is_generic {
            // Re-check: a generic ref may have been installed while the lock
            // was released. If so, neuter our fresh node (so its Drop is a
            // no-op) and return the winner instead.
            let generic_ptr = self.generic.load(Ordering::Relaxed);
            if !generic_ptr.is_null() {
                let generic = unsafe { &*generic_ptr };
                if generic.0.ref_count.safe_inc() {
                    weak.wr_object.store(ptr::null_mut(), Ordering::Relaxed);
                    return unsafe { PyRef::from_raw(generic_ptr) };
                }
            }
        }
        let node_ptr = NonNull::from(&*weak);
        unsafe {
            let mut ptrs = WeakLink::pointers(node_ptr);
            if is_generic {
                // Push at the head and record it as the shared generic ref.
                let old_head = self.head.load(Ordering::Relaxed);
                ptrs.as_mut().set_next(NonNull::new(old_head));
                ptrs.as_mut().set_prev(None);
                if let Some(old_head) = NonNull::new(old_head) {
                    WeakLink::pointers(old_head)
                        .as_mut()
                        .set_prev(Some(node_ptr));
                }
                self.head.store(node_ptr.as_ptr(), Ordering::Relaxed);
                self.generic.store(node_ptr.as_ptr(), Ordering::Relaxed);
            } else {
                let generic_ptr = self.generic.load(Ordering::Relaxed);
                if let Some(after) = NonNull::new(generic_ptr) {
                    // Splice in directly after the generic node.
                    let after_next = WeakLink::pointers(after).as_ref().get_next();
                    ptrs.as_mut().set_prev(Some(after));
                    ptrs.as_mut().set_next(after_next);
                    WeakLink::pointers(after).as_mut().set_next(Some(node_ptr));
                    if let Some(next) = after_next {
                        WeakLink::pointers(next).as_mut().set_prev(Some(node_ptr));
                    }
                } else {
                    // No generic node: push at the head.
                    let old_head = self.head.load(Ordering::Relaxed);
                    ptrs.as_mut().set_next(NonNull::new(old_head));
                    ptrs.as_mut().set_prev(None);
                    if let Some(old_head) = NonNull::new(old_head) {
                        WeakLink::pointers(old_head)
                            .as_mut()
                            .set_prev(Some(node_ptr));
                    }
                    self.head.store(node_ptr.as_ptr(), Ordering::Relaxed);
                }
            }
        }
        weak
    }

    /// Neuter every weakref to `obj` (called when `obj` dies), then invoke
    /// the pending callbacks *after* releasing the stripe lock.
    fn clear(&self, obj: &PyObject) {
        let obj_addr = obj as *const PyObject as usize;
        let _lock = weakref_lock::lock(obj_addr);
        self.generic.store(ptr::null_mut(), Ordering::Relaxed);
        let mut callbacks: Vec<(PyRef<PyWeak>, PyObjectRef)> = Vec::new();
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            let next = unsafe { WeakLink::pointers(node).as_ref().get_next() };
            let wr = unsafe { node.as_ref() };
            // A null wr_object marks the weakref as dead.
            wr.0.payload
                .wr_object
                .store(ptr::null_mut(), Ordering::Relaxed);
            unsafe {
                let mut ptrs = WeakLink::pointers(node);
                ptrs.as_mut().set_prev(None);
                ptrs.as_mut().set_next(None);
            }
            // Only take the callback if the weakref itself is still alive.
            if wr.0.ref_count.safe_inc() {
                let wr_ref = unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) };
                let cb = unsafe { wr.0.payload.callback.get().replace(None) };
                if let Some(cb) = cb {
                    callbacks.push((wr_ref, cb));
                }
            }
            current = next;
        }
        self.head.store(ptr::null_mut(), Ordering::Relaxed);
        drop(_lock);
        // Callbacks run unlocked; their errors are ignored (best effort).
        for (wr, cb) in callbacks {
            crate::vm::thread::with_vm(&cb, |vm| {
                let _ = cb.call((wr.clone(),), vm);
            });
        }
    }

    /// Like `clear`, but returns the pending callbacks to the caller (the GC)
    /// instead of invoking them, so the collector controls when they run.
    fn clear_for_gc_collect_callbacks(&self, obj: &PyObject) -> Vec<(PyRef<PyWeak>, PyObjectRef)> {
        let obj_addr = obj as *const PyObject as usize;
        let _lock = weakref_lock::lock(obj_addr);
        self.generic.store(ptr::null_mut(), Ordering::Relaxed);
        let mut callbacks = Vec::new();
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            let next = unsafe { WeakLink::pointers(node).as_ref().get_next() };
            let wr = unsafe { node.as_ref() };
            wr.0.payload
                .wr_object
                .store(ptr::null_mut(), Ordering::Relaxed);
            unsafe {
                let mut ptrs = WeakLink::pointers(node);
                ptrs.as_mut().set_prev(None);
                ptrs.as_mut().set_next(None);
            }
            if wr.0.ref_count.safe_inc() {
                let wr_ref = unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) };
                let cb = unsafe { wr.0.payload.callback.get().replace(None) };
                if let Some(cb) = cb {
                    callbacks.push((wr_ref, cb));
                }
            }
            current = next;
        }
        self.head.store(ptr::null_mut(), Ordering::Relaxed);
        callbacks
    }

    /// Number of live weak references to `obj`.
    fn count(&self, obj: &PyObject) -> usize {
        let _lock = weakref_lock::lock(obj as *const PyObject as usize);
        let mut count = 0usize;
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            // Skip nodes whose refcount already hit zero (mid-drop).
            if unsafe { node.as_ref() }.0.ref_count.get() > 0 {
                count += 1;
            }
            current = unsafe { WeakLink::pointers(node).as_ref().get_next() };
        }
        count
    }

    /// Strong references to every live weakref object pointing at `obj`.
    fn get_weak_references(&self, obj: &PyObject) -> Vec<PyRef<PyWeak>> {
        let _lock = weakref_lock::lock(obj as *const PyObject as usize);
        let mut v = Vec::new();
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            let wr = unsafe { node.as_ref() };
            if wr.0.ref_count.safe_inc() {
                v.push(unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) });
            }
            current = unsafe { WeakLink::pointers(node).as_ref().get_next() };
        }
        v
    }
}
impl Default for WeakRefList {
    /// Same as [`WeakRefList::new`]: an empty list.
    fn default() -> Self {
        Self::new()
    }
}
/// Intrusive-list adapter for weakref nodes: the links live in the `pointers`
/// field of the `PyWeak` payload, and handles are strong `PyRef`s.
struct WeakLink;
unsafe impl Link for WeakLink {
    type Handle = PyRef<PyWeak>;
    type Target = Py<PyWeak>;
    #[inline(always)]
    fn as_raw(handle: &PyRef<PyWeak>) -> NonNull<Self::Target> {
        NonNull::from(&**handle)
    }
    #[inline(always)]
    unsafe fn from_raw(ptr: NonNull<Self::Target>) -> Self::Handle {
        unsafe { PyRef::from_raw(ptr.as_ptr()) }
    }
    #[inline(always)]
    unsafe fn pointers(target: NonNull<Self::Target>) -> NonNull<Pointers<Self::Target>> {
        unsafe { NonNull::new_unchecked(&raw mut (*target.as_ptr()).0.payload.pointers) }
    }
}
/// Payload of the Python `weakref` type.
#[pyclass(name = "weakref", module = false)]
#[derive(Debug)]
pub struct PyWeak {
    /// Intrusive links into the referent's `WeakRefList`.
    pointers: Pointers<Py<PyWeak>>,
    /// The referent; null once the referent has died ("dead" weakref).
    wr_object: PyAtomic<*mut PyObject>,
    /// Optional callback, taken at most once when the referent dies.
    /// Only accessed while holding the referent's weakref stripe lock.
    callback: UnsafeCell<Option<PyObjectRef>>,
    /// Hash cache, initialized to SENTINEL; managed by weakref method
    /// implementations elsewhere.
    pub(crate) hash: PyAtomic<crate::common::hash::PyHash>,
}
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: the UnsafeCell'd callback is only touched under the stripe
        // lock; the remaining fields are atomics. NOTE(review): also relies
        // on PyObjectRef being Send/Sync in the threading build.
        unsafe impl Send for PyWeak {}
        unsafe impl Sync for PyWeak {}
    }
}
impl PyWeak {
    /// Try to obtain a strong reference to the referent; `None` if it died.
    pub(crate) fn upgrade(&self) -> Option<PyObjectRef> {
        // Unlocked fast path: already dead.
        let obj_ptr = self.wr_object.load(Ordering::Acquire);
        if obj_ptr.is_null() {
            return None;
        }
        let _lock = weakref_lock::lock(obj_ptr as usize);
        // Re-read under the lock: the referent may have died meanwhile.
        let obj_ptr = self.wr_object.load(Ordering::Relaxed);
        if obj_ptr.is_null() {
            return None;
        }
        unsafe {
            // safe_inc fails if the refcount already reached zero.
            if !(*obj_ptr).0.ref_count.safe_inc() {
                return None;
            }
            Some(PyObjectRef::from_raw(NonNull::new_unchecked(obj_ptr)))
        }
    }

    /// Whether the referent has been destroyed.
    pub(crate) fn is_dead(&self) -> bool {
        self.wr_object.load(Ordering::Acquire).is_null()
    }

    /// Drop-time teardown: unlink this node from the referent's weakref list
    /// (no-op if the referent is already gone).
    fn drop_inner(&self) {
        let obj_ptr = self.wr_object.load(Ordering::Acquire);
        if obj_ptr.is_null() {
            return;
        }
        let _lock = weakref_lock::lock(obj_ptr as usize);
        let obj_ptr = self.wr_object.load(Ordering::Relaxed);
        if obj_ptr.is_null() {
            return;
        }
        let obj = unsafe { &*obj_ptr };
        let wrl = obj.0.weakref_list_ref().unwrap();
        // Recover the containing `Py<PyWeak>` node from the payload pointer.
        let offset = std::mem::offset_of!(PyInner<Self>, payload);
        let py_inner = (self as *const Self)
            .cast::<u8>()
            .wrapping_sub(offset)
            .cast::<PyInner<Self>>();
        let node_ptr = unsafe { NonNull::new_unchecked(py_inner as *mut Py<Self>) };
        unsafe { unlink_weakref(wrl, node_ptr) };
        // If we were the cached generic ref, clear the cache too.
        if wrl.generic.load(Ordering::Relaxed) == node_ptr.as_ptr() {
            wrl.generic.store(ptr::null_mut(), Ordering::Relaxed);
        }
        self.wr_object.store(ptr::null_mut(), Ordering::Relaxed);
    }
}
impl Drop for PyWeak {
    /// Unlink this weakref from its referent's list (no-op if already dead).
    #[inline(always)]
    fn drop(&mut self) {
        // `drop_inner` takes `&self`, which auto-reborrows from `&mut self`;
        // the previous explicit `let me: &Self = self;` binding was redundant.
        self.drop_inner();
    }
}
impl Py<PyWeak> {
    /// Convenience forwarding of [`PyWeak::upgrade`].
    #[inline(always)]
    pub fn upgrade(&self) -> Option<PyObjectRef> {
        PyWeak::upgrade(self)
    }
}
/// The instance `__dict__`, behind a lock so it can be swapped atomically.
#[derive(Debug)]
pub(super) struct InstanceDict {
    pub(super) d: PyRwLock<PyDictRef>,
}
impl From<PyDictRef> for InstanceDict {
    #[inline(always)]
    fn from(d: PyDictRef) -> Self {
        Self::new(d)
    }
}
impl InstanceDict {
    #[inline]
    pub const fn new(d: PyDictRef) -> Self {
        Self {
            d: PyRwLock::new(d),
        }
    }

    /// Clone out the current dict reference.
    #[inline]
    pub fn get(&self) -> PyDictRef {
        self.d.read().clone()
    }

    /// Replace the dict, discarding the previous one.
    #[inline]
    pub fn set(&self, d: PyDictRef) {
        self.replace(d);
    }

    /// Swap in a new dict, returning the previous one.
    #[inline]
    pub fn replace(&self, d: PyDictRef) -> PyDictRef {
        core::mem::replace(&mut self.d.write(), d)
    }

    /// Consume the wrapper, yielding the dict.
    #[inline]
    pub fn into_inner(self) -> PyDictRef {
        self.d.into_inner()
    }
}
impl<T: PyPayload> PyInner<T> {
    /// Free the allocation backing `ptr`, mirroring the layout logic of
    /// `PyInner::new` (plain `Box` vs. combined [ObjExt][WeakRefList][inner]).
    ///
    /// # Safety
    /// `ptr` must have been produced by `PyInner::<T>::new` and not yet freed.
    unsafe fn dealloc(ptr: *mut Self) {
        unsafe {
            let (flags, member_count) = (*ptr).read_type_flags();
            let has_ext =
                flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
            let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
            if has_ext || has_weakref {
                // Rebuild the exact combined layout used at allocation time.
                let mut layout = core::alloc::Layout::from_size_align(0, 1).unwrap();
                if has_ext {
                    layout = layout
                        .extend(core::alloc::Layout::new::<ObjExt>())
                        .unwrap()
                        .0;
                }
                if has_weakref {
                    layout = layout
                        .extend(core::alloc::Layout::new::<WeakRefList>())
                        .unwrap()
                        .0;
                }
                let (combined, inner_offset) =
                    layout.extend(core::alloc::Layout::new::<Self>()).unwrap();
                let combined = combined.pad_to_align();
                let alloc_ptr = (ptr as *mut u8).sub(inner_offset);
                // Drop the inner first (type flags were read above), then the
                // ext header (at offset 0). WeakRefList has no Drop impl.
                core::ptr::drop_in_place(ptr);
                if has_ext {
                    core::ptr::drop_in_place(alloc_ptr as *mut ObjExt);
                }
                alloc::alloc::dealloc(alloc_ptr, combined);
            } else {
                // Simple case: the object was boxed on its own.
                drop(Box::from_raw(ptr));
            }
        }
    }
}
impl<T: PyPayload + core::fmt::Debug> PyInner<T> {
    /// Allocate a new object and return an owned raw pointer to it.
    ///
    /// Types with `__dict__`/`__slots__` or weakref support get one combined
    /// allocation laid out as `[ObjExt?][WeakRefList?][PyInner<T>]`; all other
    /// types get a plain `Box<PyInner<T>>`.
    fn new(payload: T, typ: PyTypeRef, dict: Option<PyDictRef>) -> *mut Self {
        let member_count = typ.slots.member_count;
        let needs_ext = typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_DICT)
            || member_count > 0;
        let needs_weakref = typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        debug_assert!(
            needs_ext || dict.is_none(),
            "dict passed to type '{}' without HAS_DICT flag",
            typ.name()
        );
        if needs_ext || needs_weakref {
            // Build the combined layout, recording each header's offset.
            let mut layout = core::alloc::Layout::from_size_align(0, 1).unwrap();
            let ext_start = if needs_ext {
                let (combined, offset) =
                    layout.extend(core::alloc::Layout::new::<ObjExt>()).unwrap();
                layout = combined;
                Some(offset)
            } else {
                None
            };
            let weakref_start = if needs_weakref {
                let (combined, offset) = layout
                    .extend(core::alloc::Layout::new::<WeakRefList>())
                    .unwrap();
                layout = combined;
                Some(offset)
            } else {
                None
            };
            let (combined, inner_offset) =
                layout.extend(core::alloc::Layout::new::<Self>()).unwrap();
            let combined = combined.pad_to_align();
            let alloc_ptr = unsafe { alloc::alloc::alloc(combined) };
            if alloc_ptr.is_null() {
                alloc::alloc::handle_alloc_error(combined);
            }
            // Expose provenance so `ext_ref`/`weakref_list_ref` can later
            // reconstruct header pointers from raw addresses.
            alloc_ptr.expose_provenance();
            unsafe {
                if let Some(offset) = ext_start {
                    let ext_ptr = alloc_ptr.add(offset) as *mut ObjExt;
                    ext_ptr.write(ObjExt::new(dict, member_count));
                }
                if let Some(offset) = weakref_start {
                    let weakref_ptr = alloc_ptr.add(offset) as *mut WeakRefList;
                    weakref_ptr.write(WeakRefList::new());
                }
                let inner_ptr = alloc_ptr.add(inner_offset) as *mut Self;
                inner_ptr.write(Self {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<T>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    typ: PyAtomicRef::from(typ),
                    payload,
                });
                inner_ptr
            }
        } else {
            Box::into_raw(Box::new(Self {
                ref_count: RefCount::new(),
                vtable: PyObjVTable::of::<T>(),
                gc_bits: Radium::new(0),
                gc_generation: Radium::new(GC_UNTRACKED),
                gc_pointers: Pointers::new(),
                typ: PyAtomicRef::from(typ),
                payload,
            }))
        }
    }
}
/// Layout of a bare `PyInner<T>` (extension headers not included).
pub(crate) const fn pyinner_layout<T: PyPayload>() -> core::alloc::Layout {
    core::alloc::Layout::new::<PyInner<T>>()
}
/// Per-type cache of dead object allocations awaiting reuse.
/// Entries are raw pointers to allocations shaped like `PyInner<T>`.
pub(crate) struct FreeList<T: PyPayload> {
    items: Vec<*mut PyObject>,
    // Ties the list to its payload type without storing any `T`.
    _marker: core::marker::PhantomData<T>,
}
impl<T: PyPayload> FreeList<T> {
    /// An empty freelist (usable in `const`/`static` contexts).
    pub(crate) const fn new() -> Self {
        Self {
            items: Vec::new(),
            _marker: core::marker::PhantomData,
        }
    }
}
impl<T: PyPayload> Default for FreeList<T> {
    /// Same as [`FreeList::new`]: an empty list.
    fn default() -> Self {
        Self::new()
    }
}
impl<T: PyPayload> Drop for FreeList<T> {
    /// Free every cached allocation when the freelist itself is dropped.
    fn drop(&mut self) {
        for ptr in self.items.drain(..) {
            // SAFETY: entries were handed over via `T::freelist_push` and are
            // no longer referenced. NOTE(review): this frees with the bare
            // `PyInner<T>` layout — assumes freelisted objects never carried
            // ObjExt/WeakRefList headers; confirm against `freelist_push`.
            unsafe {
                alloc::alloc::dealloc(ptr as *mut u8, core::alloc::Layout::new::<PyInner<T>>());
            }
        }
    }
}
// Deref straight to the underlying Vec so callers can push/pop entries.
impl<T: PyPayload> core::ops::Deref for FreeList<T> {
    type Target = Vec<*mut PyObject>;
    fn deref(&self) -> &Self::Target {
        &self.items
    }
}
impl<T: PyPayload> core::ops::DerefMut for FreeList<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.items
    }
}
/// An owned, reference-counted handle to a Python object.
#[repr(transparent)]
pub struct PyObjectRef {
    ptr: NonNull<PyObject>,
}
impl Clone for PyObjectRef {
    /// Bump the refcount and return another owning handle.
    #[inline(always)]
    fn clone(&self) -> Self {
        (**self).to_owned()
    }
}
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // NOTE(review): soundness relies on RefCount and all payloads being
        // thread-safe under the threading feature — confirm in their impls.
        unsafe impl Send for PyObjectRef {}
        unsafe impl Sync for PyObjectRef {}
    }
}
/// Borrowed view of any Python object: a transparent wrapper over the
/// type-erased header, so a `&PyObject` is a valid view of any `PyInner<T>`.
#[repr(transparent)]
pub struct PyObject(PyInner<Erased>);
impl Deref for PyObjectRef {
    type Target = PyObject;
    #[inline(always)]
    fn deref(&self) -> &PyObject {
        unsafe { self.ptr.as_ref() }
    }
}
impl ToOwned for PyObject {
    type Owned = PyObjectRef;
    /// Bump the refcount and hand out an owning handle.
    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        self.0.ref_count.inc();
        PyObjectRef {
            ptr: NonNull::from(self),
        }
    }
}
impl PyObject {
    /// Like `to_owned`, but returns `None` if the refcount has already
    /// dropped to zero (object mid-destruction).
    #[inline]
    pub fn try_to_owned(&self) -> Option<PyObjectRef> {
        if self.0.ref_count.safe_inc() {
            Some(PyObjectRef {
                ptr: NonNull::from(self),
            })
        } else {
            None
        }
    }

    /// Raw-pointer variant of `try_to_owned` that touches only the refcount
    /// field rather than forming a `&PyObject` to a possibly-dying object.
    ///
    /// # Safety
    /// `ptr` must point at a valid (possibly dying, but not freed) object.
    #[inline]
    pub unsafe fn try_to_owned_from_ptr(ptr: *mut Self) -> Option<PyObjectRef> {
        let inner = ptr.cast::<PyInner<Erased>>();
        let ref_count = unsafe { &*core::ptr::addr_of!((*inner).ref_count) };
        if ref_count.safe_inc() {
            Some(PyObjectRef {
                ptr: unsafe { NonNull::new_unchecked(ptr) },
            })
        } else {
            None
        }
    }
}
impl PyObjectRef {
    /// Leak this handle into a raw pointer; the refcount is NOT decremented.
    #[inline(always)]
    pub const fn into_raw(self) -> NonNull<PyObject> {
        let ptr = self.ptr;
        core::mem::forget(self);
        ptr
    }

    /// Reconstitute a handle previously leaked via [`Self::into_raw`].
    ///
    /// # Safety
    /// `ptr` must carry ownership of exactly one refcount increment.
    #[inline(always)]
    pub const unsafe fn from_raw(ptr: NonNull<PyObject>) -> Self {
        Self { ptr }
    }

    /// Downcast by payload type, returning the original handle on mismatch.
    #[inline(always)]
    pub fn downcast<T: PyPayload>(self) -> Result<PyRef<T>, Self> {
        if self.downcastable::<T>() {
            Ok(unsafe { self.downcast_unchecked() })
        } else {
            Err(self)
        }
    }

    /// Downcast, surfacing mismatch via `T::try_downcast_from`'s error.
    pub fn try_downcast<T: PyPayload>(self, vm: &VirtualMachine) -> PyResult<PyRef<T>> {
        T::try_downcast_from(&self, vm)?;
        Ok(unsafe { self.downcast_unchecked() })
    }

    /// # Safety
    /// The payload must actually be a `T` (see `downcastable`).
    #[inline(always)]
    pub unsafe fn downcast_unchecked<T>(self) -> PyRef<T> {
        // Transfer ownership without touching the refcount.
        let obj = ManuallyDrop::new(self);
        PyRef {
            ptr: obj.ptr.cast(),
        }
    }

    /// Downcast only if the object's class is *exactly* `T::class` (no
    /// subclasses), returning the original handle otherwise.
    #[inline]
    pub fn downcast_exact<T: PyPayload>(self, vm: &VirtualMachine) -> Result<PyRefExact<T>, Self> {
        if self.class().is(T::class(&vm.ctx)) {
            assert!(
                self.downcastable::<T>(),
                "obj.__class__ is T::class() but payload is not T"
            );
            Ok(unsafe { PyRefExact::new_unchecked(PyRef::from_obj_unchecked(self)) })
        } else {
            Err(self)
        }
    }
}
impl PyObject {
#[inline(always)]
fn weak_ref_list(&self) -> Option<&WeakRefList> {
self.0.weakref_list_ref()
}
pub(crate) fn get_weakrefs(&self) -> Option<PyObjectRef> {
let wrl = self.weak_ref_list()?;
let _lock = weakref_lock::lock(self as *const PyObject as usize);
let head_ptr = wrl.head.load(Ordering::Relaxed);
if head_ptr.is_null() {
None
} else {
let head = unsafe { &*head_ptr };
if head.0.ref_count.safe_inc() {
Some(unsafe { PyRef::from_raw(head_ptr) }.into())
} else {
None
}
}
}
pub(crate) fn downgrade_with_weakref_typ_opt(
&self,
callback: Option<PyObjectRef>,
typ: PyTypeRef,
) -> Option<PyRef<PyWeak>> {
self.weak_ref_list()
.map(|wrl| wrl.add(self, typ, true, callback, None))
}
pub(crate) fn downgrade_with_typ(
&self,
callback: Option<PyObjectRef>,
typ: PyTypeRef,
vm: &VirtualMachine,
) -> PyResult<PyRef<PyWeak>> {
if !self
.class()
.slots
.flags
.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF)
{
return Err(vm.new_type_error(format!(
"cannot create weak reference to '{}' object",
self.class().name()
)));
}
let dict = if typ
.slots
.flags
.has_feature(crate::types::PyTypeFlags::HAS_DICT)
{
Some(vm.ctx.new_dict())
} else {
None
};
let cls_is_weakref = typ.is(vm.ctx.types.weakref_type);
let wrl = self.weak_ref_list().ok_or_else(|| {
vm.new_type_error(format!(
"cannot create weak reference to '{}' object",
self.class().name()
))
})?;
Ok(wrl.add(self, typ, cls_is_weakref, callback, dict))
}
pub fn downgrade(
&self,
callback: Option<PyObjectRef>,
vm: &VirtualMachine,
) -> PyResult<PyRef<PyWeak>> {
self.downgrade_with_typ(callback, vm.ctx.types.weakref_type.to_owned(), vm)
}
pub fn get_weak_references(&self) -> Option<Vec<PyRef<PyWeak>>> {
self.weak_ref_list()
.map(|wrl| wrl.get_weak_references(self))
}
#[deprecated(note = "use downcastable instead")]
#[inline(always)]
pub fn payload_is<T: PyPayload>(&self) -> bool {
self.0.vtable.typeid == T::PAYLOAD_TYPE_ID
}
#[deprecated(note = "use downcast_unchecked_ref instead")]
#[inline(always)]
pub const unsafe fn payload_unchecked<T: PyPayload>(&self) -> &T {
let inner = unsafe { &*(&self.0 as *const PyInner<Erased> as *const PyInner<T>) };
&inner.payload
}
#[deprecated(note = "use downcast_ref instead")]
#[inline(always)]
pub fn payload<T: PyPayload>(&self) -> Option<&T> {
#[allow(deprecated)]
if self.payload_is::<T>() {
#[allow(deprecated)]
Some(unsafe { self.payload_unchecked() })
} else {
None
}
}
#[inline(always)]
pub fn class(&self) -> &Py<PyType> {
self.0.typ.deref()
}
pub fn set_class(&self, typ: PyTypeRef, vm: &VirtualMachine) {
self.0.typ.swap_to_temporary_refs(typ, vm);
}
#[deprecated(note = "use downcast_ref_if_exact instead")]
#[inline(always)]
pub fn payload_if_exact<T: PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
if self.class().is(T::class(&vm.ctx)) {
#[allow(deprecated)]
self.payload()
} else {
None
}
}
#[inline(always)]
fn instance_dict(&self) -> Option<&InstanceDict> {
self.0.ext_ref().and_then(|ext| ext.dict.as_ref())
}
#[inline(always)]
pub fn dict(&self) -> Option<PyDictRef> {
self.instance_dict().map(|d| d.get())
}
pub fn set_dict(&self, dict: PyDictRef) -> Result<(), PyDictRef> {
match self.instance_dict() {
Some(d) => {
d.set(dict);
Ok(())
}
None => Err(dict),
}
}
#[deprecated(note = "use downcast_ref instead")]
#[inline(always)]
pub fn payload_if_subclass<T: crate::PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
if self.class().fast_issubclass(T::class(&vm.ctx)) {
#[allow(deprecated)]
self.payload()
} else {
None
}
}
#[inline]
pub(crate) fn typeid(&self) -> TypeId {
self.0.vtable.typeid
}
#[inline(always)]
pub fn downcastable<T: PyPayload>(&self) -> bool {
self.typeid() == T::PAYLOAD_TYPE_ID && unsafe { T::validate_downcastable_from(self) }
}
pub fn try_downcast_ref<'a, T: PyPayload>(
&'a self,
vm: &VirtualMachine,
) -> PyResult<&'a Py<T>> {
T::try_downcast_from(self, vm)?;
Ok(unsafe { self.downcast_unchecked_ref::<T>() })
}
#[inline(always)]
pub fn downcast_ref<T: PyPayload>(&self) -> Option<&Py<T>> {
if self.downcastable::<T>() {
Some(unsafe { self.downcast_unchecked_ref::<T>() })
} else {
None
}
}
#[inline(always)]
pub fn downcast_ref_if_exact<T: PyPayload>(&self, vm: &VirtualMachine) -> Option<&Py<T>> {
self.class()
.is(T::class(&vm.ctx))
.then(|| unsafe { self.downcast_unchecked_ref::<T>() })
}
#[inline(always)]
pub unsafe fn downcast_unchecked_ref<T: PyPayload>(&self) -> &Py<T> {
debug_assert!(self.downcastable::<T>());
unsafe { &*(self as *const Self as *const Py<T>) }
}
#[inline(always)]
pub fn strong_count(&self) -> usize {
self.0.ref_count.get()
}
#[inline]
pub fn weak_count(&self) -> Option<usize> {
self.weak_ref_list().map(|wrl| wrl.count(self))
}
#[inline(always)]
pub const fn as_raw(&self) -> *const Self {
self
}
#[inline]
pub(crate) fn gc_finalized(&self) -> bool {
GcBits::from_bits_retain(self.0.gc_bits.load(Ordering::Relaxed)).contains(GcBits::FINALIZED)
}
#[inline]
pub(crate) fn set_gc_finalized(&self) {
self.set_gc_bit(GcBits::FINALIZED);
}
#[inline]
pub(crate) fn set_gc_bit(&self, bit: GcBits) {
self.0.gc_bits.fetch_or(bit.bits(), Ordering::Relaxed);
}
#[inline]
pub(crate) fn gc_generation(&self) -> u8 {
self.0.gc_generation.load(Ordering::Relaxed)
}
#[inline]
pub(crate) fn set_gc_generation(&self, generation: u8) {
self.0.gc_generation.store(generation, Ordering::Relaxed);
}
#[inline]
pub(crate) fn set_gc_tracked(&self) {
self.set_gc_bit(GcBits::TRACKED);
}
#[inline]
pub(crate) fn clear_gc_tracked(&self) {
self.0
.gc_bits
.fetch_and(!GcBits::TRACKED.bits(), Ordering::Relaxed);
}
#[inline(always)] fn drop_slow_inner(&self) -> Result<(), ()> {
#[inline(never)]
#[cold]
fn call_slot_del(
zelf: &PyObject,
slot_del: fn(&PyObject, &VirtualMachine) -> PyResult<()>,
) -> Result<(), ()> {
let ret = crate::vm::thread::with_vm(zelf, |vm| {
zelf.0.ref_count.inc_by(2);
if let Err(e) = slot_del(zelf, vm) {
let del_method = zelf.get_class_attr(identifier!(vm, __del__)).unwrap();
vm.run_unraisable(e, None, del_method);
}
let _ = zelf.0.ref_count.dec();
zelf.0.ref_count.dec()
});
match ret {
Some(true) => Ok(()),
Some(false) => Err(()),
None => Ok(()),
}
}
let del = self.class().slots.del.load();
if let Some(slot_del) = del
&& !self.gc_finalized()
{
self.set_gc_finalized();
call_slot_del(self, slot_del)?;
}
if let Some(wrl) = self.weak_ref_list() {
wrl.clear(self);
}
Ok(())
}
#[inline(never)]
unsafe fn drop_slow(ptr: NonNull<Self>) {
let dealloc = unsafe { ptr.as_ref().0.vtable.dealloc };
unsafe { dealloc(ptr.as_ptr()) }
}
pub(crate) unsafe fn mark_intern(&self) {
self.0.ref_count.leak();
}
pub(crate) fn is_interned(&self) -> bool {
self.0.ref_count.is_leaked()
}
pub(crate) fn get_slot(&self, offset: usize) -> Option<PyObjectRef> {
self.0.ext_ref().unwrap().slots[offset].read().clone()
}
pub(crate) fn set_slot(&self, offset: usize, value: Option<PyObjectRef>) {
*self.0.ext_ref().unwrap().slots[offset].write() = value;
}
pub fn is_gc_tracked(&self) -> bool {
GcBits::from_bits_retain(self.0.gc_bits.load(Ordering::Relaxed)).contains(GcBits::TRACKED)
}
pub fn gc_get_referents(&self) -> Vec<PyObjectRef> {
let mut result = Vec::new();
self.0.traverse(&mut |child: &PyObject| {
result.push(child.to_owned());
});
result
}
pub fn try_call_finalizer(&self) {
let del = self.class().slots.del.load();
if let Some(slot_del) = del
&& !self.gc_finalized()
{
self.set_gc_finalized();
let result = crate::vm::thread::with_vm(self, |vm| {
if let Err(e) = slot_del(self, vm)
&& let Some(del_method) = self.get_class_attr(identifier!(vm, __del__))
{
vm.run_unraisable(e, None, del_method);
}
});
let _ = result;
}
}
pub fn gc_clear_weakrefs_collect_callbacks(&self) -> Vec<(PyRef<PyWeak>, PyObjectRef)> {
if let Some(wrl) = self.weak_ref_list() {
wrl.clear_for_gc_collect_callbacks(self)
} else {
vec![]
}
}
pub unsafe fn gc_get_referent_ptrs(&self) -> Vec<NonNull<PyObject>> {
let mut result = Vec::new();
self.0.traverse(&mut |child: &PyObject| {
result.push(NonNull::from(child));
});
result
}
/// Sever this object's strong outgoing references (tp_clear equivalent),
/// returning them so the caller can drop them after releasing any locks.
///
/// # Safety
/// `ptr` must point to a valid, live `PyObject`, and the caller must have
/// exclusive access to it (a GC pause): the instance dict and slots are
/// drained in place through a `&mut` reconstructed from a raw address.
pub unsafe fn gc_clear_raw(ptr: *mut PyObject) -> Vec<PyObjectRef> {
    let mut result = Vec::new();
    let obj = unsafe { &*ptr };
    // Payload-specific clear hook, if the type registered one.
    if let Some(clear_fn) = obj.0.vtable.clear {
        unsafe { clear_fn(ptr, &mut result) };
    }
    let (flags, member_count) = obj.0.read_type_flags();
    // Instances with a __dict__ or slot members carry an ObjExt header
    // placed *before* the object, hence the address subtraction below.
    let has_ext = flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
    if has_ext {
        let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        // NOTE(review): assumes the prefix layout [ObjExt][WeakRefList][inner]
        // matching the offsets used by the allocation path — confirm there.
        let offset = if has_weakref {
            WEAKREF_OFFSET + EXT_OFFSET
        } else {
            EXT_OFFSET
        };
        // Provenance for this address was exposed at allocation time, so
        // with_exposed_provenance_mut is allowed to reconstruct the pointer.
        let self_addr = (ptr as *const u8).addr();
        let ext_ptr =
            core::ptr::with_exposed_provenance_mut::<ObjExt>(self_addr.wrapping_sub(offset));
        let ext = unsafe { &mut *ext_ptr };
        // Hand the instance dict to the caller...
        if let Some(old_dict) = ext.dict.take() {
            let dict_ref = old_dict.into_inner();
            result.push(dict_ref.into());
        }
        // ...and every occupied slot as well.
        for slot in ext.slots.iter() {
            if let Some(val) = slot.write().take() {
                result.push(val);
            }
        }
    }
    result
}
/// Convenience wrapper over [`Self::gc_clear_raw`] for a borrowed object.
///
/// # Safety
/// Same contract as `gc_clear_raw`: the caller needs exclusive (GC-pause)
/// access, despite the `&self` receiver.
pub unsafe fn gc_clear(&self) -> Vec<PyObjectRef> {
    unsafe { Self::gc_clear_raw(self as *const _ as *mut PyObject) }
}
/// Whether `gc_clear` could release anything: either the type registered
/// a clear hook, or the instance carries a dict / occupied slot storage.
pub fn gc_has_clear(&self) -> bool {
    if self.0.vtable.clear.is_some() {
        return true;
    }
    match self.0.ext_ref() {
        Some(ext) => ext.dict.is_some() || !ext.slots.is_empty(),
        None => false,
    }
}
}
impl Borrow<PyObject> for PyObjectRef {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        // Deref coercion: &PyObjectRef -> &PyObject.
        self
    }
}
impl AsRef<PyObject> for PyObjectRef {
    /// Delegate to the `Borrow<PyObject>` impl, mirroring the `PyRef<T>`
    /// conversion impls below.
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}
impl<'a, T: PyPayload> From<&'a Py<T>> for &'a PyObject {
    /// Type-erase a typed borrow; zero-cost.
    #[inline(always)]
    fn from(py_ref: &'a Py<T>) -> Self {
        py_ref.as_object()
    }
}
impl Drop for PyObjectRef {
    #[inline]
    fn drop(&mut self) {
        // Only the holder of the last strong reference deallocates.
        if self.0.ref_count.dec() {
            unsafe { PyObject::drop_slow(self.ptr) }
        }
    }
}
impl fmt::Debug for PyObject {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Dispatch through the vtable so the concrete payload's Debug runs.
        unsafe { (self.0.vtable.debug)(self, f) }
    }
}
impl fmt::Debug for PyObjectRef {
    /// Format exactly like the pointed-to `PyObject`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.as_object(), f)
    }
}
// Low pointer bit used to tag a PyStackRef as a borrowed (non-owning)
// reference; valid because PyObject allocations are more than 1-aligned.
const STACKREF_BORROW_TAG: usize = 1;
/// A tagged-pointer reference used on the interpreter stack: either an
/// owned `PyObjectRef` (tag bit clear) or a borrowed `&PyObject`
/// (`STACKREF_BORROW_TAG` set).
#[repr(transparent)]
pub struct PyStackRef {
    // Pointer bits, possibly OR'd with STACKREF_BORROW_TAG; never null,
    // giving Option<PyStackRef> a free niche (see the asserts below).
    bits: NonZeroUsize,
}
impl PyStackRef {
    /// Take ownership of `obj`; the stackref now carries its strong ref.
    #[inline(always)]
    pub fn new_owned(obj: PyObjectRef) -> Self {
        let ptr = obj.into_raw();
        let bits = ptr.as_ptr() as usize;
        debug_assert!(
            bits & STACKREF_BORROW_TAG == 0,
            "PyObject pointer must be aligned"
        );
        Self {
            bits: unsafe { NonZeroUsize::new_unchecked(bits) },
        }
    }
    /// Create a borrowed stackref that does NOT bump the refcount.
    ///
    /// # Safety
    /// `obj` must stay alive for as long as this stackref (or anything
    /// derived from it without `promote`) is used.
    #[inline(always)]
    pub unsafe fn new_borrowed(obj: &PyObject) -> Self {
        let bits = (obj as *const PyObject as usize) | STACKREF_BORROW_TAG;
        Self {
            bits: unsafe { NonZeroUsize::new_unchecked(bits) },
        }
    }
    /// Whether this stackref is a non-owning borrow (tag bit set).
    #[inline(always)]
    pub fn is_borrowed(&self) -> bool {
        self.bits.get() & STACKREF_BORROW_TAG != 0
    }
    /// Borrow the referenced object, masking off the tag bit.
    #[inline(always)]
    pub fn as_object(&self) -> &PyObject {
        unsafe { &*((self.bits.get() & !STACKREF_BORROW_TAG) as *const PyObject) }
    }
    /// Convert into an owned `PyObjectRef`, bumping the refcount only if
    /// this stackref was borrowed.
    #[inline(always)]
    pub fn to_pyobj(self) -> PyObjectRef {
        let obj = if self.is_borrowed() {
            self.as_object().to_owned()
        } else {
            // Owned: transfer the existing strong reference as-is.
            let ptr = unsafe { NonNull::new_unchecked(self.bits.get() as *mut PyObject) };
            unsafe { PyObjectRef::from_raw(ptr) }
        };
        // Skip Drop: ownership was either transferred or never held.
        core::mem::forget(self);
        obj
    }
    /// Upgrade a borrowed stackref to an owned one in place
    /// (no-op when already owned).
    #[inline(always)]
    pub fn promote(&mut self) {
        if self.is_borrowed() {
            self.as_object().0.ref_count.inc();
            self.bits =
                unsafe { NonZeroUsize::new_unchecked(self.bits.get() & !STACKREF_BORROW_TAG) };
        }
    }
}
impl Drop for PyStackRef {
    #[inline]
    fn drop(&mut self) {
        // Borrowed stackrefs hold no refcount; only owned ones release.
        if !self.is_borrowed() {
            let ptr = unsafe { NonNull::new_unchecked(self.bits.get() as *mut PyObject) };
            drop(unsafe { PyObjectRef::from_raw(ptr) });
        }
    }
}
impl core::ops::Deref for PyStackRef {
    type Target = PyObject;
    /// Both owned and borrowed stackrefs deref to the same object view.
    #[inline(always)]
    fn deref(&self) -> &PyObject {
        self.as_object()
    }
}
impl Clone for PyStackRef {
    /// Cloning always yields an OWNED stackref (refcount bump), even when
    /// the source was a borrow.
    #[inline(always)]
    fn clone(&self) -> Self {
        Self::new_owned(self.as_object().to_owned())
    }
}
impl fmt::Debug for PyStackRef {
    /// Render as `PyStackRef(<ownership>, <object debug>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ownership = if self.is_borrowed() {
            "borrowed"
        } else {
            "owned"
        };
        write!(f, "PyStackRef({}, ", ownership)?;
        self.as_object().fmt(f)?;
        write!(f, ")")
    }
}
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: NOTE(review): mirrors the Send/Sync impls for PyRef<T>
        // below; sound only if PyObject itself is thread-safe under the
        // "threading" feature — confirm against PyObjectRef's impls.
        unsafe impl Send for PyStackRef {}
        unsafe impl Sync for PyStackRef {}
    }
}
// PyStackRef's NonZeroUsize gives it a niche, so Option<PyStackRef> stays
// pointer-sized and layout-compatible with Option<PyObjectRef>.
const _: () = assert!(
    core::mem::size_of::<Option<PyStackRef>>() == core::mem::size_of::<Option<PyObjectRef>>()
);
const _: () =
    assert!(core::mem::size_of::<Option<PyStackRef>>() == core::mem::size_of::<PyStackRef>());
/// A typed, borrowed view of an allocated Python object; `&Py<T>` relates
/// to `PyRef<T>` roughly as `&str` relates to `String`.
#[repr(transparent)]
pub struct Py<T>(PyInner<T>);
impl<T: PyPayload> Py<T> {
    /// Create a typed weak reference to this object, with an optional
    /// callback invoked when the referent dies.
    ///
    /// # Errors
    /// Propagates any error from constructing the underlying weakref.
    pub fn downgrade(
        &self,
        callback: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyWeakRef<T>> {
        Ok(PyWeakRef {
            weak: self.as_object().downgrade(callback, vm)?,
            _marker: PhantomData,
        })
    }
    /// Borrow the payload stored in this object.
    #[inline]
    pub fn payload(&self) -> &T {
        &self.0.payload
    }
}
impl<T> ToOwned for Py<T> {
    type Owned = PyRef<T>;
    /// Bump the refcount and hand out a new strong reference.
    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        self.0.ref_count.inc();
        PyRef {
            ptr: NonNull::from(self),
        }
    }
}
impl<T> Deref for Py<T> {
    type Target = T;
    /// Deref straight to the payload.
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0.payload
    }
}
impl<T: PyPayload> Borrow<PyObject> for Py<T> {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        // SAFETY: NOTE(review): relies on the erased PyInner sharing a
        // common header prefix with PyInner<T> and on both wrappers being
        // repr(transparent) — confirm against PyInner's declaration.
        unsafe { &*(&self.0 as *const PyInner<T> as *const PyObject) }
    }
}
impl<T> core::hash::Hash for Py<T>
where
    T: core::hash::Hash + PyPayload,
{
    /// Hash exactly as the payload hashes.
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}
impl<T> PartialEq for Py<T>
where
    T: PartialEq + PyPayload,
{
    /// Compare by payload value, not by identity.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        **self == **other
    }
}
// Eq follows directly from the payload's Eq.
impl<T> Eq for Py<T> where T: Eq + PyPayload {}
impl<T> AsRef<PyObject> for Py<T>
where
    T: PyPayload,
{
    /// Same type-erasure as the `Borrow<PyObject>` impl.
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}
impl<T: PyPayload + core::fmt::Debug> fmt::Debug for Py<T> {
    /// Delegate to the payload's Debug (via Deref).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
/// An owned, reference-counted handle to a `Py<T>` allocation — the typed
/// counterpart of `PyObjectRef`.
#[repr(transparent)]
pub struct PyRef<T> {
    // Always valid while this strong reference is alive.
    ptr: NonNull<Py<T>>,
}
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: NOTE(review): sound only if the refcount is atomic and
        // all payloads are thread-safe under the "threading" feature —
        // confirm against RefCount and PyPayload's threading requirements.
        unsafe impl<T> Send for PyRef<T> {}
        unsafe impl<T> Sync for PyRef<T> {}
    }
}
impl<T: fmt::Debug> fmt::Debug for PyRef<T> {
    /// Delegate to the pointee (Py<T>, hence the payload) Debug.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
impl<T> Drop for PyRef<T> {
    #[inline]
    fn drop(&mut self) {
        // Last strong reference deallocates through the erased header.
        if self.0.ref_count.dec() {
            unsafe { PyObject::drop_slow(self.ptr.cast::<PyObject>()) }
        }
    }
}
impl<T> Clone for PyRef<T> {
    /// Delegates to `Py<T>::to_owned` (a refcount bump).
    #[inline(always)]
    fn clone(&self) -> Self {
        (**self).to_owned()
    }
}
impl<T: PyPayload> PyRef<T> {
    /// Wrap a pointer without touching the refcount.
    ///
    /// # Safety
    /// `ptr` must point to a live `Py<T>`, and the caller transfers one
    /// strong reference into the returned `PyRef`.
    #[inline(always)]
    pub(crate) const unsafe fn from_non_null(ptr: NonNull<Py<T>>) -> Self {
        Self { ptr }
    }
    /// As `from_non_null`, but starting from a raw pointer.
    ///
    /// # Safety
    /// `raw` must be non-null, valid, and carry one strong reference.
    #[inline(always)]
    pub(crate) const unsafe fn from_raw(raw: *const Py<T>) -> Self {
        unsafe { Self::from_non_null(NonNull::new_unchecked(raw as *mut _)) }
    }
    /// Reinterpret an erased reference as `PyRef<T>` without a check.
    ///
    /// # Safety
    /// The payload must actually be a `T` (verified in debug builds only).
    #[inline(always)]
    unsafe fn from_obj_unchecked(obj: PyObjectRef) -> Self {
        debug_assert!(obj.downcast_ref::<T>().is_some());
        // ManuallyDrop keeps the strong count: ownership moves into `ptr`.
        let obj = ManuallyDrop::new(obj);
        Self {
            ptr: obj.ptr.cast(),
        }
    }
    /// Leak one strong reference, yielding a `'static` borrow.
    pub const fn leak(pyref: Self) -> &'static Py<T> {
        let ptr = pyref.ptr;
        core::mem::forget(pyref);
        unsafe { ptr.as_ref() }
    }
}
impl<T: PyPayload + crate::object::MaybeTraverse + core::fmt::Debug> PyRef<T> {
    /// Allocate a new object with the given payload, type and optional
    /// instance `__dict__`, reusing a freelist entry when possible and
    /// registering with the cycle collector when the object can
    /// participate in reference cycles.
    #[inline(always)]
    pub fn new_ref(payload: T, typ: crate::builtins::PyTypeRef, dict: Option<PyDictRef>) -> Self {
        let has_dict = dict.is_some();
        let is_heaptype = typ.heaptype_ext.is_some();
        // Freelist entries are plain PyInner<T> allocations; instances with
        // a dict or a heap type need the extended layout and can't reuse.
        let cached = if !has_dict && !is_heaptype {
            unsafe { T::freelist_pop(&payload) }
        } else {
            None
        };
        let ptr = if let Some(cached) = cached {
            let inner = cached.as_ptr() as *mut PyInner<T>;
            unsafe {
                // Re-initialize header and payload in place.
                core::ptr::write(&mut (*inner).ref_count, RefCount::new());
                (*inner).gc_bits.store(0, Ordering::Relaxed);
                core::ptr::drop_in_place(&mut (*inner).payload);
                core::ptr::write(&mut (*inner).payload, payload);
                // Keep the cached type reference if it's the same type
                // object (drop the incoming duplicate); otherwise swap.
                let cached_typ: *const Py<PyType> = &*(*inner).typ;
                if core::ptr::eq(cached_typ, &*typ) {
                    drop(typ);
                } else {
                    let _old = (*inner).typ.swap(typ);
                }
            }
            unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) }
        } else {
            let inner = PyInner::new(payload, typ, dict);
            unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) }
        };
        // Track with the GC only when the object can own references
        // (traverse impl, instance dict, or heap type); tracking may also
        // trigger an opportunistic collection.
        if <T as crate::object::MaybeTraverse>::HAS_TRAVERSE || has_dict || is_heaptype {
            let gc = crate::gc_state::gc_state();
            unsafe {
                gc.track_object(ptr.cast());
            }
            gc.maybe_collect();
        }
        Self { ptr }
    }
}
impl<T: crate::class::PySubclass + core::fmt::Debug> PyRef<T>
where
    T::Base: core::fmt::Debug,
{
    /// Convert into a reference typed as the declared base class.
    #[inline]
    pub fn into_base(self) -> PyRef<T::Base> {
        let obj: PyObjectRef = self.into();
        match obj.downcast() {
            Ok(base_ref) => base_ref,
            // SAFETY: every T instance is also a T::Base instance, so the
            // downcast cannot fail.
            Err(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }
    /// Convert into a reference typed as a statically-known supertype `U`.
    #[inline]
    pub fn upcast<U: PyPayload + StaticType>(self) -> PyRef<U>
    where
        T: StaticType,
    {
        debug_assert!(T::static_type().is_subtype(U::static_type()));
        let obj: PyObjectRef = self.into();
        match obj.downcast::<U>() {
            Ok(upcast_ref) => upcast_ref,
            // SAFETY: guarded by the subtype debug_assert above; callers
            // must only upcast along real subtype edges.
            Err(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }
}
impl<T: crate::class::PySubclass> Py<T> {
    /// Reborrow as the declared base class (zero-cost pointer cast).
    #[inline]
    pub fn to_base(&self) -> &Py<T::Base> {
        debug_assert!(self.as_object().downcast_ref::<T::Base>().is_some());
        // SAFETY: NOTE(review): relies on T::Base's payload being a valid
        // view of T's allocation per PySubclass's contract — confirm there.
        unsafe { &*(self as *const Py<T> as *const Py<T::Base>) }
    }
    /// Reborrow as a statically-known supertype `U`.
    #[inline]
    pub fn upcast_ref<U: PyPayload + StaticType>(&self) -> &Py<U>
    where
        T: StaticType,
    {
        debug_assert!(T::static_type().is_subtype(U::static_type()));
        // SAFETY: subtype relationship is asserted (debug) above; same
        // layout-compatibility caveat as `to_base`.
        unsafe { &*(self as *const Py<T> as *const Py<U>) }
    }
}
impl<T> Borrow<PyObject> for PyRef<T>
where
    T: PyPayload,
{
    /// Type-erase via the inner `Py<T>`'s object view.
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        (**self).as_object()
    }
}
impl<T> AsRef<PyObject> for PyRef<T>
where
    T: PyPayload,
{
    /// Same type-erasure as the `Borrow<PyObject>` impl.
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}
impl<T> From<PyRef<T>> for PyObjectRef {
    /// Type-erase, transferring the strong reference with no refcount
    /// traffic (ManuallyDrop suppresses the source's Drop).
    #[inline]
    fn from(value: PyRef<T>) -> Self {
        let me = ManuallyDrop::new(value);
        Self { ptr: me.ptr.cast() }
    }
}
impl<T> Borrow<Py<T>> for PyRef<T> {
    #[inline(always)]
    fn borrow(&self) -> &Py<T> {
        // Deref coercion: &PyRef<T> -> &Py<T>.
        self
    }
}
impl<T> AsRef<Py<T>> for PyRef<T> {
    #[inline(always)]
    fn as_ref(&self) -> &Py<T> {
        // Deref coercion: &PyRef<T> -> &Py<T>.
        self
    }
}
impl<T> Deref for PyRef<T> {
    type Target = Py<T>;
    #[inline(always)]
    fn deref(&self) -> &Py<T> {
        // SAFETY: ptr stays valid while this strong reference is alive.
        unsafe { self.ptr.as_ref() }
    }
}
impl<T> core::hash::Hash for PyRef<T>
where
    T: core::hash::Hash + PyPayload,
{
    /// Hash exactly as the pointed-to payload hashes.
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}
impl<T> PartialEq for PyRef<T>
where
    T: PartialEq + PyPayload,
{
    /// Compare by payload value, not by pointer identity.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        **self == **other
    }
}
// Eq follows directly from the payload's Eq.
impl<T> Eq for PyRef<T> where T: Eq + PyPayload {}
/// A typed wrapper over a weakref object; `T` only records the expected
/// payload type for `upgrade`, no data of `T` is stored.
#[repr(transparent)]
pub struct PyWeakRef<T: PyPayload> {
    weak: PyRef<PyWeak>,
    _marker: PhantomData<T>,
}
impl<T: PyPayload> PyWeakRef<T> {
    /// Try to obtain a strong reference; `None` if the referent died.
    pub fn upgrade(&self) -> Option<PyRef<T>> {
        self.weak
            .upgrade()
            // SAFETY-adjacent: the weakref was created from a Py<T>, so the
            // unchecked re-typing matches the original payload type.
            .map(|obj| unsafe { PyRef::from_obj_unchecked(obj) })
    }
}
/// Build a `MaybeUninit<$ty>` with only the listed fields written; fields
/// named under `Uninit { .. }` stay uninitialized for the caller to fill
/// later. The dead `if false` branch forces a compile error whenever the
/// two lists don't exactly cover `$ty`'s fields.
macro_rules! partially_init {
    (
        $ty:path {$($init_field:ident: $init_value:expr),*$(,)?},
        Uninit { $($uninit_field:ident),*$(,)? }$(,)?
    ) => {{
        #[allow(clippy::diverging_sub_expression, reason = "intentional compile-time field check in an unreachable branch")]
        if false {
            #[allow(invalid_value, dead_code, unreachable_code)]
            let _ = {$ty {
                $($init_field: $init_value,)*
                $($uninit_field: unreachable!(),)*
            }};
        }
        let mut m = ::core::mem::MaybeUninit::<$ty>::uninit();
        #[allow(unused_unsafe)]
        unsafe {
            // Field-by-field writes into the uninitialized storage.
            $(::core::ptr::write(&mut (*m.as_mut_ptr()).$init_field, $init_value);)*
        }
        m
    }};
}
/// Bootstrap the mutually-referential `type`, `object` and `weakref` type
/// objects. The "type's type is type" cycle is broken by allocating both
/// type objects with their `typ` field left uninitialized, then patching
/// it in with manually manufactured strong references.
pub(crate) fn init_type_hierarchy() -> (PyTypeRef, PyTypeRef, PyTypeRef) {
    use crate::{builtins::object, class::PyClassImpl};
    use core::mem::MaybeUninit;
    let (type_type, object_type) = {
        static_assertions::assert_eq_size!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);
        static_assertions::assert_eq_align!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);
        let type_payload = PyType {
            base: None,
            bases: PyRwLock::default(),
            mro: PyRwLock::default(),
            subclasses: PyRwLock::default(),
            attributes: PyRwLock::new(Default::default()),
            slots: PyType::make_slots(),
            heaptype_ext: None,
            tp_version_tag: core::sync::atomic::AtomicU32::new(0),
        };
        let object_payload = PyType {
            base: None,
            bases: PyRwLock::default(),
            mro: PyRwLock::default(),
            subclasses: PyRwLock::default(),
            attributes: PyRwLock::new(Default::default()),
            slots: object::PyBaseObject::make_slots(),
            heaptype_ext: None,
            tp_version_tag: core::sync::atomic::AtomicU32::new(0),
        };
        // Allocate one combined [ObjExt][WeakRefList][PyInner<PyType>]
        // block and return a pointer to the PyInner part; the prefixes
        // match the offsets that gc_clear_raw and weakref handling later
        // subtract back from the object address.
        let alloc_type_with_prefixes = || -> *mut MaybeUninit<PyInner<PyType>> {
            let inner_layout = core::alloc::Layout::new::<MaybeUninit<PyInner<PyType>>>();
            let ext_layout = core::alloc::Layout::new::<ObjExt>();
            let weakref_layout = core::alloc::Layout::new::<WeakRefList>();
            let (layout, weakref_offset) = ext_layout.extend(weakref_layout).unwrap();
            let (combined, inner_offset) = layout.extend(inner_layout).unwrap();
            let combined = combined.pad_to_align();
            let alloc_ptr = unsafe { alloc::alloc::alloc(combined) };
            if alloc_ptr.is_null() {
                alloc::alloc::handle_alloc_error(combined);
            }
            // Called for its side effect: expose the allocation's
            // provenance so prefix pointers can be reconstructed from a
            // bare address (with_exposed_provenance_mut) later.
            alloc_ptr.expose_provenance();
            unsafe {
                let ext_ptr = alloc_ptr as *mut ObjExt;
                ext_ptr.write(ObjExt::new(None, 0));
                let weakref_ptr = alloc_ptr.add(weakref_offset) as *mut WeakRefList;
                weakref_ptr.write(WeakRefList::new());
                alloc_ptr.add(inner_offset) as *mut MaybeUninit<PyInner<PyType>>
            }
        };
        let type_type_ptr = alloc_type_with_prefixes();
        unsafe {
            // `typ` stays uninitialized: it must point at type_type, which
            // is only half-built at this point.
            type_type_ptr.write(partially_init!(
                PyInner::<PyType> {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<PyType>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    payload: type_payload,
                },
                Uninit { typ }
            ));
        }
        let object_type_ptr = alloc_type_with_prefixes();
        unsafe {
            object_type_ptr.write(partially_init!(
                PyInner::<PyType> {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<PyType>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    payload: object_payload,
                },
                Uninit { typ },
            ));
        }
        let object_type_ptr = object_type_ptr as *mut PyInner<PyType>;
        let type_type_ptr = type_type_ptr as *mut PyInner<PyType>;
        unsafe {
            // Each `from_raw` consumes a strong reference, so manufacture
            // extra ones by bumping the refcount manually before each use.
            (*type_type_ptr).ref_count.inc();
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            // object.__class__ = type
            ptr::write(&mut (*object_type_ptr).typ, PyAtomicRef::from(type_type));
            (*type_type_ptr).ref_count.inc();
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            // type.__class__ = type (the self-referential edge)
            ptr::write(&mut (*type_type_ptr).typ, PyAtomicRef::from(type_type));
            let object_type = PyTypeRef::from_raw(object_type_ptr.cast());
            // Wire up mro/bases/base now that both objects fully exist.
            (*object_type_ptr).payload.mro = PyRwLock::new(vec![object_type.clone()]);
            (*type_type_ptr).payload.bases = PyRwLock::new(vec![object_type.clone()]);
            (*type_type_ptr).payload.base = Some(object_type.clone());
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            (*type_type_ptr).payload.mro =
                PyRwLock::new(vec![type_type.clone(), object_type.clone()]);
            (type_type, object_type)
        }
    };
    // `weakref` can be built normally now that type/object exist.
    let weakref_type = PyType {
        base: Some(object_type.clone()),
        bases: PyRwLock::new(vec![object_type.clone()]),
        mro: PyRwLock::new(vec![object_type.clone()]),
        subclasses: PyRwLock::default(),
        attributes: PyRwLock::default(),
        slots: PyWeak::make_slots(),
        heaptype_ext: None,
        tp_version_tag: core::sync::atomic::AtomicU32::new(0),
    };
    let weakref_type = PyRef::new_ref(weakref_type, type_type.clone(), None);
    // NOTE(review): the weakref type itself is taken out of GC tracking
    // here — presumably because it is immortal and never in a cycle.
    unsafe {
        crate::gc_state::gc_state()
            .untrack_object(core::ptr::NonNull::from(weakref_type.as_object()));
    }
    weakref_type.as_object().clear_gc_tracked();
    // Put weakref itself at the front of its own mro.
    weakref_type.mro.write().insert(0, weakref_type.clone());
    // Record type and weakref as subclasses of object, via weakrefs built
    // with the freshly created weakref type.
    object_type.subclasses.write().push(
        type_type
            .as_object()
            .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
            .unwrap(),
    );
    object_type.subclasses.write().push(
        weakref_type
            .as_object()
            .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
            .unwrap(),
    );
    (type_type, object_type, weakref_type)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises the unsafe type-object bootstrap; intended to be run
    // under Miri to catch UB in the manual allocation/patching.
    #[test]
    fn miri_test_type_initialization() {
        let _ = init_type_hierarchy();
    }
    // Allocates and drops a simple object to exercise the refcount and
    // dealloc paths under Miri.
    #[test]
    fn miri_test_drop() {
        let ctx = crate::Context::genesis();
        let obj = ctx.new_bytes(b"dfghjkl".to_vec());
        drop(obj);
    }
}