Skip to main content

rustpython_vm/object/
core.rs

1//! Essential types for object models
2//!
//! +-------------------------+------------------+-------------------+
//! |       Management        |      Typed       |      Untyped      |
//! +-------------------------+------------------+-------------------+
//! | Interpreter-independent | [`Py<T>`]        | [`PyObject`]      |
//! | Reference-counted       | [`PyRef<T>`]     | [`PyObjectRef`]   |
//! | Weak                    | [`PyWeakRef<T>`] | [`PyRef<PyWeak>`] |
//! +-------------------------+------------------+-------------------+
10//!
//! By the naming rule, [`PyRef<PyWeak>`] might be expected to be called `PyObjectWeak`,
//! but it is deliberately not, as a reminder that it is still a `PyRef` object.
13use super::{
14    PyAtomicRef,
15    ext::{AsObject, PyRefExact, PyResult},
16    payload::PyPayload,
17};
18use crate::object::traverse_object::PyObjVTable;
19use crate::{
20    builtins::{PyDictRef, PyType, PyTypeRef},
21    common::{
22        atomic::{Ordering, PyAtomic, Radium},
23        linked_list::{Link, Pointers},
24        lock::PyRwLock,
25        refcount::RefCount,
26    },
27    vm::VirtualMachine,
28};
29use crate::{
30    class::StaticType,
31    object::traverse::{MaybeTraverse, Traverse, TraverseFn},
32};
33use itertools::Itertools;
34
35use alloc::fmt;
36
37use core::{
38    any::TypeId,
39    borrow::Borrow,
40    cell::UnsafeCell,
41    marker::PhantomData,
42    mem::ManuallyDrop,
43    num::NonZeroUsize,
44    ops::Deref,
45    ptr::{self, NonNull},
46};
47
48// so, PyObjectRef is basically equivalent to `PyRc<PyInner<dyn PyObjectPayload>>`, except it's
49// only one pointer in width rather than 2. We do that by manually creating a vtable, and putting
50// a &'static reference to it inside the `PyRc` rather than adjacent to it, like trait objects do.
51// This can lead to faster code since there's just less data to pass around, as well as because of
52// some weird stuff with trait objects, alignment, and padding.
53//
// So, every type has an alignment, which means that if you create a value of it, its location in
// memory has to be a multiple of its alignment. e.g., a type with alignment 4 (like i32) could be
56// at 0xb7befbc0, 0xb7befbc4, or 0xb7befbc8, but not 0xb7befbc2. If you have a struct and there are
57// 2 fields whose sizes/alignments don't perfectly fit in with each other, e.g.:
58// +-------------+-------------+---------------------------+
59// |     u16     |      ?      |            i32            |
60// | 0x00 | 0x01 | 0x02 | 0x03 | 0x04 | 0x05 | 0x06 | 0x07 |
61// +-------------+-------------+---------------------------+
62// There has to be padding in the space between the 2 fields. But, if that field is a trait object
63// (like `dyn PyObjectPayload`) we don't *know* how much padding there is between the `payload`
64// field and the previous field. So, Rust has to consult the vtable to know the exact offset of
65// `payload` in `PyInner<dyn PyObjectPayload>`, which has a huge performance impact when *every
66// single payload access* requires a vtable lookup. Thankfully, we're able to avoid that because of
67// the way we use PyObjectRef, in that whenever we want to access the payload we (almost) always
68// access it from a generic function. So, rather than doing
69//
70// - check vtable for payload offset
71// - get offset in PyInner struct
72// - call as_any() method of PyObjectPayload
73// - call downcast_ref() method of Any
74// we can just do
75// - check vtable that typeid matches
76// - pointer cast directly to *const PyInner<T>
77//
78// and at that point the compiler can know the offset of `payload` for us because **we've given it a
79// concrete type to work with before we ever access the `payload` field**
80
/// A zero-sized marker type meaning "we've erased the type of this object,
/// cast it before you use it".
///
/// `PyInner<Erased>` is the untyped view of an object; the stored vtable is
/// consulted to recover the concrete payload type.
#[derive(Debug)]
pub(super) struct Erased;
84
85/// Trashcan mechanism to limit recursive deallocation depth (Py_TRASHCAN).
86/// Without this, deeply nested structures (e.g. 200k-deep list) cause stack overflow
87/// during deallocation because each level adds a stack frame.
88mod trashcan {
89    use core::cell::Cell;
90
91    /// Maximum nesting depth for deallocation before deferring.
92    /// CPython uses UNWIND_NO_NESTING = 50.
93    const TRASHCAN_LIMIT: usize = 50;
94
95    type DeallocFn = unsafe fn(*mut super::PyObject);
96    type DeallocQueue = Vec<(*mut super::PyObject, DeallocFn)>;
97
98    thread_local! {
99        static DEALLOC_DEPTH: Cell<usize> = const { Cell::new(0) };
100        static DEALLOC_QUEUE: Cell<DeallocQueue> = const { Cell::new(Vec::new()) };
101    }
102
103    /// Try to begin deallocation. Returns true if we should proceed,
104    /// false if the object was deferred (depth exceeded).
105    #[inline]
106    pub(super) unsafe fn begin(
107        obj: *mut super::PyObject,
108        dealloc: unsafe fn(*mut super::PyObject),
109    ) -> bool {
110        DEALLOC_DEPTH.with(|d| {
111            let depth = d.get();
112            if depth >= TRASHCAN_LIMIT {
113                // Depth exceeded: defer this deallocation
114                DEALLOC_QUEUE.with(|q| {
115                    let mut queue = q.take();
116                    queue.push((obj, dealloc));
117                    q.set(queue);
118                });
119                false
120            } else {
121                d.set(depth + 1);
122                true
123            }
124        })
125    }
126
127    /// End deallocation and process any deferred objects if at outermost level.
128    #[inline]
129    pub(super) unsafe fn end() {
130        let depth = DEALLOC_DEPTH.with(|d| {
131            let depth = d.get();
132            debug_assert!(depth > 0, "trashcan::end called without matching begin");
133            let depth = depth - 1;
134            d.set(depth);
135            depth
136        });
137        if depth == 0 {
138            // Process deferred deallocations iteratively
139            loop {
140                let next = DEALLOC_QUEUE.with(|q| {
141                    let mut queue = q.take();
142                    let item = queue.pop();
143                    q.set(queue);
144                    item
145                });
146                if let Some((obj, dealloc)) = next {
147                    unsafe { dealloc(obj) };
148                } else {
149                    break;
150                }
151            }
152        }
153    }
154}
155
/// Default dealloc: handles __del__, weakref clearing, tp_clear, and memory free.
/// Equivalent to subtype_dealloc.
///
/// # Safety
/// `obj` must point to a live object whose payload really is `T` and whose
/// strong reference count has reached zero. The pointer must not be used
/// again after this call (unless the object was resurrected by `__del__`).
pub(super) unsafe fn default_dealloc<T: PyPayload>(obj: *mut PyObject) {
    // Run the finalizer first; Err(()) means __del__ resurrected the object,
    // so deallocation must be abandoned entirely.
    let obj_ref = unsafe { &*(obj as *const PyObject) };
    if let Err(()) = obj_ref.drop_slow_inner() {
        return; // resurrected by __del__
    }

    // Trashcan: limit recursive deallocation depth to prevent stack overflow
    if !unsafe { trashcan::begin(obj, default_dealloc::<T>) } {
        return; // deferred to queue
    }

    let vtable = obj_ref.0.vtable;

    // Untrack from GC BEFORE deallocation.
    // Must happen before memory is freed because intrusive list removal
    // reads the object's gc_pointers (prev/next).
    if obj_ref.is_gc_tracked() {
        let ptr = unsafe { NonNull::new_unchecked(obj) };
        unsafe {
            crate::gc_state::gc_state().untrack_object(ptr);
        }
        // Verify untrack cleared the tracked flag and generation
        debug_assert!(
            !obj_ref.is_gc_tracked(),
            "object still tracked after untrack_object"
        );
        debug_assert_eq!(
            obj_ref.gc_generation(),
            crate::object::GC_UNTRACKED,
            "gc_generation not reset after untrack_object"
        );
    }

    // Try to store in freelist for reuse BEFORE tp_clear, so that
    // size-based freelists (e.g. PyTuple) can read the payload directly.
    // Only exact base types (not heaptype or structseq subtypes) go into the freelist.
    let typ = obj_ref.class();
    let pushed = if T::HAS_FREELIST
        && typ.heaptype_ext.is_none()
        && core::ptr::eq(typ, T::class(crate::vm::Context::genesis()))
    {
        unsafe { T::freelist_push(obj) }
    } else {
        false
    };

    // Extract child references to break circular refs (tp_clear).
    // This runs regardless of freelist push — the object's children must be released.
    let mut edges = Vec::new();
    if let Some(clear_fn) = vtable.clear {
        unsafe { clear_fn(obj, &mut edges) };
    }

    if !pushed {
        // Deallocate the object memory (handles ObjExt prefix if present)
        unsafe { PyInner::dealloc(obj as *mut PyInner<T>) };
    }

    // Drop child references - may trigger recursive destruction.
    // Done while our trashcan::begin increment is still live so the depth
    // limit applies to the recursive drops.
    drop(edges);

    // Trashcan: decrement depth and process deferred objects at outermost level
    unsafe { trashcan::end() };
}
222pub(super) unsafe fn debug_obj<T: PyPayload + core::fmt::Debug>(
223    x: &PyObject,
224    f: &mut fmt::Formatter<'_>,
225) -> fmt::Result {
226    let x = unsafe { &*(x as *const PyObject as *const PyInner<T>) };
227    fmt::Debug::fmt(x, f)
228}
229
230/// Call `try_trace` on payload
231pub(super) unsafe fn try_traverse_obj<T: PyPayload>(x: &PyObject, tracer_fn: &mut TraverseFn<'_>) {
232    let x = unsafe { &*(x as *const PyObject as *const PyInner<T>) };
233    let payload = &x.payload;
234    payload.try_traverse(tracer_fn)
235}
236
237/// Call `try_clear` on payload to extract child references (tp_clear)
238pub(super) unsafe fn try_clear_obj<T: PyPayload>(x: *mut PyObject, out: &mut Vec<PyObjectRef>) {
239    let x = unsafe { &mut *(x as *mut PyInner<T>) };
240    x.payload.try_clear(out);
241}
242
bitflags::bitflags! {
    /// GC bits for free-threading support (like ob_gc_bits in Py_GIL_DISABLED)
    /// These bits are stored in a separate atomic field for lock-free access.
    /// See Include/internal/pycore_gc.h
    ///
    /// Stored in `PyInner::gc_bits` (a `PyAtomic<u8>`).
    #[derive(Copy, Clone, Debug, Default)]
    pub(crate) struct GcBits: u8 {
        /// Tracked by the GC
        const TRACKED = 1 << 0;
        /// tp_finalize was called (prevents __del__ from being called twice)
        const FINALIZED = 1 << 1;
        /// Object is unreachable (during GC collection)
        const UNREACHABLE = 1 << 2;
        /// Object is frozen (immutable)
        const FROZEN = 1 << 3;
        /// Memory the object references is shared between multiple threads
        /// and needs special handling when freeing due to possible in-flight lock-free reads
        const SHARED = 1 << 4;
        /// Memory of the object itself is shared between multiple threads
        /// Objects with this bit that are GC objects will automatically be delay-freed
        const SHARED_INLINE = 1 << 5;
        /// Use deferred reference counting
        const DEFERRED = 1 << 6;
    }
}
267
// GC generation constants — values stored in `PyInner::gc_generation`.
/// The object is not tracked by the GC.
pub(crate) const GC_UNTRACKED: u8 = 0xFF;
/// The object is permanently tracked (outside generations 0-2).
pub(crate) const GC_PERMANENT: u8 = 3;
271
/// Link implementation for GC intrusive linked list tracking
pub(crate) struct GcLink;

// SAFETY: PyObject (PyInner<Erased>) is heap-allocated and pinned in memory
// once created. gc_pointers is at a fixed offset in PyInner.
unsafe impl Link for GcLink {
    // Handles are plain raw pointers: the GC list does not own its entries.
    type Handle = NonNull<PyObject>;
    type Target = PyObject;

    fn as_raw(handle: &NonNull<PyObject>) -> NonNull<PyObject> {
        *handle
    }

    unsafe fn from_raw(ptr: NonNull<PyObject>) -> NonNull<PyObject> {
        ptr
    }

    unsafe fn pointers(target: NonNull<PyObject>) -> NonNull<Pointers<PyObject>> {
        // `&raw mut` projects to the field without creating an intermediate
        // reference (no uniqueness claim over the whole object).
        let inner_ptr = target.as_ptr() as *mut PyInner<Erased>;
        unsafe { NonNull::new_unchecked(&raw mut (*inner_ptr).gc_pointers) }
    }
}
294
/// Extension fields for objects that need dict or member slots.
/// Allocated as a prefix before PyInner when needed (prefix allocation pattern).
/// Access via `PyInner::ext_ref()` using negative offset from the object pointer.
///
/// align(8) ensures size_of::<ObjExt>() is always a multiple of 8,
/// so the offset from Layout::extend equals size_of::<ObjExt>() for any
/// PyInner<T> alignment (important on wasm32 where pointers are 4 bytes
/// but some payloads like PyWeak have align 8 due to i64 fields).
#[repr(C, align(8))]
pub(super) struct ObjExt {
    /// Instance `__dict__` storage, when the type has one.
    pub(super) dict: Option<InstanceDict>,
    /// One cell per member slot; length equals the type's member_count.
    pub(super) slots: Box<[PyRwLock<Option<PyObjectRef>>]>,
}
308
309impl ObjExt {
310    fn new(dict: Option<PyDictRef>, member_count: usize) -> Self {
311        Self {
312            dict: dict.map(InstanceDict::new),
313            slots: core::iter::repeat_with(|| PyRwLock::new(None))
314                .take(member_count)
315                .collect_vec()
316                .into_boxed_slice(),
317        }
318    }
319}
320
321impl fmt::Debug for ObjExt {
322    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
323        write!(f, "[ObjExt]")
324    }
325}
326
/// Precomputed offset constants for prefix allocation.
/// All prefix components are align(8) and their sizes are multiples of 8,
/// so Layout::extend adds no inter-padding.
// Byte offset from a PyInner back to its ObjExt prefix (when no WeakRefList is present).
const EXT_OFFSET: usize = core::mem::size_of::<ObjExt>();
// Byte offset from a PyInner back to its WeakRefList prefix (always directly adjacent).
const WEAKREF_OFFSET: usize = core::mem::size_of::<WeakRefList>();

// Compile-time proof of the "no inter-padding" claim above: each prefix's
// size is a multiple of its alignment and each prefix is at least as aligned
// as PyInner, so the negative-offset arithmetic in ext_ref/weakref_list_ref
// is exact.
const _: () =
    assert!(core::mem::size_of::<ObjExt>().is_multiple_of(core::mem::align_of::<ObjExt>()));
const _: () = assert!(core::mem::align_of::<ObjExt>() >= core::mem::align_of::<PyInner<()>>());
const _: () = assert!(
    core::mem::size_of::<WeakRefList>().is_multiple_of(core::mem::align_of::<WeakRefList>())
);
const _: () = assert!(core::mem::align_of::<WeakRefList>() >= core::mem::align_of::<PyInner<()>>());
340
/// This is an actual python object. It consists of a `typ` which is the
/// python class, and carries some rust payload optionally. This rust
/// payload can be a rust float or rust int in case of float and int objects.
///
/// `repr(C)` keeps the header fields at the same offsets for every `T`,
/// which is what allows `PyInner<Erased>` and `PyInner<T>` to be cast
/// between each other (see the module comment at the top of the file).
#[repr(C)]
pub(super) struct PyInner<T> {
    /// Strong reference count state.
    pub(super) ref_count: RefCount,
    /// Manually-built vtable for the erased payload type (debug/traverse/clear entries).
    pub(super) vtable: &'static PyObjVTable,
    /// GC bits for free-threading (like ob_gc_bits)
    pub(super) gc_bits: PyAtomic<u8>,
    /// GC generation index (0-2=gen, GC_PERMANENT=permanent, GC_UNTRACKED=not tracked).
    /// Uses PyAtomic for interior mutability (writes happen through &self under list locks).
    pub(super) gc_generation: PyAtomic<u8>,
    /// Intrusive linked list pointers for GC generational tracking
    pub(super) gc_pointers: Pointers<PyObject>,

    pub(super) typ: PyAtomicRef<PyType>, // __class__ member

    /// The concrete Rust payload. Must remain the LAST field: its offset is
    /// the only one that depends on `T`.
    pub(super) payload: T,
}
/// Size of the common object header, i.e. `PyInner` with a zero-sized payload.
pub(crate) const SIZEOF_PYOBJECT_HEAD: usize = core::mem::size_of::<PyInner<()>>();
361
impl<T> PyInner<T> {
    /// Read type flags and member_count via raw pointers to avoid Stacked Borrows
    /// violations during bootstrap, where type objects have self-referential typ pointers.
    #[inline(always)]
    fn read_type_flags(&self) -> (crate::types::PyTypeFlags, usize) {
        let typ_ptr = self.typ.load_raw();
        // addr_of! projections avoid materializing references to the type
        // object, which may alias `self` during bootstrap.
        let slots = unsafe { core::ptr::addr_of!((*typ_ptr).0.payload.slots) };
        let flags = unsafe { core::ptr::addr_of!((*slots).flags).read() };
        let member_count = unsafe { core::ptr::addr_of!((*slots).member_count).read() };
        (flags, member_count)
    }

    /// Access the ObjExt prefix at a negative offset from this PyInner.
    /// Returns None if this object was allocated without dict/slots.
    ///
    /// Layout: [ObjExt?][WeakRefList?][PyInner]
    /// ObjExt offset depends on whether WeakRefList is also present.
    ///
    /// NOTE(review): this uses `.addr()` (which does NOT expose provenance)
    /// together with `with_exposed_provenance`; that is only sound if the
    /// allocation's provenance was exposed when the prefix allocation was
    /// created — confirm against the allocation code.
    #[inline(always)]
    pub(super) fn ext_ref(&self) -> Option<&ObjExt> {
        let (flags, member_count) = self.read_type_flags();
        let has_ext = flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
        if !has_ext {
            return None;
        }
        let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        let offset = if has_weakref {
            WEAKREF_OFFSET + EXT_OFFSET
        } else {
            EXT_OFFSET
        };
        let self_addr = (self as *const Self as *const u8).addr();
        let ext_ptr = core::ptr::with_exposed_provenance::<ObjExt>(self_addr.wrapping_sub(offset));
        Some(unsafe { &*ext_ptr })
    }

    /// Access the WeakRefList prefix at a fixed negative offset from this PyInner.
    /// Returns None if the type does not support weakrefs.
    ///
    /// Layout: [ObjExt?][WeakRefList?][PyInner]
    /// WeakRefList is always immediately before PyInner (fixed WEAKREF_OFFSET).
    #[inline(always)]
    pub(super) fn weakref_list_ref(&self) -> Option<&WeakRefList> {
        let (flags, _) = self.read_type_flags();
        if !flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF) {
            return None;
        }
        let self_addr = (self as *const Self as *const u8).addr();
        let ptr = core::ptr::with_exposed_provenance::<WeakRefList>(
            self_addr.wrapping_sub(WEAKREF_OFFSET),
        );
        Some(unsafe { &*ptr })
    }
}
415
416impl<T: fmt::Debug> fmt::Debug for PyInner<T> {
417    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
418        write!(f, "[PyObject {:?}]", &self.payload)
419    }
420}
421
unsafe impl<T: MaybeTraverse> Traverse for Py<T> {
    /// NOTE: calling `traverse` on `Py<T>` applies `tracer_fn` to `Py<T>`'s
    /// *children*, unlike `PyRef<T>`, where `traverse` applies `tracer_fn`
    /// to the `PyRef<T>` itself.
    fn traverse(&self, tracer_fn: &mut TraverseFn<'_>) {
        self.0.traverse(tracer_fn)
    }
}
429
unsafe impl Traverse for PyObject {
    /// NOTE: calling `traverse` on `PyObject` applies `tracer_fn` to the
    /// object's *children*, unlike `PyObjectRef`, where `traverse` applies
    /// `tracer_fn` to the `PyObjectRef` itself.
    fn traverse(&self, tracer_fn: &mut TraverseFn<'_>) {
        self.0.traverse(tracer_fn)
    }
}
437
438// === Stripe lock for weakref list protection (WEAKREF_LIST_LOCK) ===
439
#[cfg(feature = "threading")]
mod weakref_lock {
    use core::sync::atomic::{AtomicU8, Ordering};

    /// Number of stripes; addresses are hashed onto these.
    const NUM_WEAKREF_LOCKS: usize = 64;

    static LOCKS: [AtomicU8; NUM_WEAKREF_LOCKS] = [const { AtomicU8::new(0) }; NUM_WEAKREF_LOCKS];

    /// Guard that releases its stripe when dropped.
    pub(super) struct WeakrefLockGuard {
        idx: usize,
    }

    impl Drop for WeakrefLockGuard {
        fn drop(&mut self) {
            LOCKS[self.idx].store(0, Ordering::Release);
        }
    }

    /// Spin until the stripe for `addr` is acquired, then return its guard.
    pub(super) fn lock(addr: usize) -> WeakrefLockGuard {
        let idx = (addr >> 4) % NUM_WEAKREF_LOCKS;
        let stripe = &LOCKS[idx];
        // compare_exchange_weak may fail spuriously; retrying covers that.
        while stripe
            .compare_exchange_weak(0, 1, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            core::hint::spin_loop();
        }
        WeakrefLockGuard { idx }
    }

    /// Reset all weakref stripe locks after fork in child process.
    /// Locks held by parent threads would cause infinite spin in the child.
    #[cfg(unix)]
    pub(crate) fn reset_all_after_fork() {
        for stripe in LOCKS.iter() {
            stripe.store(0, Ordering::Release);
        }
    }
}
480
#[cfg(not(feature = "threading"))]
mod weakref_lock {
    /// Zero-sized stand-in guard: without threading there is nothing to release.
    pub(super) struct WeakrefLockGuard;

    impl Drop for WeakrefLockGuard {
        fn drop(&mut self) {
            // single-threaded build: nothing to unlock
        }
    }

    /// Single-threaded stand-in for the stripe lock; always succeeds immediately.
    pub(super) fn lock(_address: usize) -> WeakrefLockGuard {
        WeakrefLockGuard
    }
}
493
/// Reset weakref stripe locks after fork. Must be called before any
/// Python code runs in the child process.
///
/// Only exists for threaded unix builds; other configurations have no
/// stripe locks to reset.
#[cfg(all(unix, feature = "threading"))]
pub(crate) fn reset_weakref_locks_after_fork() {
    weakref_lock::reset_all_after_fork();
}
500
// === WeakRefList: inline on every object (tp_weaklist) ===

/// Per-object list of weak references, stored as a prefix before `PyInner`
/// (see `PyInner::weakref_list_ref`). All mutation happens under the
/// address-keyed stripe lock in `weakref_lock`.
#[repr(C)]
pub(super) struct WeakRefList {
    /// Head of the intrusive doubly-linked list of weakrefs.
    /// Null when the list is empty.
    head: PyAtomic<*mut Py<PyWeak>>,
    /// Cached generic weakref (no callback, exact weakref type).
    /// Matches try_reuse_basic_ref in weakrefobject.c.
    /// Null when no reusable generic ref exists.
    generic: PyAtomic<*mut Py<PyWeak>>,
}
511
512impl fmt::Debug for WeakRefList {
513    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
514        f.debug_struct("WeakRefList").finish_non_exhaustive()
515    }
516}
517
/// Unlink a node from the weakref list. Must be called under stripe lock.
///
/// # Safety
/// `node` must be a valid pointer to a node currently in the list owned by `wrl`.
unsafe fn unlink_weakref(wrl: &WeakRefList, node: NonNull<Py<PyWeak>>) {
    unsafe {
        let mut ptrs = WeakLink::pointers(node);
        let prev = ptrs.as_ref().get_prev();
        let next = ptrs.as_ref().get_next();

        // Splice the node out: fix the neighbor (or the list head) on each side.
        if let Some(prev) = prev {
            WeakLink::pointers(prev).as_mut().set_next(next);
        } else {
            // node is the head
            wrl.head.store(
                next.map_or(ptr::null_mut(), |p| p.as_ptr()),
                Ordering::Relaxed,
            );
        }
        if let Some(next) = next {
            WeakLink::pointers(next).as_mut().set_prev(prev);
        }

        // Detach the node's own links.
        ptrs.as_mut().set_prev(None);
        ptrs.as_mut().set_next(None);
    }
}
545
546impl WeakRefList {
547    pub fn new() -> Self {
548        Self {
549            head: Radium::new(ptr::null_mut()),
550            generic: Radium::new(ptr::null_mut()),
551        }
552    }
553
    /// get_or_create_weakref: return a weakref to `obj`, reusing the cached
    /// "generic" weakref (exact weakref type, no callback) when possible,
    /// otherwise allocating a new one and linking it into the list.
    fn add(
        &self,
        obj: &PyObject,
        cls: PyTypeRef,
        cls_is_weakref: bool,
        callback: Option<PyObjectRef>,
        dict: Option<PyDictRef>,
    ) -> PyRef<PyWeak> {
        // A "generic" ref is the reusable cached kind (exact type, no callback).
        let is_generic = cls_is_weakref && callback.is_none();

        // Try reuse under lock first (fast path, no allocation)
        {
            let _lock = weakref_lock::lock(obj as *const PyObject as usize);
            if is_generic {
                let generic_ptr = self.generic.load(Ordering::Relaxed);
                if !generic_ptr.is_null() {
                    let generic = unsafe { &*generic_ptr };
                    // safe_inc fails when the cached ref is concurrently dying.
                    if generic.0.ref_count.safe_inc() {
                        return unsafe { PyRef::from_raw(generic_ptr) };
                    }
                }
            }
        }

        // Allocate OUTSIDE the stripe lock. PyRef::new_ref may trigger
        // maybe_collect → GC → WeakRefList::clear on another object that
        // hashes to the same stripe, which would deadlock on the spinlock.
        let weak_payload = PyWeak {
            pointers: Pointers::new(),
            wr_object: Radium::new(obj as *const PyObject as *mut PyObject),
            callback: UnsafeCell::new(callback),
            hash: Radium::new(crate::common::hash::SENTINEL),
        };
        let weak = PyRef::new_ref(weak_payload, cls, dict);

        // Re-acquire lock for linked list insertion
        let _lock = weakref_lock::lock(obj as *const PyObject as usize);

        // Re-check: another thread may have inserted a generic ref while we
        // were allocating outside the lock. If so, reuse it and drop ours.
        if is_generic {
            let generic_ptr = self.generic.load(Ordering::Relaxed);
            if !generic_ptr.is_null() {
                let generic = unsafe { &*generic_ptr };
                if generic.0.ref_count.safe_inc() {
                    // Nullify wr_object so drop_inner won't unlink an
                    // un-inserted node (which would corrupt the list head).
                    weak.wr_object.store(ptr::null_mut(), Ordering::Relaxed);
                    return unsafe { PyRef::from_raw(generic_ptr) };
                }
            }
        }

        // Insert into linked list under stripe lock
        let node_ptr = NonNull::from(&*weak);
        unsafe {
            let mut ptrs = WeakLink::pointers(node_ptr);
            if is_generic {
                // Generic ref goes to head (insert_head for basic ref)
                let old_head = self.head.load(Ordering::Relaxed);
                ptrs.as_mut().set_next(NonNull::new(old_head));
                ptrs.as_mut().set_prev(None);
                if let Some(old_head) = NonNull::new(old_head) {
                    WeakLink::pointers(old_head)
                        .as_mut()
                        .set_prev(Some(node_ptr));
                }
                self.head.store(node_ptr.as_ptr(), Ordering::Relaxed);
                self.generic.store(node_ptr.as_ptr(), Ordering::Relaxed);
            } else {
                // Non-generic refs go after generic ref (insert_after)
                let generic_ptr = self.generic.load(Ordering::Relaxed);
                if let Some(after) = NonNull::new(generic_ptr) {
                    let after_next = WeakLink::pointers(after).as_ref().get_next();
                    ptrs.as_mut().set_prev(Some(after));
                    ptrs.as_mut().set_next(after_next);
                    WeakLink::pointers(after).as_mut().set_next(Some(node_ptr));
                    if let Some(next) = after_next {
                        WeakLink::pointers(next).as_mut().set_prev(Some(node_ptr));
                    }
                } else {
                    // No generic ref; insert at head
                    let old_head = self.head.load(Ordering::Relaxed);
                    ptrs.as_mut().set_next(NonNull::new(old_head));
                    ptrs.as_mut().set_prev(None);
                    if let Some(old_head) = NonNull::new(old_head) {
                        WeakLink::pointers(old_head)
                            .as_mut()
                            .set_prev(Some(node_ptr));
                    }
                    self.head.store(node_ptr.as_ptr(), Ordering::Relaxed);
                }
            }
        }

        weak
    }
652
    /// Clear all weakrefs and call their callbacks.
    /// Called when the owner object is being dropped.
    ///
    /// Every weakref is nulled and unlinked under the stripe lock; callbacks
    /// run only after the lock is released, since they execute arbitrary
    /// Python code.
    // PyObject_ClearWeakRefs
    fn clear(&self, obj: &PyObject) {
        let obj_addr = obj as *const PyObject as usize;
        let _lock = weakref_lock::lock(obj_addr);

        // Clear generic cache
        self.generic.store(ptr::null_mut(), Ordering::Relaxed);

        // Walk the list, collecting weakrefs with callbacks
        let mut callbacks: Vec<(PyRef<PyWeak>, PyObjectRef)> = Vec::new();
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            // Read the next pointer before the node is unlinked below.
            let next = unsafe { WeakLink::pointers(node).as_ref().get_next() };

            let wr = unsafe { node.as_ref() };

            // Mark weakref as dead
            wr.0.payload
                .wr_object
                .store(ptr::null_mut(), Ordering::Relaxed);

            // Unlink from list
            unsafe {
                let mut ptrs = WeakLink::pointers(node);
                ptrs.as_mut().set_prev(None);
                ptrs.as_mut().set_next(None);
            }

            // Collect callback only if we can still acquire a strong ref.
            if wr.0.ref_count.safe_inc() {
                let wr_ref = unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) };
                let cb = unsafe { wr.0.payload.callback.get().replace(None) };
                if let Some(cb) = cb {
                    callbacks.push((wr_ref, cb));
                }
            }

            current = next;
        }
        self.head.store(ptr::null_mut(), Ordering::Relaxed);

        // Invoke callbacks outside the lock
        drop(_lock);
        for (wr, cb) in callbacks {
            crate::vm::thread::with_vm(&cb, |vm| {
                let _ = cb.call((wr.clone(),), vm);
            });
        }
    }
704
    /// Clear all weakrefs but DON'T call callbacks. Instead, return them for later invocation.
    /// Used by GC to ensure ALL weakrefs are cleared BEFORE any callbacks are invoked.
    /// handle_weakrefs() clears all weakrefs first, then invokes callbacks.
    fn clear_for_gc_collect_callbacks(&self, obj: &PyObject) -> Vec<(PyRef<PyWeak>, PyObjectRef)> {
        let obj_addr = obj as *const PyObject as usize;
        let _lock = weakref_lock::lock(obj_addr);

        // Clear generic cache
        self.generic.store(ptr::null_mut(), Ordering::Relaxed);

        let mut callbacks = Vec::new();
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            // Read the next pointer before the node is unlinked below.
            let next = unsafe { WeakLink::pointers(node).as_ref().get_next() };

            let wr = unsafe { node.as_ref() };

            // Mark weakref as dead
            wr.0.payload
                .wr_object
                .store(ptr::null_mut(), Ordering::Relaxed);

            // Unlink from list
            unsafe {
                let mut ptrs = WeakLink::pointers(node);
                ptrs.as_mut().set_prev(None);
                ptrs.as_mut().set_next(None);
            }

            // Collect callback without invoking only if we can keep weakref alive.
            if wr.0.ref_count.safe_inc() {
                let wr_ref = unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) };
                let cb = unsafe { wr.0.payload.callback.get().replace(None) };
                if let Some(cb) = cb {
                    callbacks.push((wr_ref, cb));
                }
            }

            current = next;
        }
        self.head.store(ptr::null_mut(), Ordering::Relaxed);

        callbacks
    }
749
750    fn count(&self, obj: &PyObject) -> usize {
751        let _lock = weakref_lock::lock(obj as *const PyObject as usize);
752        let mut count = 0usize;
753        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
754        while let Some(node) = current {
755            if unsafe { node.as_ref() }.0.ref_count.get() > 0 {
756                count += 1;
757            }
758            current = unsafe { WeakLink::pointers(node).as_ref().get_next() };
759        }
760        count
761    }
762
763    fn get_weak_references(&self, obj: &PyObject) -> Vec<PyRef<PyWeak>> {
764        let _lock = weakref_lock::lock(obj as *const PyObject as usize);
765        let mut v = Vec::new();
766        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
767        while let Some(node) = current {
768            let wr = unsafe { node.as_ref() };
769            if wr.0.ref_count.safe_inc() {
770                v.push(unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) });
771            }
772            current = unsafe { WeakLink::pointers(node).as_ref().get_next() };
773        }
774        v
775    }
776}
777
778impl Default for WeakRefList {
779    fn default() -> Self {
780        Self::new()
781    }
782}
783
/// Marker type implementing `Link` so `Py<PyWeak>` nodes can live in an
/// intrusive linked list (the per-referent weakref list).
struct WeakLink;
unsafe impl Link for WeakLink {
    // An owned handle to a node is a counted reference to the weakref object.
    type Handle = PyRef<PyWeak>;

    type Target = Py<PyWeak>;

    #[inline(always)]
    fn as_raw(handle: &PyRef<PyWeak>) -> NonNull<Self::Target> {
        NonNull::from(&**handle)
    }

    #[inline(always)]
    unsafe fn from_raw(ptr: NonNull<Self::Target>) -> Self::Handle {
        // SAFETY: caller guarantees `ptr` came from `as_raw`, i.e. it still
        // carries the refcount that the returned PyRef takes ownership of.
        unsafe { PyRef::from_raw(ptr.as_ptr()) }
    }

    #[inline(always)]
    unsafe fn pointers(target: NonNull<Self::Target>) -> NonNull<Pointers<Self::Target>> {
        // SAFETY: requirements forwarded from caller; `pointers` is the
        // intrusive link storage embedded in the PyWeak payload.
        unsafe { NonNull::new_unchecked(&raw mut (*target.as_ptr()).0.payload.pointers) }
    }
}
806
/// PyWeakReference: each weakref holds a direct pointer to its referent.
#[pyclass(name = "weakref", module = false)]
#[derive(Debug)]
pub struct PyWeak {
    // Intrusive links into the referent's WeakRefList; mutated only under the
    // stripe lock keyed on the referent's address.
    pointers: Pointers<Py<PyWeak>>,
    /// Direct pointer to the referent object, null when dead.
    /// Equivalent to wr_object in PyWeakReference.
    wr_object: PyAtomic<*mut PyObject>,
    /// Optional callback fired when the referent dies.
    /// Protected by stripe lock (keyed on wr_object address).
    callback: UnsafeCell<Option<PyObjectRef>>,
    // NOTE(review): cached hash value; its fill/read protocol lives at the
    // weakref __hash__ implementation, not visible in this section.
    pub(crate) hash: PyAtomic<crate::common::hash::PyHash>,
}
819
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: `wr_object` and `hash` are atomics; `callback` (the UnsafeCell)
        // is documented above as protected by the stripe lock keyed on the
        // referent's address, so cross-thread access is serialized.
        unsafe impl Send for PyWeak {}
        unsafe impl Sync for PyWeak {}
    }
}
826
827impl PyWeak {
828    /// _PyWeakref_GET_REF: attempt to upgrade the weakref to a strong reference.
829    pub(crate) fn upgrade(&self) -> Option<PyObjectRef> {
830        let obj_ptr = self.wr_object.load(Ordering::Acquire);
831        if obj_ptr.is_null() {
832            return None;
833        }
834
835        let _lock = weakref_lock::lock(obj_ptr as usize);
836
837        // Double-check under lock (clear may have run between our check and lock)
838        let obj_ptr = self.wr_object.load(Ordering::Relaxed);
839        if obj_ptr.is_null() {
840            return None;
841        }
842
843        unsafe {
844            if !(*obj_ptr).0.ref_count.safe_inc() {
845                return None;
846            }
847            Some(PyObjectRef::from_raw(NonNull::new_unchecked(obj_ptr)))
848        }
849    }
850
851    pub(crate) fn is_dead(&self) -> bool {
852        self.wr_object.load(Ordering::Acquire).is_null()
853    }
854
855    /// weakref_dealloc: remove from list if still linked.
856    fn drop_inner(&self) {
857        let obj_ptr = self.wr_object.load(Ordering::Acquire);
858        if obj_ptr.is_null() {
859            return; // Already cleared by WeakRefList::clear()
860        }
861
862        let _lock = weakref_lock::lock(obj_ptr as usize);
863
864        // Double-check under lock
865        let obj_ptr = self.wr_object.load(Ordering::Relaxed);
866        if obj_ptr.is_null() {
867            return; // Cleared between our check and lock acquisition
868        }
869
870        let obj = unsafe { &*obj_ptr };
871        // Safety: if a weakref exists pointing to this object, weakref prefix must be present
872        let wrl = obj.0.weakref_list_ref().unwrap();
873
874        // Compute our Py<PyWeak> node pointer from payload address
875        let offset = std::mem::offset_of!(PyInner<Self>, payload);
876        let py_inner = (self as *const Self)
877            .cast::<u8>()
878            .wrapping_sub(offset)
879            .cast::<PyInner<Self>>();
880        let node_ptr = unsafe { NonNull::new_unchecked(py_inner as *mut Py<Self>) };
881
882        // Unlink from list
883        unsafe { unlink_weakref(wrl, node_ptr) };
884
885        // Update generic cache if this was it
886        if wrl.generic.load(Ordering::Relaxed) == node_ptr.as_ptr() {
887            wrl.generic.store(ptr::null_mut(), Ordering::Relaxed);
888        }
889
890        // Mark as dead
891        self.wr_object.store(ptr::null_mut(), Ordering::Relaxed);
892    }
893}
894
impl Drop for PyWeak {
    #[inline(always)]
    fn drop(&mut self) {
        // we do NOT have actual exclusive access!
        // Other threads can still reach this node through the referent's
        // WeakRefList, so drop_inner works through a shared reference and
        // serializes against them with the stripe lock.
        let me: &Self = self;
        me.drop_inner();
    }
}
903
impl Py<PyWeak> {
    /// Attempt to upgrade to a strong reference to the referent.
    /// Thin forwarder to [`PyWeak::upgrade`].
    #[inline(always)]
    pub fn upgrade(&self) -> Option<PyObjectRef> {
        PyWeak::upgrade(self)
    }
}
910
/// Storage for an object's `__dict__`: the dict reference sits behind a
/// read-write lock so it can be read and swapped concurrently.
#[derive(Debug)]
pub(super) struct InstanceDict {
    pub(super) d: PyRwLock<PyDictRef>,
}
915
916impl From<PyDictRef> for InstanceDict {
917    #[inline(always)]
918    fn from(d: PyDictRef) -> Self {
919        Self::new(d)
920    }
921}
922
923impl InstanceDict {
924    #[inline]
925    pub const fn new(d: PyDictRef) -> Self {
926        Self {
927            d: PyRwLock::new(d),
928        }
929    }
930
931    #[inline]
932    pub fn get(&self) -> PyDictRef {
933        self.d.read().clone()
934    }
935
936    #[inline]
937    pub fn set(&self, d: PyDictRef) {
938        self.replace(d);
939    }
940
941    #[inline]
942    pub fn replace(&self, d: PyDictRef) -> PyDictRef {
943        core::mem::replace(&mut self.d.write(), d)
944    }
945
946    /// Consume the InstanceDict and return the inner PyDictRef.
947    #[inline]
948    pub fn into_inner(self) -> PyDictRef {
949        self.d.into_inner()
950    }
951}
952
impl<T: PyPayload> PyInner<T> {
    /// Deallocate a PyInner, handling optional prefix(es).
    /// Layout: [ObjExt?][WeakRefList?][PyInner<T>]
    ///
    /// # Safety
    /// `ptr` must be a valid pointer from `PyInner::new` and must not be used after this call.
    unsafe fn dealloc(ptr: *mut Self) {
        unsafe {
            // The type's flags determine which prefixes new() allocated.
            // NOTE(review): this assumes HAS_DICT/HAS_WEAKREF/member_count have
            // not changed since allocation, or the reconstructed layout would
            // mismatch — confirm type flags are immutable after instances exist.
            let (flags, member_count) = (*ptr).read_type_flags();
            let has_ext =
                flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
            let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);

            if has_ext || has_weakref {
                // Reconstruct the same layout used in new()
                let mut layout = core::alloc::Layout::from_size_align(0, 1).unwrap();

                if has_ext {
                    layout = layout
                        .extend(core::alloc::Layout::new::<ObjExt>())
                        .unwrap()
                        .0;
                }
                if has_weakref {
                    layout = layout
                        .extend(core::alloc::Layout::new::<WeakRefList>())
                        .unwrap()
                        .0;
                }
                let (combined, inner_offset) =
                    layout.extend(core::alloc::Layout::new::<Self>()).unwrap();
                let combined = combined.pad_to_align();

                // Rewind from the PyInner pointer to the allocation start.
                let alloc_ptr = (ptr as *mut u8).sub(inner_offset);

                // Drop PyInner (payload, typ, etc.)
                core::ptr::drop_in_place(ptr);

                // Drop ObjExt if present (dict, slots)
                if has_ext {
                    core::ptr::drop_in_place(alloc_ptr as *mut ObjExt);
                }
                // WeakRefList has no Drop (just raw pointers), no drop_in_place needed

                alloc::alloc::dealloc(alloc_ptr, combined);
            } else {
                // No prefixes: new() used a plain Box, so free it the same way.
                drop(Box::from_raw(ptr));
            }
        }
    }
}
1004
impl<T: PyPayload + core::fmt::Debug> PyInner<T> {
    /// Allocate a new PyInner, optionally with prefix(es).
    /// Returns a raw pointer to the PyInner (NOT the allocation start).
    /// Layout: [ObjExt?][WeakRefList?][PyInner<T>]
    fn new(payload: T, typ: PyTypeRef, dict: Option<PyDictRef>) -> *mut Self {
        let member_count = typ.slots.member_count;
        // ObjExt prefix carries the instance dict and/or member slots.
        let needs_ext = typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_DICT)
            || member_count > 0;
        let needs_weakref = typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        debug_assert!(
            needs_ext || dict.is_none(),
            "dict passed to type '{}' without HAS_DICT flag",
            typ.name()
        );

        if needs_ext || needs_weakref {
            // Build layout left-to-right: [ObjExt?][WeakRefList?][PyInner]
            // dealloc() reconstructs this exact layout from the type flags.
            let mut layout = core::alloc::Layout::from_size_align(0, 1).unwrap();

            let ext_start = if needs_ext {
                let (combined, offset) =
                    layout.extend(core::alloc::Layout::new::<ObjExt>()).unwrap();
                layout = combined;
                Some(offset)
            } else {
                None
            };

            let weakref_start = if needs_weakref {
                let (combined, offset) = layout
                    .extend(core::alloc::Layout::new::<WeakRefList>())
                    .unwrap();
                layout = combined;
                Some(offset)
            } else {
                None
            };

            let (combined, inner_offset) =
                layout.extend(core::alloc::Layout::new::<Self>()).unwrap();
            let combined = combined.pad_to_align();

            let alloc_ptr = unsafe { alloc::alloc::alloc(combined) };
            if alloc_ptr.is_null() {
                alloc::alloc::handle_alloc_error(combined);
            }
            // Expose provenance so ext_ref()/weakref_list_ref() can reconstruct
            alloc_ptr.expose_provenance();

            unsafe {
                // Initialize prefixes in place before the PyInner itself.
                if let Some(offset) = ext_start {
                    let ext_ptr = alloc_ptr.add(offset) as *mut ObjExt;
                    ext_ptr.write(ObjExt::new(dict, member_count));
                }

                if let Some(offset) = weakref_start {
                    let weakref_ptr = alloc_ptr.add(offset) as *mut WeakRefList;
                    weakref_ptr.write(WeakRefList::new());
                }

                let inner_ptr = alloc_ptr.add(inner_offset) as *mut Self;
                inner_ptr.write(Self {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<T>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    typ: PyAtomicRef::from(typ),
                    payload,
                });
                // Return the PyInner pointer; the prefix offsets are implied
                // by the type flags (see dealloc()).
                inner_ptr
            }
        } else {
            // No prefixes needed: a plain boxed PyInner suffices.
            Box::into_raw(Box::new(Self {
                ref_count: RefCount::new(),
                vtable: PyObjVTable::of::<T>(),
                gc_bits: Radium::new(0),
                gc_generation: Radium::new(GC_UNTRACKED),
                gc_pointers: Pointers::new(),
                typ: PyAtomicRef::from(typ),
                payload,
            }))
        }
    }
}
1096
/// Returns the allocation layout for `PyInner<T>`, for use in freelist Drop impls.
///
/// NOTE(review): this is the *prefix-less* layout — it is only correct for
/// objects allocated without ObjExt/WeakRefList prefixes; confirm freelists
/// only ever cache such objects.
pub(crate) const fn pyinner_layout<T: PyPayload>() -> core::alloc::Layout {
    core::alloc::Layout::new::<PyInner<T>>()
}
1101
/// Thread-local freelist storage for reusing object allocations.
///
/// Wraps a `Vec<*mut PyObject>`. On thread teardown, `Drop` frees raw
/// `PyInner<T>` allocations without running payload destructors to avoid
/// accessing already-destroyed thread-local storage (GC state, other freelists).
pub(crate) struct FreeList<T: PyPayload> {
    // Raw pointers to cached allocations available for reuse.
    items: Vec<*mut PyObject>,
    // Ties the freelist to its payload type without storing a T.
    _marker: core::marker::PhantomData<T>,
}
1111
1112impl<T: PyPayload> FreeList<T> {
1113    pub(crate) const fn new() -> Self {
1114        Self {
1115            items: Vec::new(),
1116            _marker: core::marker::PhantomData,
1117        }
1118    }
1119}
1120
1121impl<T: PyPayload> Default for FreeList<T> {
1122    fn default() -> Self {
1123        Self::new()
1124    }
1125}
1126
1127impl<T: PyPayload> Drop for FreeList<T> {
1128    fn drop(&mut self) {
1129        // During thread teardown, we cannot safely run destructors on cached
1130        // objects because their Drop impls may access thread-local storage
1131        // (GC state, other freelists) that is already destroyed.
1132        // Instead, free just the raw allocation. The payload's heap fields
1133        // (BigInt, PyObjectRef, etc.) are leaked, but this is bounded by
1134        // MAX_FREELIST per type per thread.
1135        for ptr in self.items.drain(..) {
1136            unsafe {
1137                alloc::alloc::dealloc(ptr as *mut u8, core::alloc::Layout::new::<PyInner<T>>());
1138            }
1139        }
1140    }
1141}
1142
// Expose the inner Vec so callers can pop cached pointers directly.
impl<T: PyPayload> core::ops::Deref for FreeList<T> {
    type Target = Vec<*mut PyObject>;
    fn deref(&self) -> &Self::Target {
        &self.items
    }
}
1149
// Mutable access for pushing freed pointers back onto the list.
impl<T: PyPayload> core::ops::DerefMut for FreeList<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.items
    }
}
1155
/// The `PyObjectRef` is one of the most used types. It is a reference to a
/// python object. A single python object can have multiple references, and
/// this reference counting is accounted for by this type. Use the `.clone()`
/// method to create a new reference and increment the amount of references
/// to the python object by 1.
// repr(transparent): exactly one pointer wide, so it round-trips losslessly
// through raw pointers (see into_raw/from_raw below).
#[repr(transparent)]
pub struct PyObjectRef {
    ptr: NonNull<PyObject>,
}
1165
1166impl Clone for PyObjectRef {
1167    #[inline(always)]
1168    fn clone(&self) -> Self {
1169        (**self).to_owned()
1170    }
1171}
1172
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: with the threading feature, refcount updates are presumably
        // atomic (see common::refcount::RefCount — TODO confirm), making it
        // sound to send/share owning references across threads.
        unsafe impl Send for PyObjectRef {}
        unsafe impl Sync for PyObjectRef {}
    }
}
1179
/// An untyped (payload-erased) Python object header; always handled by
/// reference, never by value.
// repr(transparent) over PyInner<Erased>: a &PyInner<Erased> can be cast
// directly to &PyObject (see Py<T> casts elsewhere in this file).
#[repr(transparent)]
pub struct PyObject(PyInner<Erased>);
1182
impl Deref for PyObjectRef {
    type Target = PyObject;

    #[inline(always)]
    fn deref(&self) -> &PyObject {
        // SAFETY: `ptr` is non-null and this PyObjectRef owns a refcount, so
        // the object stays alive at least as long as `self`.
        unsafe { self.ptr.as_ref() }
    }
}
1191
impl ToOwned for PyObject {
    type Owned = PyObjectRef;

    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        // The increment is the refcount the returned PyObjectRef owns.
        self.0.ref_count.inc();
        PyObjectRef {
            ptr: NonNull::from(self),
        }
    }
}
1203
impl PyObject {
    /// Atomically try to create a strong reference.
    /// Returns `None` if the strong count is already 0 (object being destroyed).
    /// Uses CAS to prevent the TOCTOU race between checking strong_count and
    /// incrementing it.
    #[inline]
    pub fn try_to_owned(&self) -> Option<PyObjectRef> {
        // safe_inc only succeeds when the count was nonzero.
        if self.0.ref_count.safe_inc() {
            Some(PyObjectRef {
                ptr: NonNull::from(self),
            })
        } else {
            None
        }
    }

    /// Like [`try_to_owned`](Self::try_to_owned), but from a raw pointer.
    ///
    /// Uses `addr_of!` to access `ref_count` without forming `&PyObject`,
    /// minimizing the borrow scope when the pointer may be stale
    /// (e.g. cache-hit paths protected by version guards).
    ///
    /// # Safety
    /// `ptr` must point to a live (not yet deallocated) `PyObject`, or to
    /// memory whose `ref_count` field is still atomically readable
    /// (same guarantee as `_Py_TryIncRefShared`).
    #[inline]
    pub unsafe fn try_to_owned_from_ptr(ptr: *mut Self) -> Option<PyObjectRef> {
        // PyObject is repr(transparent) over PyInner<Erased>, so the cast is sound.
        let inner = ptr.cast::<PyInner<Erased>>();
        let ref_count = unsafe { &*core::ptr::addr_of!((*inner).ref_count) };
        if ref_count.safe_inc() {
            Some(PyObjectRef {
                // SAFETY (new_unchecked): safe_inc succeeded, so ptr was live, not null.
                ptr: unsafe { NonNull::new_unchecked(ptr) },
            })
        } else {
            None
        }
    }
}
1243
impl PyObjectRef {
    /// Leak this reference as a raw pointer without touching the refcount.
    /// Pair with [`PyObjectRef::from_raw`] or the refcount leaks.
    #[inline(always)]
    pub const fn into_raw(self) -> NonNull<PyObject> {
        let ptr = self.ptr;
        // forget() skips Drop so the owned refcount transfers to the raw pointer.
        core::mem::forget(self);
        ptr
    }

    /// # Safety
    /// The raw pointer must have been previously returned from a call to
    /// [`PyObjectRef::into_raw`]. The user is responsible for ensuring that the inner data is not
    /// dropped more than once due to mishandling the reference count by calling this function
    /// too many times.
    #[inline(always)]
    pub const unsafe fn from_raw(ptr: NonNull<PyObject>) -> Self {
        Self { ptr }
    }

    /// Attempt to downcast this reference to a subclass.
    ///
    /// If the downcast fails, the original ref is returned in as `Err` so
    /// another downcast can be attempted without unnecessary cloning.
    #[inline(always)]
    pub fn downcast<T: PyPayload>(self) -> Result<PyRef<T>, Self> {
        if self.downcastable::<T>() {
            Ok(unsafe { self.downcast_unchecked() })
        } else {
            Err(self)
        }
    }

    /// Downcast, raising a Python-level error (via `T::try_downcast_from`) on failure.
    pub fn try_downcast<T: PyPayload>(self, vm: &VirtualMachine) -> PyResult<PyRef<T>> {
        T::try_downcast_from(&self, vm)?;
        Ok(unsafe { self.downcast_unchecked() })
    }

    /// Force to downcast this reference to a subclass.
    ///
    /// # Safety
    /// T must be the exact payload type
    #[inline(always)]
    pub unsafe fn downcast_unchecked<T>(self) -> PyRef<T> {
        // PyRef::from_obj_unchecked(self)
        // manual impl to avoid assertion
        // ManuallyDrop: the refcount transfers into the returned PyRef.
        let obj = ManuallyDrop::new(self);
        PyRef {
            ptr: obj.ptr.cast(),
        }
    }

    // ideally we'd be able to define these in pyobject.rs, but method visibility rules are weird

    /// Attempt to downcast this reference to the specific class that is associated `T`.
    ///
    /// If the downcast fails, the original ref is returned in as `Err` so
    /// another downcast can be attempted without unnecessary cloning.
    #[inline]
    pub fn downcast_exact<T: PyPayload>(self, vm: &VirtualMachine) -> Result<PyRefExact<T>, Self> {
        if self.class().is(T::class(&vm.ctx)) {
            // TODO: is this always true?
            assert!(
                self.downcastable::<T>(),
                "obj.__class__ is T::class() but payload is not T"
            );
            // SAFETY: just asserted that downcastable::<T>()
            Ok(unsafe { PyRefExact::new_unchecked(PyRef::from_obj_unchecked(self)) })
        } else {
            Err(self)
        }
    }
}
1315
1316impl PyObject {
    /// Returns the WeakRefList if the type supports weakrefs (HAS_WEAKREF).
    /// The WeakRefList is stored as a separate prefix before PyInner,
    /// independent from ObjExt (dict/slots).
    #[inline(always)]
    fn weak_ref_list(&self) -> Option<&WeakRefList> {
        // `None` when the type was allocated without the HAS_WEAKREF prefix.
        self.0.weakref_list_ref()
    }
1324
    /// Returns the first weakref in the weakref list, if any.
    pub(crate) fn get_weakrefs(&self) -> Option<PyObjectRef> {
        let wrl = self.weak_ref_list()?;
        // Stripe lock keyed on this object's address guards the list links.
        let _lock = weakref_lock::lock(self as *const PyObject as usize);
        let head_ptr = wrl.head.load(Ordering::Relaxed);
        if head_ptr.is_null() {
            None
        } else {
            let head = unsafe { &*head_ptr };
            // Skip a head weakref that is concurrently being destroyed.
            if head.0.ref_count.safe_inc() {
                // SAFETY: the increment above is the refcount the PyRef owns.
                Some(unsafe { PyRef::from_raw(head_ptr) }.into())
            } else {
                None
            }
        }
    }
1341
    /// Create a weakref (of exactly the base weakref type) to this object,
    /// or `None` when the type does not support weakrefs.
    pub(crate) fn downgrade_with_weakref_typ_opt(
        &self,
        callback: Option<PyObjectRef>,
        // a reference to weakref_type **specifically**
        typ: PyTypeRef,
    ) -> Option<PyRef<PyWeak>> {
        self.weak_ref_list()
            .map(|wrl| wrl.add(self, typ, true, callback, None))
    }
1351
    /// Create a weakref to this object with an arbitrary weakref (sub)type.
    ///
    /// Returns a TypeError when the object's class lacks HAS_WEAKREF.
    pub(crate) fn downgrade_with_typ(
        &self,
        callback: Option<PyObjectRef>,
        typ: PyTypeRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<PyWeak>> {
        // Check HAS_WEAKREF flag first
        if !self
            .class()
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_WEAKREF)
        {
            return Err(vm.new_type_error(format!(
                "cannot create weak reference to '{}' object",
                self.class().name()
            )));
        }
        // Weakref subclasses with HAS_DICT get their own fresh instance dict.
        let dict = if typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_DICT)
        {
            Some(vm.ctx.new_dict())
        } else {
            None
        };
        let cls_is_weakref = typ.is(vm.ctx.types.weakref_type);
        // Defensive re-check: the prefix should exist whenever HAS_WEAKREF is set.
        let wrl = self.weak_ref_list().ok_or_else(|| {
            vm.new_type_error(format!(
                "cannot create weak reference to '{}' object",
                self.class().name()
            ))
        })?;
        Ok(wrl.add(self, typ, cls_is_weakref, callback, dict))
    }
1388
1389    pub fn downgrade(
1390        &self,
1391        callback: Option<PyObjectRef>,
1392        vm: &VirtualMachine,
1393    ) -> PyResult<PyRef<PyWeak>> {
1394        self.downgrade_with_typ(callback, vm.ctx.types.weakref_type.to_owned(), vm)
1395    }
1396
1397    pub fn get_weak_references(&self) -> Option<Vec<PyRef<PyWeak>>> {
1398        self.weak_ref_list()
1399            .map(|wrl| wrl.get_weak_references(self))
1400    }
1401
    /// Whether the payload's type id matches `T` exactly (no validation hook).
    #[deprecated(note = "use downcastable instead")]
    #[inline(always)]
    pub fn payload_is<T: PyPayload>(&self) -> bool {
        self.0.vtable.typeid == T::PAYLOAD_TYPE_ID
    }
1407
    /// Force to return payload as T.
    ///
    /// # Safety
    /// The actual payload type must be T.
    #[deprecated(note = "use downcast_unchecked_ref instead")]
    #[inline(always)]
    pub const unsafe fn payload_unchecked<T: PyPayload>(&self) -> &T {
        // we cast to a PyInner<T> first because we don't know T's exact offset because of
        // varying alignment, but once we get a PyInner<T> the compiler can get it for us
        // SAFETY: caller guarantees the erased payload really is a T.
        let inner = unsafe { &*(&self.0 as *const PyInner<Erased> as *const PyInner<T>) };
        &inner.payload
    }
1420
    /// Borrow the payload as `T`, or `None` when the type id doesn't match.
    #[deprecated(note = "use downcast_ref instead")]
    #[inline(always)]
    pub fn payload<T: PyPayload>(&self) -> Option<&T> {
        #[allow(deprecated)]
        if self.payload_is::<T>() {
            // SAFETY: payload_is confirmed the payload type is exactly T.
            #[allow(deprecated)]
            Some(unsafe { self.payload_unchecked() })
        } else {
            None
        }
    }
1432
    /// The object's Python type (`__class__`).
    #[inline(always)]
    pub fn class(&self) -> &Py<PyType> {
        self.0.typ.deref()
    }
1437
    /// Replace the object's type; the old type ref is parked as a temporary
    /// ref (see PyAtomicRef) rather than dropped immediately.
    pub fn set_class(&self, typ: PyTypeRef, vm: &VirtualMachine) {
        self.0.typ.swap_to_temporary_refs(typ, vm);
    }
1441
    /// Borrow the payload only when the class is *exactly* `T`'s class
    /// (subclasses excluded).
    #[deprecated(note = "use downcast_ref_if_exact instead")]
    #[inline(always)]
    pub fn payload_if_exact<T: PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
        if self.class().is(T::class(&vm.ctx)) {
            #[allow(deprecated)]
            self.payload()
        } else {
            None
        }
    }
1452
1453    #[inline(always)]
1454    fn instance_dict(&self) -> Option<&InstanceDict> {
1455        self.0.ext_ref().and_then(|ext| ext.dict.as_ref())
1456    }
1457
1458    #[inline(always)]
1459    pub fn dict(&self) -> Option<PyDictRef> {
1460        self.instance_dict().map(|d| d.get())
1461    }
1462
1463    /// Set the dict field. Returns `Err(dict)` if this object does not have a dict field
1464    /// in the first place.
1465    pub fn set_dict(&self, dict: PyDictRef) -> Result<(), PyDictRef> {
1466        match self.instance_dict() {
1467            Some(d) => {
1468                d.set(dict);
1469                Ok(())
1470            }
1471            None => Err(dict),
1472        }
1473    }
1474
    /// Borrow the payload when the class is `T`'s class or a subclass of it.
    #[deprecated(note = "use downcast_ref instead")]
    #[inline(always)]
    pub fn payload_if_subclass<T: crate::PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
        if self.class().fast_issubclass(T::class(&vm.ctx)) {
            #[allow(deprecated)]
            self.payload()
        } else {
            None
        }
    }
1485
    /// The `TypeId` of the (erased) payload, taken from the vtable.
    #[inline]
    pub(crate) fn typeid(&self) -> TypeId {
        self.0.vtable.typeid
    }
1490
    /// Check if this object can be downcast to T.
    #[inline(always)]
    pub fn downcastable<T: PyPayload>(&self) -> bool {
        // Type id match plus T's own validation hook (payload-specific checks).
        self.typeid() == T::PAYLOAD_TYPE_ID && unsafe { T::validate_downcastable_from(self) }
    }
1496
    /// Attempt to downcast this reference to a subclass.
    /// Errors (via `T::try_downcast_from`) with a Python-level exception on mismatch.
    pub fn try_downcast_ref<'a, T: PyPayload>(
        &'a self,
        vm: &VirtualMachine,
    ) -> PyResult<&'a Py<T>> {
        T::try_downcast_from(self, vm)?;
        // SAFETY: try_downcast_from succeeded, so the payload is a T.
        Ok(unsafe { self.downcast_unchecked_ref::<T>() })
    }
1505
1506    /// Attempt to downcast this reference to a subclass.
1507    #[inline(always)]
1508    pub fn downcast_ref<T: PyPayload>(&self) -> Option<&Py<T>> {
1509        if self.downcastable::<T>() {
1510            // SAFETY: just checked that the payload is T, and PyRef is repr(transparent) over
1511            // PyObjectRef
1512            Some(unsafe { self.downcast_unchecked_ref::<T>() })
1513        } else {
1514            None
1515        }
1516    }
1517
    /// Borrow as `Py<T>` only when the class is *exactly* `T`'s class.
    #[inline(always)]
    pub fn downcast_ref_if_exact<T: PyPayload>(&self, vm: &VirtualMachine) -> Option<&Py<T>> {
        // SAFETY (inside then): exact-class match implies the payload is T.
        self.class()
            .is(T::class(&vm.ctx))
            .then(|| unsafe { self.downcast_unchecked_ref::<T>() })
    }
1524
    /// # Safety
    /// T must be the exact payload type
    #[inline(always)]
    pub unsafe fn downcast_unchecked_ref<T: PyPayload>(&self) -> &Py<T> {
        debug_assert!(self.downcastable::<T>());
        // SAFETY: requirements forwarded from caller
        // (pointer cast is sound because Py<T> wraps the same header layout).
        unsafe { &*(self as *const Self as *const Py<T>) }
    }
1533
    /// Current strong reference count (like `sys.getrefcount`, minus the call's own ref).
    #[inline(always)]
    pub fn strong_count(&self) -> usize {
        self.0.ref_count.get()
    }
1538
    /// Number of live weakrefs to this object; `None` when the type has no
    /// weakref support.
    #[inline]
    pub fn weak_count(&self) -> Option<usize> {
        self.weak_ref_list().map(|wrl| wrl.count(self))
    }
1543
    /// This object's address as a raw pointer (no refcount change).
    #[inline(always)]
    pub const fn as_raw(&self) -> *const Self {
        self
    }
1548
    /// Check if the object has been finalized (__del__ already called).
    /// _PyGC_FINALIZED in Py_GIL_DISABLED mode.
    #[inline]
    pub(crate) fn gc_finalized(&self) -> bool {
        // Relaxed: the flag alone is queried; no ordering with other data needed.
        GcBits::from_bits_retain(self.0.gc_bits.load(Ordering::Relaxed)).contains(GcBits::FINALIZED)
    }
1555
    /// Mark the object as finalized. Should be called before __del__.
    /// _PyGC_SET_FINALIZED in Py_GIL_DISABLED mode.
    #[inline]
    pub(crate) fn set_gc_finalized(&self) {
        self.set_gc_bit(GcBits::FINALIZED);
    }
1562
    /// Set a GC bit atomically (fetch_or; other bits are preserved).
    #[inline]
    pub(crate) fn set_gc_bit(&self, bit: GcBits) {
        self.0.gc_bits.fetch_or(bit.bits(), Ordering::Relaxed);
    }
1568
    /// Get the GC generation index for this object
    /// (GC_UNTRACKED until the object is tracked).
    #[inline]
    pub(crate) fn gc_generation(&self) -> u8 {
        self.0.gc_generation.load(Ordering::Relaxed)
    }
1574
    /// Set the GC generation index for this object.
    /// Must only be called while holding the generation list's write lock.
    /// (Relaxed store is fine precisely because that lock serializes writers.)
    #[inline]
    pub(crate) fn set_gc_generation(&self, generation: u8) {
        self.0.gc_generation.store(generation, Ordering::Relaxed);
    }
1581
    /// _PyObject_GC_TRACK: flag the object as known to the cycle collector.
    #[inline]
    pub(crate) fn set_gc_tracked(&self) {
        self.set_gc_bit(GcBits::TRACKED);
    }
1587
    /// _PyObject_GC_UNTRACK: clear the tracked flag (other GC bits preserved).
    #[inline]
    pub(crate) fn clear_gc_tracked(&self) {
        self.0
            .gc_bits
            .fetch_and(!GcBits::TRACKED.bits(), Ordering::Relaxed);
    }
1595
    /// Slow path of destruction: run `__del__` (at most once) then clear
    /// weakrefs. Returns `Err(())` when `__del__` resurrected the object,
    /// in which case deallocation must be aborted.
    #[inline(always)] // the outer function is never inlined
    fn drop_slow_inner(&self) -> Result<(), ()> {
        // __del__ is mostly not implemented
        #[inline(never)]
        #[cold]
        fn call_slot_del(
            zelf: &PyObject,
            slot_del: fn(&PyObject, &VirtualMachine) -> PyResult<()>,
        ) -> Result<(), ()> {
            let ret = crate::vm::thread::with_vm(zelf, |vm| {
                // Temporarily resurrect (0→2) so ref_count stays positive
                // during __del__, preventing safe_inc from seeing 0.
                zelf.0.ref_count.inc_by(2);

                if let Err(e) = slot_del(zelf, vm) {
                    let del_method = zelf.get_class_attr(identifier!(vm, __del__)).unwrap();
                    vm.run_unraisable(e, None, del_method);
                }

                // Undo the temporary resurrection. Always remove both
                // temporary refs; the second dec returns true only when
                // ref_count drops to 0 (no resurrection).
                let _ = zelf.0.ref_count.dec();
                zelf.0.ref_count.dec()
            });
            match ret {
                // the decref set ref_count back to 0
                Some(true) => Ok(()),
                // we've been resurrected by __del__
                Some(false) => Err(()),
                // no VM available on this thread: nothing ran, proceed with dealloc
                None => Ok(()),
            }
        }

        // __del__ should only be called once (like _PyGC_FINALIZED check in GIL_DISABLED)
        // We call __del__ BEFORE clearing weakrefs to allow the finalizer to access
        // the object's weak references if needed.
        let del = self.class().slots.del.load();
        if let Some(slot_del) = del
            && !self.gc_finalized()
        {
            // Mark first so a re-entrant path cannot call __del__ twice.
            self.set_gc_finalized();
            call_slot_del(self, slot_del)?;
        }

        // Clear weak refs AFTER __del__.
        // Note: This differs from GC behavior which clears weakrefs before finalizers,
        // but for direct deallocation (drop_slow_inner), we need to allow the finalizer
        // to run without triggering use-after-free from WeakRefList operations.
        if let Some(wrl) = self.weak_ref_list() {
            wrl.clear(self);
        }

        Ok(())
    }
1651
    /// _Py_Dealloc: dispatch to type's dealloc
    ///
    /// # Safety
    /// `ptr` must be live with refcount 0; it is freed by the vtable dealloc.
    #[inline(never)]
    unsafe fn drop_slow(ptr: NonNull<Self>) {
        // SAFETY: caller guarantees ptr is still valid to read the vtable from.
        let dealloc = unsafe { ptr.as_ref().0.vtable.dealloc };
        unsafe { dealloc(ptr.as_ptr()) }
    }
1658
    /// # Safety
    /// This call will make the object live forever.
    /// (The refcount is "leaked" so it can never reach 0.)
    pub(crate) unsafe fn mark_intern(&self) {
        self.0.ref_count.leak();
    }
1664
    /// Whether this object was made immortal via [`PyObject::mark_intern`].
    pub(crate) fn is_interned(&self) -> bool {
        self.0.ref_count.is_leaked()
    }
1668
1669    pub(crate) fn get_slot(&self, offset: usize) -> Option<PyObjectRef> {
1670        self.0.ext_ref().unwrap().slots[offset].read().clone()
1671    }
1672
1673    pub(crate) fn set_slot(&self, offset: usize, value: Option<PyObjectRef>) {
1674        *self.0.ext_ref().unwrap().slots[offset].write() = value;
1675    }
1676
    /// _PyObject_GC_IS_TRACKED
    // Point-in-time query of the TRACKED flag in the atomic gc_bits; a relaxed
    // load is used since no ordering with other memory is required here.
    pub fn is_gc_tracked(&self) -> bool {
        GcBits::from_bits_retain(self.0.gc_bits.load(Ordering::Relaxed)).contains(GcBits::TRACKED)
    }
1681
1682    /// Get the referents (objects directly referenced) of this object.
1683    /// Uses the full traverse including dict and slots.
1684    pub fn gc_get_referents(&self) -> Vec<PyObjectRef> {
1685        let mut result = Vec::new();
1686        self.0.traverse(&mut |child: &PyObject| {
1687            result.push(child.to_owned());
1688        });
1689        result
1690    }
1691
    /// Call __del__ if present, without triggering object deallocation.
    /// Used by GC to call finalizers before breaking cycles.
    /// This allows proper resurrection detection.
    /// PyObject_CallFinalizerFromDealloc
    pub fn try_call_finalizer(&self) {
        let del = self.class().slots.del.load();
        if let Some(slot_del) = del
            && !self.gc_finalized()
        {
            // Mark as finalized BEFORE calling __del__ to prevent double-call
            // This ensures drop_slow_inner() won't call __del__ again
            self.set_gc_finalized();
            let result = crate::vm::thread::with_vm(self, |vm| {
                // If __del__ raises, route the error through run_unraisable
                // (finalizer errors must not propagate to the caller).
                if let Err(e) = slot_del(self, vm)
                    && let Some(del_method) = self.get_class_attr(identifier!(vm, __del__))
                {
                    vm.run_unraisable(e, None, del_method);
                }
            });
            // NOTE(review): `with_vm` appears to yield `None` when no VM is
            // attached to the current thread — the finalizer body is then
            // skipped silently; confirm this is the intended behavior.
            let _ = result;
        }
    }
1714
1715    /// Clear weakrefs but collect callbacks instead of calling them.
1716    /// This is used by GC to ensure ALL weakrefs are cleared BEFORE any callbacks run.
1717    /// Returns collected callbacks as (PyRef<PyWeak>, callback) pairs.
1718    // = handle_weakrefs
1719    pub fn gc_clear_weakrefs_collect_callbacks(&self) -> Vec<(PyRef<PyWeak>, PyObjectRef)> {
1720        if let Some(wrl) = self.weak_ref_list() {
1721            wrl.clear_for_gc_collect_callbacks(self)
1722        } else {
1723            vec![]
1724        }
1725    }
1726
1727    /// Get raw pointers to referents without incrementing reference counts.
1728    /// This is used during GC to avoid reference count manipulation.
1729    /// tp_traverse visits objects without incref
1730    ///
1731    /// # Safety
1732    /// The returned pointers are only valid as long as the object is alive
1733    /// and its contents haven't been modified.
1734    pub unsafe fn gc_get_referent_ptrs(&self) -> Vec<NonNull<PyObject>> {
1735        let mut result = Vec::new();
1736        // Traverse the entire object including dict and slots
1737        self.0.traverse(&mut |child: &PyObject| {
1738            result.push(NonNull::from(child));
1739        });
1740        result
1741    }
1742
    /// Pop edges from this object for cycle breaking.
    /// Returns extracted child references that were removed from this object (tp_clear).
    /// This is used during garbage collection to break circular references.
    ///
    /// # Safety
    /// - ptr must be a valid pointer to a PyObject
    /// - The caller must have exclusive access (no other references exist)
    /// - This is only safe during GC when the object is unreachable
    pub unsafe fn gc_clear_raw(ptr: *mut PyObject) -> Vec<PyObjectRef> {
        let mut result = Vec::new();
        let obj = unsafe { &*ptr };

        // 1. Clear payload-specific references (vtable.clear / tp_clear)
        if let Some(clear_fn) = obj.0.vtable.clear {
            unsafe { clear_fn(ptr, &mut result) };
        }

        // 2. Clear dict and member slots (subtype_clear)
        // Detach the dict via Py_CLEAR(*_PyObject_GetDictPtr(self)) — NULL
        // the pointer without clearing dict contents. The dict may still be
        // referenced by other live objects (e.g. function.__globals__).
        let (flags, member_count) = obj.0.read_type_flags();
        let has_ext = flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
        if has_ext {
            // The ObjExt prefix lives at a fixed offset *before* the object;
            // skip the optional WeakRefList segment when present
            // (layout: [ObjExt][WeakRefList?][PyInner]).
            let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
            let offset = if has_weakref {
                WEAKREF_OFFSET + EXT_OFFSET
            } else {
                EXT_OFFSET
            };
            let self_addr = (ptr as *const u8).addr();
            // Provenance for the whole allocation was exposed at alloc time,
            // so reconstructing a prefix pointer from a raw address is valid.
            let ext_ptr =
                core::ptr::with_exposed_provenance_mut::<ObjExt>(self_addr.wrapping_sub(offset));
            // SAFETY: caller guarantees exclusive access, so a &mut here cannot alias.
            let ext = unsafe { &mut *ext_ptr };
            if let Some(old_dict) = ext.dict.take() {
                // Get the dict ref before dropping InstanceDict
                let dict_ref = old_dict.into_inner();
                result.push(dict_ref.into());
            }
            // Extract (not just drop) each slot value so the caller controls
            // when the references are released.
            for slot in ext.slots.iter() {
                if let Some(val) = slot.write().take() {
                    result.push(val);
                }
            }
        }

        result
    }
1791
    /// Clear this object for cycle breaking (tp_clear).
    /// This version takes &self but should only be called during GC
    /// when exclusive access is guaranteed.
    ///
    /// # Safety
    /// - The caller must guarantee exclusive access (no other references exist)
    /// - This is only safe during GC when the object is unreachable
    pub unsafe fn gc_clear(&self) -> Vec<PyObjectRef> {
        // SAFETY: During GC collection, this object is unreachable (gc_refs == 0),
        // meaning no other code has a reference to it. The only references are
        // internal cycle references which we're about to break.
        // The `&self → *mut` cast is sound only under that exclusivity guarantee.
        unsafe { Self::gc_clear_raw(self as *const _ as *mut PyObject) }
    }
1805
1806    /// Check if this object has clear capability (tp_clear)
1807    // Py_TPFLAGS_HAVE_GC types have tp_clear
1808    pub fn gc_has_clear(&self) -> bool {
1809        self.0.vtable.clear.is_some()
1810            || self
1811                .0
1812                .ext_ref()
1813                .is_some_and(|ext| ext.dict.is_some() || !ext.slots.is_empty())
1814    }
1815}
1816
// A `PyObjectRef` can always be viewed as an untyped `&PyObject`
// (the bare `self` return goes through deref coercion).
impl Borrow<PyObject> for PyObjectRef {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        self
    }
}

impl AsRef<PyObject> for PyObjectRef {
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self
    }
}

// A typed `&Py<T>` erases to an untyped `&PyObject` for free.
impl<'a, T: PyPayload> From<&'a Py<T>> for &'a PyObject {
    #[inline(always)]
    fn from(py_ref: &'a Py<T>) -> Self {
        py_ref.as_object()
    }
}
1837
impl Drop for PyObjectRef {
    #[inline]
    fn drop(&mut self) {
        // `dec()` returning true means the count hit zero and we were the last
        // strong reference; run the out-of-line deallocation path.
        if self.0.ref_count.dec() {
            // SAFETY: refcount reached zero ⇒ exclusive ownership.
            unsafe { PyObject::drop_slow(self.ptr) }
        }
    }
}
1846
impl fmt::Debug for PyObject {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Dispatch through the per-payload vtable entry so the concrete
        // payload's Debug impl runs despite type erasure.
        // SAFETY: the vtable contains functions that accept payload types that always match up
        // with the payload of the object
        unsafe { (self.0.vtable.debug)(self, f) }
    }
}
1854
1855impl fmt::Debug for PyObjectRef {
1856    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1857        self.as_object().fmt(f)
1858    }
1859}
1860
1861const STACKREF_BORROW_TAG: usize = 1;
1862
/// A tagged stack reference to a Python object.
///
/// Uses the lowest bit of the pointer to distinguish owned vs borrowed:
/// - bit 0 = 0 → **owned**: refcount was incremented; Drop will decrement.
/// - bit 0 = 1 → **borrowed**: no refcount change; Drop is a no-op.
///
/// Same size as `PyObjectRef` (one pointer-width).  `PyObject` is at least
/// 8-byte aligned, so the low bit is always available for tagging.
///
/// Uses `NonZeroUsize` so that `Option<PyStackRef>` has the same size as
/// `PyStackRef` via niche optimization (matching `Option<PyObjectRef>`).
#[repr(transparent)]
pub struct PyStackRef {
    /// Tagged pointer: the `PyObject` address with the low bit carrying the
    /// owned/borrowed flag described above.
    bits: NonZeroUsize,
}
1878
1879impl PyStackRef {
1880    /// Create an owned stack reference, consuming the `PyObjectRef`.
1881    /// Refcount is NOT incremented — ownership is transferred.
1882    #[inline(always)]
1883    pub fn new_owned(obj: PyObjectRef) -> Self {
1884        let ptr = obj.into_raw();
1885        let bits = ptr.as_ptr() as usize;
1886        debug_assert!(
1887            bits & STACKREF_BORROW_TAG == 0,
1888            "PyObject pointer must be aligned"
1889        );
1890        Self {
1891            // SAFETY: valid PyObject pointers are never null
1892            bits: unsafe { NonZeroUsize::new_unchecked(bits) },
1893        }
1894    }
1895
1896    /// Create a borrowed stack reference from a `&PyObject`.
1897    ///
1898    /// # Safety
1899    /// The caller must guarantee that the pointed-to object lives at least as
1900    /// long as this `PyStackRef`.  In practice the compiler guarantees that
1901    /// borrowed refs are consumed within the same basic block, before any
1902    /// `STORE_FAST`/`DELETE_FAST` could overwrite the source slot.
1903    #[inline(always)]
1904    pub unsafe fn new_borrowed(obj: &PyObject) -> Self {
1905        let bits = (obj as *const PyObject as usize) | STACKREF_BORROW_TAG;
1906        Self {
1907            // SAFETY: valid PyObject pointers are never null, and ORing with 1 keeps it non-zero
1908            bits: unsafe { NonZeroUsize::new_unchecked(bits) },
1909        }
1910    }
1911
1912    /// Whether this is a borrowed (non-owning) reference.
1913    #[inline(always)]
1914    pub fn is_borrowed(&self) -> bool {
1915        self.bits.get() & STACKREF_BORROW_TAG != 0
1916    }
1917
1918    /// Get a `&PyObject` reference.  Works for both owned and borrowed.
1919    #[inline(always)]
1920    pub fn as_object(&self) -> &PyObject {
1921        unsafe { &*((self.bits.get() & !STACKREF_BORROW_TAG) as *const PyObject) }
1922    }
1923
1924    /// Convert to an owned `PyObjectRef`.
1925    ///
1926    /// * If **borrowed** → increments refcount, forgets self.
1927    /// * If **owned** → reconstructs `PyObjectRef` from the raw pointer, forgets self.
1928    #[inline(always)]
1929    pub fn to_pyobj(self) -> PyObjectRef {
1930        let obj = if self.is_borrowed() {
1931            self.as_object().to_owned() // inc refcount
1932        } else {
1933            let ptr = unsafe { NonNull::new_unchecked(self.bits.get() as *mut PyObject) };
1934            unsafe { PyObjectRef::from_raw(ptr) }
1935        };
1936        core::mem::forget(self); // don't run Drop
1937        obj
1938    }
1939
1940    /// Promote a borrowed ref to owned **in place** (increments refcount,
1941    /// clears the borrow tag).  No-op if already owned.
1942    #[inline(always)]
1943    pub fn promote(&mut self) {
1944        if self.is_borrowed() {
1945            self.as_object().0.ref_count.inc();
1946            // SAFETY: clearing the low bit of a non-null pointer keeps it non-zero
1947            self.bits =
1948                unsafe { NonZeroUsize::new_unchecked(self.bits.get() & !STACKREF_BORROW_TAG) };
1949        }
1950    }
1951}
1952
impl Drop for PyStackRef {
    #[inline]
    fn drop(&mut self) {
        if !self.is_borrowed() {
            // Owned: decrement refcount (potentially deallocate).
            // SAFETY: an owned ref stores a valid, untagged, non-null pointer
            // whose strong count we release by rebuilding and dropping a PyObjectRef.
            let ptr = unsafe { NonNull::new_unchecked(self.bits.get() as *mut PyObject) };
            drop(unsafe { PyObjectRef::from_raw(ptr) });
        }
        // Borrowed: nothing to do.
    }
}

impl core::ops::Deref for PyStackRef {
    type Target = PyObject;

    // Both owned and borrowed refs deref to the (untagged) pointee.
    #[inline(always)]
    fn deref(&self) -> &PyObject {
        self.as_object()
    }
}

impl Clone for PyStackRef {
    /// Cloning always produces an **owned** reference (increments refcount).
    #[inline(always)]
    fn clone(&self) -> Self {
        Self::new_owned(self.as_object().to_owned())
    }
}
1981
1982impl fmt::Debug for PyStackRef {
1983    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1984        if self.is_borrowed() {
1985            write!(f, "PyStackRef(borrowed, ")?;
1986        } else {
1987            write!(f, "PyStackRef(owned, ")?;
1988        }
1989        self.as_object().fmt(f)?;
1990        write!(f, ")")
1991    }
1992}
1993
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // NOTE(review): mirrors the feature-gated Send/Sync impls on PyRef<T>;
        // soundness rests on the same cross-thread refcounting guarantees —
        // confirm against the RefCount implementation.
        unsafe impl Send for PyStackRef {}
        unsafe impl Sync for PyStackRef {}
    }
}

// Ensure Option<PyStackRef> uses niche optimization and matches Option<PyObjectRef> in size
const _: () = assert!(
    core::mem::size_of::<Option<PyStackRef>>() == core::mem::size_of::<Option<PyObjectRef>>()
);
const _: () =
    assert!(core::mem::size_of::<Option<PyStackRef>>() == core::mem::size_of::<PyStackRef>());
2007
/// A typed, interpreter-independent Python object — the typed counterpart of
/// [`PyObject`] (see the table in the module docs).
#[repr(transparent)]
pub struct Py<T>(PyInner<T>);

impl<T: PyPayload> Py<T> {
    /// Create a typed weak reference to this object, with an optional weakref
    /// `callback`. Errors from the underlying downgrade are propagated.
    pub fn downgrade(
        &self,
        callback: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyWeakRef<T>> {
        Ok(PyWeakRef {
            weak: self.as_object().downgrade(callback, vm)?,
            _marker: PhantomData,
        })
    }

    /// Borrow the Rust payload stored in this object.
    #[inline]
    pub fn payload(&self) -> &T {
        &self.0.payload
    }
}
2028
impl<T> ToOwned for Py<T> {
    type Owned = PyRef<T>;

    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        // Mint a new strong reference: bump the count, then wrap the pointer.
        self.0.ref_count.inc();
        PyRef {
            ptr: NonNull::from(self),
        }
    }
}

impl<T> Deref for Py<T> {
    type Target = T;

    // Deref straight to the payload so payload methods resolve directly.
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0.payload
    }
}
2049
impl<T: PyPayload> Borrow<PyObject> for Py<T> {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        // SAFETY: `Py<T>` is repr(transparent) over `PyInner<T>`; the cast only
        // reinterprets the pointer as the type-erased object view.
        // NOTE(review): relies on `PyObject` being layout-compatible with the
        // header of `PyInner<T>` — confirm if `PyInner`'s layout changes.
        unsafe { &*(&self.0 as *const PyInner<T> as *const PyObject) }
    }
}

impl<T> core::hash::Hash for Py<T>
where
    T: core::hash::Hash + PyPayload,
{
    // Hashing delegates to the payload (value-based, not identity-based).
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}

impl<T> PartialEq for Py<T>
where
    T: PartialEq + PyPayload,
{
    // Equality compares payload values, not object identity.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.deref().eq(other.deref())
    }
}

impl<T> Eq for Py<T> where T: Eq + PyPayload {}

impl<T> AsRef<PyObject> for Py<T>
where
    T: PyPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}

impl<T: PyPayload + core::fmt::Debug> fmt::Debug for Py<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug-print the payload (via Deref).
        (**self).fmt(f)
    }
}
2094
/// A reference to a Python object.
///
/// Note that a `PyRef<T>` can only deref to a shared / immutable reference.
/// It is the payload type's responsibility to handle (possibly concurrent)
/// mutability with locks or concurrent data structures if required.
///
/// A `PyRef<T>` can be directly returned from a built-in function to handle
/// situations (such as when implementing in-place methods such as `__iadd__`)
/// where a reference to the same object must be returned.
#[repr(transparent)]
pub struct PyRef<T> {
    /// Non-null pointer to the object; this ref owns one strong count
    /// (incremented in `to_owned`, released in `Drop`).
    ptr: NonNull<Py<T>>,
}
2108
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // NOTE(review): sharing PyRef across threads presumes atomic
        // refcounting and thread-safe payload access under the "threading"
        // feature — confirm against RefCount and payload lock usage.
        unsafe impl<T> Send for PyRef<T> {}
        unsafe impl<T> Sync for PyRef<T> {}
    }
}

impl<T: fmt::Debug> fmt::Debug for PyRef<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug-print the referent (via Deref to Py<T>).
        (**self).fmt(f)
    }
}

impl<T> Drop for PyRef<T> {
    #[inline]
    fn drop(&mut self) {
        // Release our strong count; if it hit zero, deallocate via the vtable.
        if self.0.ref_count.dec() {
            // SAFETY: refcount reached zero ⇒ exclusive ownership.
            unsafe { PyObject::drop_slow(self.ptr.cast::<PyObject>()) }
        }
    }
}

impl<T> Clone for PyRef<T> {
    // Clone = take an additional strong reference (count bump, no payload copy).
    #[inline(always)]
    fn clone(&self) -> Self {
        (**self).to_owned()
    }
}
2137
impl<T: PyPayload> PyRef<T> {
    // #[inline(always)]
    // pub(crate) const fn into_non_null(self) -> NonNull<Py<T>> {
    //     let ptr = self.ptr;
    //     std::mem::forget(self);
    //     ptr
    // }

    /// # Safety
    /// `ptr` must point to a valid `Py<T>` and carry one strong reference that
    /// this `PyRef` takes over (it will be released by `Drop`).
    #[inline(always)]
    pub(crate) const unsafe fn from_non_null(ptr: NonNull<Py<T>>) -> Self {
        Self { ptr }
    }

    /// # Safety
    /// The raw pointer must point to a valid `Py<T>` object
    #[inline(always)]
    pub(crate) const unsafe fn from_raw(raw: *const Py<T>) -> Self {
        unsafe { Self::from_non_null(NonNull::new_unchecked(raw as *mut _)) }
    }

    /// Safety: payload type of `obj` must be `T`
    #[inline(always)]
    unsafe fn from_obj_unchecked(obj: PyObjectRef) -> Self {
        debug_assert!(obj.downcast_ref::<T>().is_some());
        // ManuallyDrop suppresses PyObjectRef's Drop: its strong count is
        // transferred into the returned PyRef.
        let obj = ManuallyDrop::new(obj);
        Self {
            ptr: obj.ptr.cast(),
        }
    }

    /// Leak this reference as a `'static` borrow; the strong count is never
    /// released, so the object lives for the rest of the program.
    pub const fn leak(pyref: Self) -> &'static Py<T> {
        let ptr = pyref.ptr;
        core::mem::forget(pyref);
        unsafe { ptr.as_ref() }
    }
}
2174
impl<T: PyPayload + crate::object::MaybeTraverse + core::fmt::Debug> PyRef<T> {
    /// Allocate (or recycle from the per-type freelist) an object with the
    /// given payload, type, and optional instance dict, registering it with
    /// the GC when trackable.
    #[inline(always)]
    pub fn new_ref(payload: T, typ: crate::builtins::PyTypeRef, dict: Option<PyDictRef>) -> Self {
        let has_dict = dict.is_some();
        let is_heaptype = typ.heaptype_ext.is_some();

        // Try to reuse from freelist (no dict, no heaptype)
        let cached = if !has_dict && !is_heaptype {
            unsafe { T::freelist_pop(&payload) }
        } else {
            None
        };

        let ptr = if let Some(cached) = cached {
            let inner = cached.as_ptr() as *mut PyInner<T>;
            unsafe {
                // Re-initialize the recycled header: fresh refcount, clean GC bits.
                core::ptr::write(&mut (*inner).ref_count, RefCount::new());
                (*inner).gc_bits.store(0, Ordering::Relaxed);
                // Drop the stale payload in place before writing the new one.
                core::ptr::drop_in_place(&mut (*inner).payload);
                core::ptr::write(&mut (*inner).payload, payload);
                // Freelist only stores exact base types (push-side filter),
                // but subtypes sharing the same Rust payload (e.g. structseq)
                // may pop entries. Update typ if it differs.
                let cached_typ: *const Py<PyType> = &*(*inner).typ;
                if core::ptr::eq(cached_typ, &*typ) {
                    drop(typ);
                } else {
                    let _old = (*inner).typ.swap(typ);
                }
            }
            unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) }
        } else {
            // Slow path: fresh allocation via PyInner.
            let inner = PyInner::new(payload, typ, dict);
            unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) }
        };

        // Track object if:
        // - HAS_TRAVERSE is true (Rust payload implements Traverse), OR
        // - has instance dict (user-defined class instances), OR
        // - heap type (all heap type instances are GC-tracked, like Py_TPFLAGS_HAVE_GC)
        if <T as crate::object::MaybeTraverse>::HAS_TRAVERSE || has_dict || is_heaptype {
            let gc = crate::gc_state::gc_state();
            unsafe {
                gc.track_object(ptr.cast());
            }
            // Check if automatic GC should run
            gc.maybe_collect();
        }

        Self { ptr }
    }
}
2227
impl<T: crate::class::PySubclass + core::fmt::Debug> PyRef<T>
where
    T::Base: core::fmt::Debug,
{
    /// Converts this reference to the base type (ownership transfer).
    /// # Safety
    /// T and T::Base must have compatible layouts in size_of::<T::Base>() bytes.
    #[inline]
    pub fn into_base(self) -> PyRef<T::Base> {
        let obj: PyObjectRef = self.into();
        match obj.downcast() {
            Ok(base_ref) => base_ref,
            // SAFETY: by the PySubclass bound, every T is an instance of
            // T::Base, so this downcast cannot fail.
            Err(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }

    /// Converts this reference to an ancestor type `U`
    /// (subtype relation checked in debug builds only).
    #[inline]
    pub fn upcast<U: PyPayload + StaticType>(self) -> PyRef<U>
    where
        T: StaticType,
    {
        debug_assert!(T::static_type().is_subtype(U::static_type()));
        let obj: PyObjectRef = self.into();
        match obj.downcast::<U>() {
            Ok(upcast_ref) => upcast_ref,
            // SAFETY: the debug_assert above states the invariant (T is a
            // subtype of U) under which this downcast always succeeds.
            Err(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }
}
2256
impl<T: crate::class::PySubclass> Py<T> {
    /// Converts `&Py<T>` to `&Py<T::Base>`.
    // Zero-cost pointer reinterpretation; validity checked in debug builds.
    #[inline]
    pub fn to_base(&self) -> &Py<T::Base> {
        debug_assert!(self.as_object().downcast_ref::<T::Base>().is_some());
        // SAFETY: T is #[repr(transparent)] over T::Base,
        // so Py<T> and Py<T::Base> have the same layout.
        unsafe { &*(self as *const Py<T> as *const Py<T::Base>) }
    }

    /// Converts `&Py<T>` to `&Py<U>` where U is an ancestor type.
    // Zero-cost pointer reinterpretation; subtype relation checked in debug builds.
    #[inline]
    pub fn upcast_ref<U: PyPayload + StaticType>(&self) -> &Py<U>
    where
        T: StaticType,
    {
        debug_assert!(T::static_type().is_subtype(U::static_type()));
        // SAFETY: T is a subtype of U, so Py<T> can be viewed as Py<U>.
        unsafe { &*(self as *const Py<T> as *const Py<U>) }
    }
}
2278
impl<T> Borrow<PyObject> for PyRef<T>
where
    T: PyPayload,
{
    // View the referent as an untyped object.
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        (**self).as_object()
    }
}

impl<T> AsRef<PyObject> for PyRef<T>
where
    T: PyPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}

impl<T> From<PyRef<T>> for PyObjectRef {
    // Ownership transfer: ManuallyDrop suppresses PyRef's Drop, so the strong
    // count moves into the returned PyObjectRef (no inc/dec pair needed).
    #[inline]
    fn from(value: PyRef<T>) -> Self {
        let me = ManuallyDrop::new(value);
        Self { ptr: me.ptr.cast() }
    }
}

impl<T> Borrow<Py<T>> for PyRef<T> {
    #[inline(always)]
    fn borrow(&self) -> &Py<T> {
        self
    }
}

impl<T> AsRef<Py<T>> for PyRef<T> {
    #[inline(always)]
    fn as_ref(&self) -> &Py<T> {
        self
    }
}

impl<T> Deref for PyRef<T> {
    type Target = Py<T>;

    #[inline(always)]
    fn deref(&self) -> &Py<T> {
        // SAFETY: `ptr` stays valid while this PyRef holds its strong count.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> core::hash::Hash for PyRef<T>
where
    T: core::hash::Hash + PyPayload,
{
    // Hashing delegates to the payload (value-based, not identity-based).
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}

impl<T> PartialEq for PyRef<T>
where
    T: PartialEq + PyPayload,
{
    // Equality compares payload values, not object identity.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.deref().eq(other.deref())
    }
}

impl<T> Eq for PyRef<T> where T: Eq + PyPayload {}
2351
/// A weak reference whose referent payload type is known statically.
#[repr(transparent)]
pub struct PyWeakRef<T: PyPayload> {
    /// The untyped weakref object that does the actual tracking.
    weak: PyRef<PyWeak>,
    /// Remembers `T` without storing a value of it.
    _marker: PhantomData<T>,
}

impl<T: PyPayload> PyWeakRef<T> {
    /// Try to obtain a strong reference; `None` if the referent is already dead.
    pub fn upgrade(&self) -> Option<PyRef<T>> {
        self.weak
            .upgrade()
            // SAFETY: PyWeakRef<T> was always created from a PyRef<T>, so the object is T
            .map(|obj| unsafe { PyRef::from_obj_unchecked(obj) })
    }
}
2366
/// Partially initialize a struct, ensuring that all fields are
/// either given values or explicitly left uninitialized
///
/// Expands to a `MaybeUninit<$ty>` with only the `$init_field`s written; the
/// caller must write every `Uninit` field before treating the value as
/// initialized.
macro_rules! partially_init {
    (
        $ty:path {$($init_field:ident: $init_value:expr),*$(,)?},
        Uninit { $($uninit_field:ident),*$(,)? }$(,)?
    ) => {{
        // check all the fields are there but *don't* actually run it
        // (the `if false` branch exists purely so the compiler verifies the
        // field list matches the struct definition)

        #[allow(clippy::diverging_sub_expression, reason = "intentional compile-time field check in an unreachable branch")]
        if false {
            #[allow(invalid_value, dead_code, unreachable_code)]
            let _ = {$ty {
                $($init_field: $init_value,)*
                $($uninit_field: unreachable!(),)*
            }};
        }
        let mut m = ::core::mem::MaybeUninit::<$ty>::uninit();
        #[allow(unused_unsafe)]
        unsafe {
            $(::core::ptr::write(&mut (*m.as_mut_ptr()).$init_field, $init_value);)*
        }
        m
    }};
}
2392
/// Bootstrap the circular `type`/`object` hierarchy plus the `weakref` type,
/// returning `(type, object, weakref)` type references.
pub(crate) fn init_type_hierarchy() -> (PyTypeRef, PyTypeRef, PyTypeRef) {
    use crate::{builtins::object, class::PyClassImpl};
    use core::mem::MaybeUninit;

    // `type` inherits from `object`
    // and both `type` and `object are instances of `type`.
    // to produce this circular dependency, we need an unsafe block.
    // (and yes, this will never get dropped. TODO?)
    let (type_type, object_type) = {
        // We cast between these 2 types, so make sure (at compile time) that there's no change in
        // layout when we wrap PyInner<PyTypeObj> in MaybeUninit<>
        static_assertions::assert_eq_size!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);
        static_assertions::assert_eq_align!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);

        // Payload for the `type` type object.
        let type_payload = PyType {
            base: None,
            bases: PyRwLock::default(),
            mro: PyRwLock::default(),
            subclasses: PyRwLock::default(),
            attributes: PyRwLock::new(Default::default()),
            slots: PyType::make_slots(),
            heaptype_ext: None,
            tp_version_tag: core::sync::atomic::AtomicU32::new(0),
        };
        // Payload for the `object` base type.
        let object_payload = PyType {
            base: None,
            bases: PyRwLock::default(),
            mro: PyRwLock::default(),
            subclasses: PyRwLock::default(),
            attributes: PyRwLock::new(Default::default()),
            slots: object::PyBaseObject::make_slots(),
            heaptype_ext: None,
            tp_version_tag: core::sync::atomic::AtomicU32::new(0),
        };
        // Both type_type and object_type are instances of `type`, which has
        // HAS_DICT and HAS_WEAKREF, so they need both ObjExt and WeakRefList prefixes.
        // Layout: [ObjExt][WeakRefList][PyInner<PyType>]
        let alloc_type_with_prefixes = || -> *mut MaybeUninit<PyInner<PyType>> {
            let inner_layout = core::alloc::Layout::new::<MaybeUninit<PyInner<PyType>>>();
            let ext_layout = core::alloc::Layout::new::<ObjExt>();
            let weakref_layout = core::alloc::Layout::new::<WeakRefList>();

            let (layout, weakref_offset) = ext_layout.extend(weakref_layout).unwrap();
            let (combined, inner_offset) = layout.extend(inner_layout).unwrap();
            let combined = combined.pad_to_align();

            let alloc_ptr = unsafe { alloc::alloc::alloc(combined) };
            if alloc_ptr.is_null() {
                alloc::alloc::handle_alloc_error(combined);
            }
            // Expose provenance so prefix pointers can later be reconstructed
            // from raw addresses (see `gc_clear_raw`'s with_exposed_provenance_mut).
            alloc_ptr.expose_provenance();

            unsafe {
                let ext_ptr = alloc_ptr as *mut ObjExt;
                ext_ptr.write(ObjExt::new(None, 0));

                let weakref_ptr = alloc_ptr.add(weakref_offset) as *mut WeakRefList;
                weakref_ptr.write(WeakRefList::new());

                // Hand back a pointer to the PyInner segment, not the allocation start.
                alloc_ptr.add(inner_offset) as *mut MaybeUninit<PyInner<PyType>>
            }
        };

        let type_type_ptr = alloc_type_with_prefixes();
        unsafe {
            // Write everything except `typ`, which cannot exist yet — it is
            // `type` itself (the circular part of the bootstrap).
            type_type_ptr.write(partially_init!(
                PyInner::<PyType> {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<PyType>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    payload: type_payload,
                },
                Uninit { typ }
            ));
        }

        let object_type_ptr = alloc_type_with_prefixes();
        unsafe {
            object_type_ptr.write(partially_init!(
                PyInner::<PyType> {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<PyType>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    payload: object_payload,
                },
                Uninit { typ },
            ));
        }

        let object_type_ptr = object_type_ptr as *mut PyInner<PyType>;
        let type_type_ptr = type_type_ptr as *mut PyInner<PyType>;

        unsafe {
            // Refcounts are adjusted by hand here to account for the handles
            // minted via `from_raw` and the references stored into the `typ`
            // fields below.
            (*type_type_ptr).ref_count.inc();
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            ptr::write(&mut (*object_type_ptr).typ, PyAtomicRef::from(type_type));
            (*type_type_ptr).ref_count.inc();
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            ptr::write(&mut (*type_type_ptr).typ, PyAtomicRef::from(type_type));

            let object_type = PyTypeRef::from_raw(object_type_ptr.cast());
            // object's mro is [object]
            (*object_type_ptr).payload.mro = PyRwLock::new(vec![object_type.clone()]);

            (*type_type_ptr).payload.bases = PyRwLock::new(vec![object_type.clone()]);
            (*type_type_ptr).payload.base = Some(object_type.clone());

            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            // type's mro is [type, object]
            (*type_type_ptr).payload.mro =
                PyRwLock::new(vec![type_type.clone(), object_type.clone()]);

            (type_type, object_type)
        }
    };

    // `weakref` is an ordinary (non-bootstrap) static type built atop object/type.
    let weakref_type = PyType {
        base: Some(object_type.clone()),
        bases: PyRwLock::new(vec![object_type.clone()]),
        mro: PyRwLock::new(vec![object_type.clone()]),
        subclasses: PyRwLock::default(),
        attributes: PyRwLock::default(),
        slots: PyWeak::make_slots(),
        heaptype_ext: None,
        tp_version_tag: core::sync::atomic::AtomicU32::new(0),
    };
    let weakref_type = PyRef::new_ref(weakref_type, type_type.clone(), None);
    // Static type: untrack from GC (was tracked by new_ref because PyType has HAS_TRAVERSE)
    unsafe {
        crate::gc_state::gc_state()
            .untrack_object(core::ptr::NonNull::from(weakref_type.as_object()));
    }
    weakref_type.as_object().clear_gc_tracked();
    // weakref's mro is [weakref, object]
    weakref_type.mro.write().insert(0, weakref_type.clone());

    // Register `type` and `weakref` as subclasses of `object`; subclass links
    // are weak references so the bookkeeping doesn't keep types alive.
    object_type.subclasses.write().push(
        type_type
            .as_object()
            .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
            .unwrap(),
    );

    object_type.subclasses.write().push(
        weakref_type
            .as_object()
            .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
            .unwrap(),
    );

    (type_type, object_type, weakref_type)
}
2549
#[cfg(test)]
mod tests {
    use super::*;

    // Runs the circular type/object bootstrap so Miri can check the manual
    // allocation/initialization dance for undefined behavior.
    #[test]
    fn miri_test_type_initialization() {
        let _ = init_type_hierarchy();
    }

    // Exercises a full create-then-drop cycle of a simple object under Miri.
    #[test]
    fn miri_test_drop() {
        //cspell:ignore dfghjkl
        let ctx = crate::Context::genesis();
        let obj = ctx.new_bytes(b"dfghjkl".to_vec());
        drop(obj);
    }
}