1use super::{
14 PyAtomicRef,
15 ext::{AsObject, PyRefExact, PyResult},
16 payload::PyPayload,
17};
18use crate::object::traverse_object::PyObjVTable;
19use crate::{
20 builtins::{PyDictRef, PyType, PyTypeRef},
21 common::{
22 atomic::{Ordering, PyAtomic, Radium},
23 linked_list::{Link, Pointers},
24 lock::PyRwLock,
25 refcount::RefCount,
26 },
27 vm::VirtualMachine,
28};
29use crate::{
30 class::StaticType,
31 object::traverse::{MaybeTraverse, Traverse, TraverseFn},
32};
33use itertools::Itertools;
34
35use alloc::fmt;
36
37use core::{
38 any::TypeId,
39 borrow::Borrow,
40 cell::UnsafeCell,
41 marker::PhantomData,
42 mem::ManuallyDrop,
43 num::NonZeroUsize,
44 ops::Deref,
45 ptr::{self, NonNull},
46};
47
/// Zero-sized stand-in payload used for the type-erased view of an object
/// (`PyObject` wraps `PyInner<Erased>`); never instantiated directly here.
#[derive(Debug)]
pub(super) struct Erased;
84
85mod trashcan {
89 use core::cell::Cell;
90
91 const TRASHCAN_LIMIT: usize = 50;
94
95 type DeallocFn = unsafe fn(*mut super::PyObject);
96 type DeallocQueue = Vec<(*mut super::PyObject, DeallocFn)>;
97
98 thread_local! {
99 static DEALLOC_DEPTH: Cell<usize> = const { Cell::new(0) };
100 static DEALLOC_QUEUE: Cell<DeallocQueue> = const { Cell::new(Vec::new()) };
101 }
102
103 #[inline]
106 pub(super) unsafe fn begin(
107 obj: *mut super::PyObject,
108 dealloc: unsafe fn(*mut super::PyObject),
109 ) -> bool {
110 DEALLOC_DEPTH.with(|d| {
111 let depth = d.get();
112 if depth >= TRASHCAN_LIMIT {
113 DEALLOC_QUEUE.with(|q| {
115 let mut queue = q.take();
116 queue.push((obj, dealloc));
117 q.set(queue);
118 });
119 false
120 } else {
121 d.set(depth + 1);
122 true
123 }
124 })
125 }
126
127 #[inline]
129 pub(super) unsafe fn end() {
130 let depth = DEALLOC_DEPTH.with(|d| {
131 let depth = d.get();
132 debug_assert!(depth > 0, "trashcan::end called without matching begin");
133 let depth = depth - 1;
134 d.set(depth);
135 depth
136 });
137 if depth == 0 {
138 loop {
140 let next = DEALLOC_QUEUE.with(|q| {
141 let mut queue = q.take();
142 let item = queue.pop();
143 q.set(queue);
144 item
145 });
146 if let Some((obj, dealloc)) = next {
147 unsafe { dealloc(obj) };
148 } else {
149 break;
150 }
151 }
152 }
153 }
154}
155
/// Type-erased destructor for an object with payload `T`; installed in the
/// vtable and invoked when the strong count reaches zero.
///
/// # Safety
/// `obj` must point to a live `PyInner<T>` whose strong count just dropped to
/// zero; the pointer must not be used after this call.
pub(super) unsafe fn default_dealloc<T: PyPayload>(obj: *mut PyObject) {
    let obj_ref = unsafe { &*(obj as *const PyObject) };
    // Run finalization first; Err(()) means the object was resurrected
    // (a new strong reference appeared) and must not be freed.
    if let Err(()) = obj_ref.drop_slow_inner() {
        return; }

    // Trashcan guard: if `false`, the object was queued and will be freed by
    // the outermost trashcan::end(), so we are done here.
    if !unsafe { trashcan::begin(obj, default_dealloc::<T>) } {
        return; }

    let vtable = obj_ref.0.vtable;

    // Remove from the GC's tracked lists before the memory goes away.
    if obj_ref.is_gc_tracked() {
        let ptr = unsafe { NonNull::new_unchecked(obj) };
        unsafe {
            crate::gc_state::gc_state().untrack_object(ptr);
        }
        debug_assert!(
            !obj_ref.is_gc_tracked(),
            "object still tracked after untrack_object"
        );
        debug_assert_eq!(
            obj_ref.gc_generation(),
            crate::object::GC_UNTRACKED,
            "gc_generation not reset after untrack_object"
        );
    }

    let typ = obj_ref.class();
    // Freelist recycling only for exact instances of T's static class with no
    // heap-type extras; a successful push keeps the allocation alive.
    let pushed = if T::HAS_FREELIST
        && typ.heaptype_ext.is_none()
        && core::ptr::eq(typ, T::class(crate::vm::Context::genesis()))
    {
        unsafe { T::freelist_push(obj) }
    } else {
        false
    };

    // Clear outgoing references into `edges`; they are dropped only after the
    // object itself is gone (below), bounding reentrancy via the trashcan.
    let mut edges = Vec::new();
    if let Some(clear_fn) = vtable.clear {
        unsafe { clear_fn(obj, &mut edges) };
    }

    if !pushed {
        unsafe { PyInner::dealloc(obj as *mut PyInner<T>) };
    }

    // May recursively deallocate the referenced objects.
    drop(edges);

    unsafe { trashcan::end() };
}
222pub(super) unsafe fn debug_obj<T: PyPayload + core::fmt::Debug>(
223 x: &PyObject,
224 f: &mut fmt::Formatter<'_>,
225) -> fmt::Result {
226 let x = unsafe { &*(x as *const PyObject as *const PyInner<T>) };
227 fmt::Debug::fmt(x, f)
228}
229
230pub(super) unsafe fn try_traverse_obj<T: PyPayload>(x: &PyObject, tracer_fn: &mut TraverseFn<'_>) {
232 let x = unsafe { &*(x as *const PyObject as *const PyInner<T>) };
233 let payload = &x.payload;
234 payload.try_traverse(tracer_fn)
235}
236
237pub(super) unsafe fn try_clear_obj<T: PyPayload>(x: *mut PyObject, out: &mut Vec<PyObjectRef>) {
239 let x = unsafe { &mut *(x as *mut PyInner<T>) };
240 x.payload.try_clear(out);
241}
242
bitflags::bitflags! {
    /// Per-object GC state flags stored in `PyInner::gc_bits`.
    #[derive(Copy, Clone, Debug, Default)]
    pub(crate) struct GcBits: u8 {
        /// Object is linked into a GC generation list.
        const TRACKED = 1 << 0;
        /// Finalizer has already run; it must not run again (see `gc_finalized`).
        const FINALIZED = 1 << 1;
        /// Marked unreachable during the current collection pass.
        const UNREACHABLE = 1 << 2;
        /// NOTE(review): presumably "exempt from collection" (gc.freeze-style);
        /// semantics not visible in this file — confirm in gc_state.
        const FROZEN = 1 << 3;
        /// NOTE(review): sharing-related flags; exact meaning (cross-thread
        /// sharing vs. biased refcounting) not visible here — confirm.
        const SHARED = 1 << 4;
        const SHARED_INLINE = 1 << 5;
        /// NOTE(review): presumably deferred refcounting — confirm.
        const DEFERRED = 1 << 6;
    }
}
267
/// `gc_generation` sentinel: the object is not currently tracked by the GC.
pub(crate) const GC_UNTRACKED: u8 = 0xFF;
/// `gc_generation` value for permanently-exempt objects — presumably one past
/// the oldest collected generation (0..=2); confirm against gc_state.
pub(crate) const GC_PERMANENT: u8 = 3;
271
/// Intrusive-linked-list adapter letting the GC chain `PyObject`s through the
/// `gc_pointers` field embedded in every object header.
pub(crate) struct GcLink;

// SAFETY: handles are plain `NonNull` pointers (no ownership transferred by
// as_raw/from_raw), and `pointers` projects to the `gc_pointers` field, which
// sits at the same offset for every payload type thanks to `#[repr(C)]`.
unsafe impl Link for GcLink {
    type Handle = NonNull<PyObject>;
    type Target = PyObject;

    fn as_raw(handle: &NonNull<PyObject>) -> NonNull<PyObject> {
        *handle
    }

    unsafe fn from_raw(ptr: NonNull<PyObject>) -> NonNull<PyObject> {
        ptr
    }

    unsafe fn pointers(target: NonNull<PyObject>) -> NonNull<Pointers<PyObject>> {
        // `PyObject` is a transparent wrapper over `PyInner<Erased>`, so the
        // erased view reaches the embedded link pointers.
        let inner_ptr = target.as_ptr() as *mut PyInner<Erased>;
        unsafe { NonNull::new_unchecked(&raw mut (*inner_ptr).gc_pointers) }
    }
}
294
/// Out-of-band instance state (`__dict__` plus `__slots__` storage) allocated
/// as a *prefix* in front of the `PyInner` when the type requires it; located
/// via negative offsets in `PyInner::ext_ref`.
#[repr(C, align(8))]
pub(super) struct ObjExt {
    // Instance `__dict__`, present only for HAS_DICT types.
    pub(super) dict: Option<InstanceDict>,
    // One cell per `__slots__` member, sized by the type's member_count.
    pub(super) slots: Box<[PyRwLock<Option<PyObjectRef>>]>,
}
308
309impl ObjExt {
310 fn new(dict: Option<PyDictRef>, member_count: usize) -> Self {
311 Self {
312 dict: dict.map(InstanceDict::new),
313 slots: core::iter::repeat_with(|| PyRwLock::new(None))
314 .take(member_count)
315 .collect_vec()
316 .into_boxed_slice(),
317 }
318 }
319}
320
321impl fmt::Debug for ObjExt {
322 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
323 write!(f, "[ObjExt]")
324 }
325}
326
// Byte offsets of the optional prefix segments that sit directly in front of
// a `PyInner`; `ext_ref`/`weakref_list_ref` subtract these to walk backwards.
const EXT_OFFSET: usize = core::mem::size_of::<ObjExt>();
const WEAKREF_OFFSET: usize = core::mem::size_of::<WeakRefList>();

// Compile-time layout invariants the negative-offset math relies on: each
// prefix type's size is a multiple of its alignment (so no padding is inserted
// between consecutive segments), and its alignment is at least that of the
// object header.
const _: () =
    assert!(core::mem::size_of::<ObjExt>().is_multiple_of(core::mem::align_of::<ObjExt>()));
const _: () = assert!(core::mem::align_of::<ObjExt>() >= core::mem::align_of::<PyInner<()>>());
const _: () = assert!(
    core::mem::size_of::<WeakRefList>().is_multiple_of(core::mem::align_of::<WeakRefList>())
);
const _: () = assert!(core::mem::align_of::<WeakRefList>() >= core::mem::align_of::<PyInner<()>>());
340
/// Heap representation of every Python object: a fixed header followed by the
/// type-specific `payload`. `#[repr(C)]` keeps the header fields at the same
/// offsets for every `T`, which makes the type-erased `PyInner<Erased>` view
/// (`PyObject`) sound.
#[repr(C)]
pub(super) struct PyInner<T> {
    // Strong reference count.
    pub(super) ref_count: RefCount,
    // Type-erased hooks (dealloc/debug/traverse/clear) for the payload type.
    pub(super) vtable: &'static PyObjVTable,
    // `GcBits` flags, accessed atomically.
    pub(super) gc_bits: PyAtomic<u8>,
    // GC generation index, or `GC_UNTRACKED`.
    pub(super) gc_generation: PyAtomic<u8>,
    // Intrusive links for the GC generation lists (see `GcLink`).
    pub(super) gc_pointers: Pointers<PyObject>,

    // The object's Python type; atomically swappable (`__class__` assignment).
    pub(super) typ: PyAtomicRef<PyType>,
    // Payload comes last so everything before it forms the fixed-size head.
    pub(super) payload: T,
}
/// Size of the fixed object header (everything except the payload).
pub(crate) const SIZEOF_PYOBJECT_HEAD: usize = core::mem::size_of::<PyInner<()>>();
361
impl<T> PyInner<T> {
    /// Read the type's slot `flags` and `member_count` via raw pointers,
    /// without materializing a `&PyType` from the atomic type ref.
    #[inline(always)]
    fn read_type_flags(&self) -> (crate::types::PyTypeFlags, usize) {
        let typ_ptr = self.typ.load_raw();
        // NOTE(review): assumes `typ` always points at a live PyType; raw
        // addr_of! reads avoid forming references into the type object.
        let slots = unsafe { core::ptr::addr_of!((*typ_ptr).0.payload.slots) };
        let flags = unsafe { core::ptr::addr_of!((*slots).flags).read() };
        let member_count = unsafe { core::ptr::addr_of!((*slots).member_count).read() };
        (flags, member_count)
    }

    /// The `ObjExt` prefix (dict + slots storage), if this object's type
    /// requested one at allocation time (HAS_DICT or member slots).
    ///
    /// Allocation layout is `[ObjExt?][WeakRefList?][PyInner]` (see
    /// `PyInner::new`), so the lookup subtracts the prefix sizes from `self`.
    #[inline(always)]
    pub(super) fn ext_ref(&self) -> Option<&ObjExt> {
        let (flags, member_count) = self.read_type_flags();
        let has_ext = flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
        if !has_ext {
            return None;
        }
        let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        // When present, the weakref list sits between the ObjExt and the
        // object header, so it must be skipped over too.
        let offset = if has_weakref {
            WEAKREF_OFFSET + EXT_OFFSET
        } else {
            EXT_OFFSET
        };
        let self_addr = (self as *const Self as *const u8).addr();
        // Provenance was exposed at allocation time (see `PyInner::new`), so
        // reconstructing a pointer from the raw address is valid.
        let ext_ptr = core::ptr::with_exposed_provenance::<ObjExt>(self_addr.wrapping_sub(offset));
        Some(unsafe { &*ext_ptr })
    }

    /// The `WeakRefList` prefix, if the type supports weak references; it is
    /// always the segment immediately preceding the object header.
    #[inline(always)]
    pub(super) fn weakref_list_ref(&self) -> Option<&WeakRefList> {
        let (flags, _) = self.read_type_flags();
        if !flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF) {
            return None;
        }
        let self_addr = (self as *const Self as *const u8).addr();
        let ptr = core::ptr::with_exposed_provenance::<WeakRefList>(
            self_addr.wrapping_sub(WEAKREF_OFFSET),
        );
        Some(unsafe { &*ptr })
    }
}
415
impl<T: fmt::Debug> fmt::Debug for PyInner<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the payload is interesting; the header fields are internal.
        write!(f, "[PyObject {:?}]", &self.payload)
    }
}
421
// SAFETY: delegates to the object header's traverse, which visits only the
// object's outgoing references.
unsafe impl<T: MaybeTraverse> Traverse for Py<T> {
    fn traverse(&self, tracer_fn: &mut TraverseFn<'_>) {
        self.0.traverse(tracer_fn)
    }
}

// SAFETY: same as above, via the type-erased header.
unsafe impl Traverse for PyObject {
    fn traverse(&self, tracer_fn: &mut TraverseFn<'_>) {
        self.0.traverse(tracer_fn)
    }
}
437
#[cfg(feature = "threading")]
mod weakref_lock {
    //! Striped spinlocks serializing mutation of per-object weakref lists.
    //! The referent's address selects one of a fixed number of lock stripes.
    use core::sync::atomic::{AtomicU8, Ordering};

    /// Number of lock stripes; addresses hash onto one of these.
    const NUM_WEAKREF_LOCKS: usize = 64;

    static LOCKS: [AtomicU8; NUM_WEAKREF_LOCKS] = [const { AtomicU8::new(0) }; NUM_WEAKREF_LOCKS];

    /// RAII guard: releases its stripe when dropped.
    pub(super) struct WeakrefLockGuard {
        idx: usize,
    }

    impl Drop for WeakrefLockGuard {
        fn drop(&mut self) {
            LOCKS[self.idx].store(0, Ordering::Release);
        }
    }

    /// Spin until the stripe for `addr` is acquired.
    pub(super) fn lock(addr: usize) -> WeakrefLockGuard {
        // Shift off low bits (objects are at least 16-byte granular-ish)
        // before selecting a stripe — NOTE(review): heuristic, confirm.
        let idx = (addr >> 4) % NUM_WEAKREF_LOCKS;
        while LOCKS[idx]
            .compare_exchange_weak(0, 1, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            core::hint::spin_loop();
        }
        WeakrefLockGuard { idx }
    }

    /// After `fork()` only the calling thread survives; a stripe held by any
    /// other thread would never be released, so force-release all of them.
    #[cfg(unix)]
    pub(crate) fn reset_all_after_fork() {
        for lock in &LOCKS {
            lock.store(0, Ordering::Release);
        }
    }
}
480
#[cfg(not(feature = "threading"))]
mod weakref_lock {
    //! Single-threaded build: locking is a no-op, but the guard type is kept
    //! so call sites stay identical across configurations.
    pub(super) struct WeakrefLockGuard;

    impl Drop for WeakrefLockGuard {
        // Explicit no-op Drop mirrors the threading variant's guard.
        fn drop(&mut self) {}
    }

    /// No-op lock; the address is ignored.
    pub(super) fn lock(_addr: usize) -> WeakrefLockGuard {
        WeakrefLockGuard
    }
}
493
/// Re-arm the weakref stripe locks in the child after `fork()`: stripes held
/// by threads that did not survive the fork would otherwise deadlock.
#[cfg(all(unix, feature = "threading"))]
pub(crate) fn reset_weakref_locks_after_fork() {
    weakref_lock::reset_all_after_fork();
}
500
/// Header of the per-object doubly-linked list of weak references; stored as
/// an allocation prefix in front of the referent's header (see
/// `PyInner::weakref_list_ref`). All mutation happens under the weakref
/// stripe lock for the referent's address.
#[repr(C)]
pub(super) struct WeakRefList {
    // First node of the list (nodes are `Py<PyWeak>`); null when empty.
    head: PyAtomic<*mut Py<PyWeak>>,
    // Cached "generic" weakref (exact weakref type, no callback) so repeated
    // plain `weakref.ref(obj)` calls can return the same object (see `add`).
    generic: PyAtomic<*mut Py<PyWeak>>,
}

impl fmt::Debug for WeakRefList {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The raw node pointers are not meaningful to print.
        f.debug_struct("WeakRefList").finish_non_exhaustive()
    }
}
517
/// Remove `node` from `wrl`'s doubly-linked list, fixing up its neighbors
/// (or the list head) and nulling the node's own links.
///
/// # Safety
/// The caller must hold the weakref stripe lock for the referent, and `node`
/// must currently be linked into `wrl` (calling this on an already-detached
/// node would clobber the list head).
unsafe fn unlink_weakref(wrl: &WeakRefList, node: NonNull<Py<PyWeak>>) {
    unsafe {
        let mut ptrs = WeakLink::pointers(node);
        let prev = ptrs.as_ref().get_prev();
        let next = ptrs.as_ref().get_next();

        if let Some(prev) = prev {
            WeakLink::pointers(prev).as_mut().set_next(next);
        } else {
            // No predecessor: node was the head, so advance the head.
            wrl.head.store(
                next.map_or(ptr::null_mut(), |p| p.as_ptr()),
                Ordering::Relaxed,
            );
        }
        if let Some(next) = next {
            WeakLink::pointers(next).as_mut().set_prev(prev);
        }

        // Null out the node's own links now that it is detached.
        ptrs.as_mut().set_prev(None);
        ptrs.as_mut().set_next(None);
    }
}
545
546impl WeakRefList {
547 pub fn new() -> Self {
548 Self {
549 head: Radium::new(ptr::null_mut()),
550 generic: Radium::new(ptr::null_mut()),
551 }
552 }
553
    /// Create (or reuse) a weak reference to `obj` and link it into the list.
    ///
    /// `cls_is_weakref` indicates `cls` is exactly the built-in weakref type;
    /// combined with no callback, the new ref is "generic" and eligible for
    /// caching and reuse.
    fn add(
        &self,
        obj: &PyObject,
        cls: PyTypeRef,
        cls_is_weakref: bool,
        callback: Option<PyObjectRef>,
        dict: Option<PyDictRef>,
    ) -> PyRef<PyWeak> {
        let is_generic = cls_is_weakref && callback.is_none();

        // Fast path: hand out the cached generic weakref if one exists.
        {
            let _lock = weakref_lock::lock(obj as *const PyObject as usize);
            if is_generic {
                let generic_ptr = self.generic.load(Ordering::Relaxed);
                if !generic_ptr.is_null() {
                    let generic = unsafe { &*generic_ptr };
                    // safe_inc fails if the cached ref is mid-destruction.
                    if generic.0.ref_count.safe_inc() {
                        return unsafe { PyRef::from_raw(generic_ptr) };
                    }
                }
            }
        }

        // Allocate the new weakref outside the lock (allocation may run
        // arbitrary code, e.g. GC).
        let weak_payload = PyWeak {
            pointers: Pointers::new(),
            wr_object: Radium::new(obj as *const PyObject as *mut PyObject),
            callback: UnsafeCell::new(callback),
            hash: Radium::new(crate::common::hash::SENTINEL),
        };
        let weak = PyRef::new_ref(weak_payload, cls, dict);

        let _lock = weakref_lock::lock(obj as *const PyObject as usize);

        // Double-check: another thread may have installed a generic weakref
        // while the lock was released.
        if is_generic {
            let generic_ptr = self.generic.load(Ordering::Relaxed);
            if !generic_ptr.is_null() {
                let generic = unsafe { &*generic_ptr };
                if generic.0.ref_count.safe_inc() {
                    // Neutralize the freshly created (now unused) weakref so
                    // its Drop does not try to unlink it from the list.
                    weak.wr_object.store(ptr::null_mut(), Ordering::Relaxed);
                    return unsafe { PyRef::from_raw(generic_ptr) };
                }
            }
        }

        let node_ptr = NonNull::from(&*weak);
        unsafe {
            let mut ptrs = WeakLink::pointers(node_ptr);
            if is_generic {
                // Generic refs are pushed at the head and become the cache.
                let old_head = self.head.load(Ordering::Relaxed);
                ptrs.as_mut().set_next(NonNull::new(old_head));
                ptrs.as_mut().set_prev(None);
                if let Some(old_head) = NonNull::new(old_head) {
                    WeakLink::pointers(old_head)
                        .as_mut()
                        .set_prev(Some(node_ptr));
                }
                self.head.store(node_ptr.as_ptr(), Ordering::Relaxed);
                self.generic.store(node_ptr.as_ptr(), Ordering::Relaxed);
            } else {
                // Non-generic refs are spliced in right after the cached
                // generic ref when one exists, otherwise pushed at the head.
                let generic_ptr = self.generic.load(Ordering::Relaxed);
                if let Some(after) = NonNull::new(generic_ptr) {
                    let after_next = WeakLink::pointers(after).as_ref().get_next();
                    ptrs.as_mut().set_prev(Some(after));
                    ptrs.as_mut().set_next(after_next);
                    WeakLink::pointers(after).as_mut().set_next(Some(node_ptr));
                    if let Some(next) = after_next {
                        WeakLink::pointers(next).as_mut().set_prev(Some(node_ptr));
                    }
                } else {
                    let old_head = self.head.load(Ordering::Relaxed);
                    ptrs.as_mut().set_next(NonNull::new(old_head));
                    ptrs.as_mut().set_prev(None);
                    if let Some(old_head) = NonNull::new(old_head) {
                        WeakLink::pointers(old_head)
                            .as_mut()
                            .set_prev(Some(node_ptr));
                    }
                    self.head.store(node_ptr.as_ptr(), Ordering::Relaxed);
                }
            }
        }

        weak
    }
652
653 fn clear(&self, obj: &PyObject) {
657 let obj_addr = obj as *const PyObject as usize;
658 let _lock = weakref_lock::lock(obj_addr);
659
660 self.generic.store(ptr::null_mut(), Ordering::Relaxed);
662
663 let mut callbacks: Vec<(PyRef<PyWeak>, PyObjectRef)> = Vec::new();
665 let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
666 while let Some(node) = current {
667 let next = unsafe { WeakLink::pointers(node).as_ref().get_next() };
668
669 let wr = unsafe { node.as_ref() };
670
671 wr.0.payload
673 .wr_object
674 .store(ptr::null_mut(), Ordering::Relaxed);
675
676 unsafe {
678 let mut ptrs = WeakLink::pointers(node);
679 ptrs.as_mut().set_prev(None);
680 ptrs.as_mut().set_next(None);
681 }
682
683 if wr.0.ref_count.safe_inc() {
685 let wr_ref = unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) };
686 let cb = unsafe { wr.0.payload.callback.get().replace(None) };
687 if let Some(cb) = cb {
688 callbacks.push((wr_ref, cb));
689 }
690 }
691
692 current = next;
693 }
694 self.head.store(ptr::null_mut(), Ordering::Relaxed);
695
696 drop(_lock);
698 for (wr, cb) in callbacks {
699 crate::vm::thread::with_vm(&cb, |vm| {
700 let _ = cb.call((wr.clone(),), vm);
701 });
702 }
703 }
704
    /// GC variant of clearing: detach every weakref under the stripe lock and
    /// *return* the pending `(weakref, callback)` pairs instead of invoking
    /// them, so the collector can run the callbacks at a safe point.
    fn clear_for_gc_collect_callbacks(&self, obj: &PyObject) -> Vec<(PyRef<PyWeak>, PyObjectRef)> {
        let obj_addr = obj as *const PyObject as usize;
        let _lock = weakref_lock::lock(obj_addr);

        // Drop the generic cache first; its node is about to be detached.
        self.generic.store(ptr::null_mut(), Ordering::Relaxed);

        let mut callbacks = Vec::new();
        let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
        while let Some(node) = current {
            let next = unsafe { WeakLink::pointers(node).as_ref().get_next() };

            let wr = unsafe { node.as_ref() };

            // Mark the weakref dead before detaching it.
            wr.0.payload
                .wr_object
                .store(ptr::null_mut(), Ordering::Relaxed);

            unsafe {
                let mut ptrs = WeakLink::pointers(node);
                ptrs.as_mut().set_prev(None);
                ptrs.as_mut().set_next(None);
            }

            // Only weakrefs still externally alive get their callback run;
            // safe_inc also keeps them alive for the eventual call.
            if wr.0.ref_count.safe_inc() {
                let wr_ref = unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) };
                // Take the callback out so it runs at most once.
                let cb = unsafe { wr.0.payload.callback.get().replace(None) };
                if let Some(cb) = cb {
                    callbacks.push((wr_ref, cb));
                }
            }

            current = next;
        }
        self.head.store(ptr::null_mut(), Ordering::Relaxed);

        callbacks
    }
749
750 fn count(&self, obj: &PyObject) -> usize {
751 let _lock = weakref_lock::lock(obj as *const PyObject as usize);
752 let mut count = 0usize;
753 let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
754 while let Some(node) = current {
755 if unsafe { node.as_ref() }.0.ref_count.get() > 0 {
756 count += 1;
757 }
758 current = unsafe { WeakLink::pointers(node).as_ref().get_next() };
759 }
760 count
761 }
762
763 fn get_weak_references(&self, obj: &PyObject) -> Vec<PyRef<PyWeak>> {
764 let _lock = weakref_lock::lock(obj as *const PyObject as usize);
765 let mut v = Vec::new();
766 let mut current = NonNull::new(self.head.load(Ordering::Relaxed));
767 while let Some(node) = current {
768 let wr = unsafe { node.as_ref() };
769 if wr.0.ref_count.safe_inc() {
770 v.push(unsafe { PyRef::from_raw(wr as *const Py<PyWeak>) });
771 }
772 current = unsafe { WeakLink::pointers(node).as_ref().get_next() };
773 }
774 v
775 }
776}
777
778impl Default for WeakRefList {
779 fn default() -> Self {
780 Self::new()
781 }
782}
783
/// Intrusive-list adapter chaining `Py<PyWeak>` nodes through the `pointers`
/// field embedded in the `PyWeak` payload.
struct WeakLink;
// SAFETY: a handle owns one strong reference (`PyRef`); as_raw/from_raw
// round-trip that ownership, and `pointers` projects to the payload's link
// field without creating intermediate references.
unsafe impl Link for WeakLink {
    type Handle = PyRef<PyWeak>;

    type Target = Py<PyWeak>;

    #[inline(always)]
    fn as_raw(handle: &PyRef<PyWeak>) -> NonNull<Self::Target> {
        NonNull::from(&**handle)
    }

    #[inline(always)]
    unsafe fn from_raw(ptr: NonNull<Self::Target>) -> Self::Handle {
        unsafe { PyRef::from_raw(ptr.as_ptr()) }
    }

    #[inline(always)]
    unsafe fn pointers(target: NonNull<Self::Target>) -> NonNull<Pointers<Self::Target>> {
        unsafe { NonNull::new_unchecked(&raw mut (*target.as_ptr()).0.payload.pointers) }
    }
}
806
/// Payload of a Python `weakref` object.
#[pyclass(name = "weakref", module = false)]
#[derive(Debug)]
pub struct PyWeak {
    // Intrusive links in the referent's `WeakRefList` (see `WeakLink`).
    pointers: Pointers<Py<PyWeak>>,
    // The referent; nulled once it dies (see `upgrade`/`drop_inner`/`clear`).
    wr_object: PyAtomic<*mut PyObject>,
    // Callback invoked when the referent dies; a plain UnsafeCell because
    // mutation happens only under the weakref stripe lock (see `clear`).
    callback: UnsafeCell<Option<PyObjectRef>>,
    // Cached hash of the referent; SENTINEL until first computed.
    pub(crate) hash: PyAtomic<crate::common::hash::PyHash>,
}
819
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: the non-Sync `UnsafeCell` callback field is accessed only
        // under the weakref stripe locks (see add/clear/drop_inner) —
        // NOTE(review): soundness relies on every access site taking that
        // lock; verify any new call sites.
        unsafe impl Send for PyWeak {}
        unsafe impl Sync for PyWeak {}
    }
}
826
827impl PyWeak {
828 pub(crate) fn upgrade(&self) -> Option<PyObjectRef> {
830 let obj_ptr = self.wr_object.load(Ordering::Acquire);
831 if obj_ptr.is_null() {
832 return None;
833 }
834
835 let _lock = weakref_lock::lock(obj_ptr as usize);
836
837 let obj_ptr = self.wr_object.load(Ordering::Relaxed);
839 if obj_ptr.is_null() {
840 return None;
841 }
842
843 unsafe {
844 if !(*obj_ptr).0.ref_count.safe_inc() {
845 return None;
846 }
847 Some(PyObjectRef::from_raw(NonNull::new_unchecked(obj_ptr)))
848 }
849 }
850
851 pub(crate) fn is_dead(&self) -> bool {
852 self.wr_object.load(Ordering::Acquire).is_null()
853 }
854
855 fn drop_inner(&self) {
857 let obj_ptr = self.wr_object.load(Ordering::Acquire);
858 if obj_ptr.is_null() {
859 return; }
861
862 let _lock = weakref_lock::lock(obj_ptr as usize);
863
864 let obj_ptr = self.wr_object.load(Ordering::Relaxed);
866 if obj_ptr.is_null() {
867 return; }
869
870 let obj = unsafe { &*obj_ptr };
871 let wrl = obj.0.weakref_list_ref().unwrap();
873
874 let offset = std::mem::offset_of!(PyInner<Self>, payload);
876 let py_inner = (self as *const Self)
877 .cast::<u8>()
878 .wrapping_sub(offset)
879 .cast::<PyInner<Self>>();
880 let node_ptr = unsafe { NonNull::new_unchecked(py_inner as *mut Py<Self>) };
881
882 unsafe { unlink_weakref(wrl, node_ptr) };
884
885 if wrl.generic.load(Ordering::Relaxed) == node_ptr.as_ptr() {
887 wrl.generic.store(ptr::null_mut(), Ordering::Relaxed);
888 }
889
890 self.wr_object.store(ptr::null_mut(), Ordering::Relaxed);
892 }
893}
894
895impl Drop for PyWeak {
896 #[inline(always)]
897 fn drop(&mut self) {
898 let me: &Self = self;
900 me.drop_inner();
901 }
902}
903
impl Py<PyWeak> {
    /// Convenience forwarding of [`PyWeak::upgrade`].
    #[inline(always)]
    pub fn upgrade(&self) -> Option<PyObjectRef> {
        PyWeak::upgrade(self)
    }
}
910
/// Lock-protected holder for an instance's `__dict__`.
#[derive(Debug)]
pub(super) struct InstanceDict {
    pub(super) d: PyRwLock<PyDictRef>,
}
915
916impl From<PyDictRef> for InstanceDict {
917 #[inline(always)]
918 fn from(d: PyDictRef) -> Self {
919 Self::new(d)
920 }
921}
922
923impl InstanceDict {
924 #[inline]
925 pub const fn new(d: PyDictRef) -> Self {
926 Self {
927 d: PyRwLock::new(d),
928 }
929 }
930
931 #[inline]
932 pub fn get(&self) -> PyDictRef {
933 self.d.read().clone()
934 }
935
936 #[inline]
937 pub fn set(&self, d: PyDictRef) {
938 self.replace(d);
939 }
940
941 #[inline]
942 pub fn replace(&self, d: PyDictRef) -> PyDictRef {
943 core::mem::replace(&mut self.d.write(), d)
944 }
945
946 #[inline]
948 pub fn into_inner(self) -> PyDictRef {
949 self.d.into_inner()
950 }
951}
952
impl<T: PyPayload> PyInner<T> {
    /// Free the allocation backing `ptr`, mirroring the layout logic of
    /// [`PyInner::new`]: types needing dict/slots or weakref support carry
    /// prefix segments, so the true allocation starts *before* the `PyInner`.
    ///
    /// # Safety
    /// `ptr` must come from `PyInner::<T>::new` with the same `T`, be live,
    /// and must not be used afterwards; the type's flags must still describe
    /// the same prefix layout as at allocation time.
    unsafe fn dealloc(ptr: *mut Self) {
        unsafe {
            let (flags, member_count) = (*ptr).read_type_flags();
            let has_ext =
                flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
            let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);

            if has_ext || has_weakref {
                // Recompute the combined layout exactly as `new` built it:
                // [ObjExt?][WeakRefList?][PyInner<T>].
                let mut layout = core::alloc::Layout::from_size_align(0, 1).unwrap();

                if has_ext {
                    layout = layout
                        .extend(core::alloc::Layout::new::<ObjExt>())
                        .unwrap()
                        .0;
                }
                if has_weakref {
                    layout = layout
                        .extend(core::alloc::Layout::new::<WeakRefList>())
                        .unwrap()
                        .0;
                }
                let (combined, inner_offset) =
                    layout.extend(core::alloc::Layout::new::<Self>()).unwrap();
                let combined = combined.pad_to_align();

                // Start of the real allocation, prefixes included.
                let alloc_ptr = (ptr as *mut u8).sub(inner_offset);

                // Run the header/payload destructors before freeing.
                core::ptr::drop_in_place(ptr);

                if has_ext {
                    // When present, ObjExt is always the first segment.
                    // (WeakRefList holds only atomics and needs no drop.)
                    core::ptr::drop_in_place(alloc_ptr as *mut ObjExt);
                }
                alloc::alloc::dealloc(alloc_ptr, combined);
            } else {
                // Plain objects are a simple Box allocation.
                drop(Box::from_raw(ptr));
            }
        }
    }
}
1004
impl<T: PyPayload + core::fmt::Debug> PyInner<T> {
    /// Allocate and initialize a new object.
    ///
    /// When the type needs an instance dict/slots (`HAS_DICT` or members) or
    /// weakref support, the allocation is laid out as
    /// `[ObjExt?][WeakRefList?][PyInner<T>]` and the returned pointer is to
    /// the `PyInner` segment; the prefixes are later recovered by negative
    /// offsets (see `ext_ref`/`weakref_list_ref`). Otherwise a plain `Box`
    /// allocation is used.
    fn new(payload: T, typ: PyTypeRef, dict: Option<PyDictRef>) -> *mut Self {
        let member_count = typ.slots.member_count;
        let needs_ext = typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_DICT)
            || member_count > 0;
        let needs_weakref = typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
        debug_assert!(
            needs_ext || dict.is_none(),
            "dict passed to type '{}' without HAS_DICT flag",
            typ.name()
        );

        if needs_ext || needs_weakref {
            // Build the combined layout segment by segment, recording each
            // segment's offset from the allocation start.
            let mut layout = core::alloc::Layout::from_size_align(0, 1).unwrap();

            let ext_start = if needs_ext {
                let (combined, offset) =
                    layout.extend(core::alloc::Layout::new::<ObjExt>()).unwrap();
                layout = combined;
                Some(offset)
            } else {
                None
            };

            let weakref_start = if needs_weakref {
                let (combined, offset) = layout
                    .extend(core::alloc::Layout::new::<WeakRefList>())
                    .unwrap();
                layout = combined;
                Some(offset)
            } else {
                None
            };

            let (combined, inner_offset) =
                layout.extend(core::alloc::Layout::new::<Self>()).unwrap();
            let combined = combined.pad_to_align();

            let alloc_ptr = unsafe { alloc::alloc::alloc(combined) };
            if alloc_ptr.is_null() {
                alloc::alloc::handle_alloc_error(combined);
            }
            // Expose provenance so the prefix lookups can later reconstruct
            // pointers from raw addresses (with_exposed_provenance).
            alloc_ptr.expose_provenance();

            unsafe {
                if let Some(offset) = ext_start {
                    let ext_ptr = alloc_ptr.add(offset) as *mut ObjExt;
                    ext_ptr.write(ObjExt::new(dict, member_count));
                }

                if let Some(offset) = weakref_start {
                    let weakref_ptr = alloc_ptr.add(offset) as *mut WeakRefList;
                    weakref_ptr.write(WeakRefList::new());
                }

                let inner_ptr = alloc_ptr.add(inner_offset) as *mut Self;
                inner_ptr.write(Self {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<T>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    typ: PyAtomicRef::from(typ),
                    payload,
                });
                inner_ptr
            }
        } else {
            Box::into_raw(Box::new(Self {
                ref_count: RefCount::new(),
                vtable: PyObjVTable::of::<T>(),
                gc_bits: Radium::new(0),
                gc_generation: Radium::new(GC_UNTRACKED),
                gc_pointers: Pointers::new(),
                typ: PyAtomicRef::from(typ),
                payload,
            }))
        }
    }
}
1096
/// Layout of the object header + payload for `T`. Note this covers only the
/// `PyInner` segment, not any ObjExt/WeakRefList prefixes.
pub(crate) const fn pyinner_layout<T: PyPayload>() -> core::alloc::Layout {
    core::alloc::Layout::new::<PyInner<T>>()
}
1101
1102pub(crate) struct FreeList<T: PyPayload> {
1108 items: Vec<*mut PyObject>,
1109 _marker: core::marker::PhantomData<T>,
1110}
1111
1112impl<T: PyPayload> FreeList<T> {
1113 pub(crate) const fn new() -> Self {
1114 Self {
1115 items: Vec::new(),
1116 _marker: core::marker::PhantomData,
1117 }
1118 }
1119}
1120
1121impl<T: PyPayload> Default for FreeList<T> {
1122 fn default() -> Self {
1123 Self::new()
1124 }
1125}
1126
impl<T: PyPayload> Drop for FreeList<T> {
    fn drop(&mut self) {
        // Release the cached allocations. Payload destructors are NOT run
        // here; entries are assumed to have been torn down before being
        // pushed — NOTE(review): confirm against freelist_push/pop users.
        // NOTE(review): freed with the plain `PyInner<T>` layout, which
        // assumes freelisted objects never carry ObjExt/WeakRefList prefixes;
        // confirm this follows from `default_dealloc`'s push condition.
        for ptr in self.items.drain(..) {
            unsafe {
                alloc::alloc::dealloc(ptr as *mut u8, core::alloc::Layout::new::<PyInner<T>>());
            }
        }
    }
}
1142
// Deref to the underlying Vec — presumably so freelist push/pop sites can use
// Vec's API directly on the FreeList; confirm against callers.
impl<T: PyPayload> core::ops::Deref for FreeList<T> {
    type Target = Vec<*mut PyObject>;
    fn deref(&self) -> &Self::Target {
        &self.items
    }
}

impl<T: PyPayload> core::ops::DerefMut for FreeList<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.items
    }
}
1155
/// An owned, reference-counted handle to a Python object (one strong ref).
/// `#[repr(transparent)]` over a non-null pointer, so `Option<PyObjectRef>`
/// stays pointer-sized.
#[repr(transparent)]
pub struct PyObjectRef {
    ptr: NonNull<PyObject>,
}
1165
1166impl Clone for PyObjectRef {
1167 #[inline(always)]
1168 fn clone(&self) -> Self {
1169 (**self).to_owned()
1170 }
1171}
1172
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY: refcounting and the header fields are atomic in threaded
        // builds — NOTE(review): overall soundness also relies on payload
        // types being Send/Sync-checked at their own definitions; confirm.
        unsafe impl Send for PyObjectRef {}
        unsafe impl Sync for PyObjectRef {}
    }
}
1179
/// Borrowed, type-erased view of any Python object: `#[repr(transparent)]`
/// over `PyInner<Erased>`, so `&PyObject` and concrete `PyInner<T>` pointers
/// interconvert by casts (see the vtable helpers above).
#[repr(transparent)]
pub struct PyObject(PyInner<Erased>);
1182
impl Deref for PyObjectRef {
    type Target = PyObject;

    #[inline(always)]
    fn deref(&self) -> &PyObject {
        // SAFETY: an owned PyObjectRef holds a strong reference, so the
        // pointee stays live at least as long as `self` is borrowed.
        unsafe { self.ptr.as_ref() }
    }
}
1191
impl ToOwned for PyObject {
    type Owned = PyObjectRef;

    /// Create a new strong reference (unconditionally increments the
    /// refcount; see `try_to_owned` for the checked variant).
    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        self.0.ref_count.inc();
        PyObjectRef {
            ptr: NonNull::from(self),
        }
    }
}
1203
impl PyObject {
    /// Like `to_owned`, but returns `None` when the object is already being
    /// destroyed and its refcount cannot be safely revived.
    #[inline]
    pub fn try_to_owned(&self) -> Option<PyObjectRef> {
        if self.0.ref_count.safe_inc() {
            Some(PyObjectRef {
                ptr: NonNull::from(self),
            })
        } else {
            None
        }
    }

    /// Raw-pointer variant of [`Self::try_to_owned`] that avoids forming a
    /// `&PyObject` before the refcount increment succeeds.
    ///
    /// # Safety
    /// `ptr` must point to an allocated object whose memory has not been
    /// freed yet (its refcount may already be zero).
    #[inline]
    pub unsafe fn try_to_owned_from_ptr(ptr: *mut Self) -> Option<PyObjectRef> {
        let inner = ptr.cast::<PyInner<Erased>>();
        // Only the ref_count field is touched; the rest of the object may be
        // mid-teardown.
        let ref_count = unsafe { &*core::ptr::addr_of!((*inner).ref_count) };
        if ref_count.safe_inc() {
            Some(PyObjectRef {
                ptr: unsafe { NonNull::new_unchecked(ptr) },
            })
        } else {
            None
        }
    }
}
1243
impl PyObjectRef {
    /// Leak this strong reference as a raw pointer (no decrement); pair with
    /// [`Self::from_raw`] to avoid a permanent leak.
    #[inline(always)]
    pub const fn into_raw(self) -> NonNull<PyObject> {
        let ptr = self.ptr;
        core::mem::forget(self);
        ptr
    }

    /// Reconstitute an owned reference produced by [`Self::into_raw`].
    ///
    /// # Safety
    /// `ptr` must carry ownership of exactly one strong reference.
    #[inline(always)]
    pub const unsafe fn from_raw(ptr: NonNull<PyObject>) -> Self {
        Self { ptr }
    }

    /// Downcast to a concrete payload type, handing `self` back on mismatch.
    #[inline(always)]
    pub fn downcast<T: PyPayload>(self) -> Result<PyRef<T>, Self> {
        if self.downcastable::<T>() {
            Ok(unsafe { self.downcast_unchecked() })
        } else {
            Err(self)
        }
    }

    /// Downcast with a Python-level error on mismatch.
    pub fn try_downcast<T: PyPayload>(self, vm: &VirtualMachine) -> PyResult<PyRef<T>> {
        T::try_downcast_from(&self, vm)?;
        Ok(unsafe { self.downcast_unchecked() })
    }

    /// # Safety
    /// The payload must actually be a `T` (e.g. `downcastable::<T>()` holds).
    #[inline(always)]
    pub unsafe fn downcast_unchecked<T>(self) -> PyRef<T> {
        // ManuallyDrop transfers the strong reference into the PyRef without
        // running this wrapper's Drop.
        let obj = ManuallyDrop::new(self);
        PyRef {
            ptr: obj.ptr.cast(),
        }
    }

    /// Downcast only when the object's class is *exactly* `T::class` (no
    /// subclasses accepted).
    #[inline]
    pub fn downcast_exact<T: PyPayload>(self, vm: &VirtualMachine) -> Result<PyRefExact<T>, Self> {
        if self.class().is(T::class(&vm.ctx)) {
            // An exact class match must imply a matching payload.
            assert!(
                self.downcastable::<T>(),
                "obj.__class__ is T::class() but payload is not T"
            );
            Ok(unsafe { PyRefExact::new_unchecked(PyRef::from_obj_unchecked(self)) })
        } else {
            Err(self)
        }
    }
}
1315
1316impl PyObject {
    /// The weakref list stored as an allocation prefix, if this object's
    /// type supports weak references.
    #[inline(always)]
    fn weak_ref_list(&self) -> Option<&WeakRefList> {
        self.0.weakref_list_ref()
    }

    /// First entry of the weakref list as an object, or `None` when the type
    /// is unsupported, the list is empty, or the head is mid-destruction.
    pub(crate) fn get_weakrefs(&self) -> Option<PyObjectRef> {
        let wrl = self.weak_ref_list()?;
        let _lock = weakref_lock::lock(self as *const PyObject as usize);
        let head_ptr = wrl.head.load(Ordering::Relaxed);
        if head_ptr.is_null() {
            None
        } else {
            let head = unsafe { &*head_ptr };
            // Skip a head node whose refcount can no longer be revived.
            if head.0.ref_count.safe_inc() {
                Some(unsafe { PyRef::from_raw(head_ptr) }.into())
            } else {
                None
            }
        }
    }
1341
1342 pub(crate) fn downgrade_with_weakref_typ_opt(
1343 &self,
1344 callback: Option<PyObjectRef>,
1345 typ: PyTypeRef,
1347 ) -> Option<PyRef<PyWeak>> {
1348 self.weak_ref_list()
1349 .map(|wrl| wrl.add(self, typ, true, callback, None))
1350 }
1351
    /// Create a weakref of (possibly subclass) type `typ` to `self`,
    /// erroring when `self`'s type does not support weak references. A dict
    /// is attached to the weakref itself when `typ` has HAS_DICT.
    pub(crate) fn downgrade_with_typ(
        &self,
        callback: Option<PyObjectRef>,
        typ: PyTypeRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<PyWeak>> {
        // Cheap flag check first, so the error is raised before allocating
        // the weakref's dict below.
        if !self
            .class()
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_WEAKREF)
        {
            return Err(vm.new_type_error(format!(
                "cannot create weak reference to '{}' object",
                self.class().name()
            )));
        }
        let dict = if typ
            .slots
            .flags
            .has_feature(crate::types::PyTypeFlags::HAS_DICT)
        {
            Some(vm.ctx.new_dict())
        } else {
            None
        };
        let cls_is_weakref = typ.is(vm.ctx.types.weakref_type);
        // Should succeed given the flag check above; kept as a second guard
        // because weak_ref_list re-reads the flags independently.
        let wrl = self.weak_ref_list().ok_or_else(|| {
            vm.new_type_error(format!(
                "cannot create weak reference to '{}' object",
                self.class().name()
            ))
        })?;
        Ok(wrl.add(self, typ, cls_is_weakref, callback, dict))
    }
1388
1389 pub fn downgrade(
1390 &self,
1391 callback: Option<PyObjectRef>,
1392 vm: &VirtualMachine,
1393 ) -> PyResult<PyRef<PyWeak>> {
1394 self.downgrade_with_typ(callback, vm.ctx.types.weakref_type.to_owned(), vm)
1395 }
1396
1397 pub fn get_weak_references(&self) -> Option<Vec<PyRef<PyWeak>>> {
1398 self.weak_ref_list()
1399 .map(|wrl| wrl.get_weak_references(self))
1400 }
1401
    /// Whether the payload is exactly `T` (vtable type-id check only).
    #[deprecated(note = "use downcastable instead")]
    #[inline(always)]
    pub fn payload_is<T: PyPayload>(&self) -> bool {
        self.0.vtable.typeid == T::PAYLOAD_TYPE_ID
    }

    /// # Safety
    /// The payload must actually be a `T`.
    #[deprecated(note = "use downcast_unchecked_ref instead")]
    #[inline(always)]
    pub const unsafe fn payload_unchecked<T: PyPayload>(&self) -> &T {
        // SAFETY (caller): matching payload type; #[repr(C)] makes the cast
        // to the concrete PyInner<T> valid.
        let inner = unsafe { &*(&self.0 as *const PyInner<Erased> as *const PyInner<T>) };
        &inner.payload
    }

    /// Borrow the payload as `T` when the type-id matches.
    #[deprecated(note = "use downcast_ref instead")]
    #[inline(always)]
    pub fn payload<T: PyPayload>(&self) -> Option<&T> {
        #[allow(deprecated)]
        if self.payload_is::<T>() {
            #[allow(deprecated)]
            Some(unsafe { self.payload_unchecked() })
        } else {
            None
        }
    }

    /// The object's Python type.
    #[inline(always)]
    pub fn class(&self) -> &Py<PyType> {
        self.0.typ.deref()
    }

    /// `__class__` assignment: atomically swap the type, handing the old
    /// reference to the VM (presumably a deferred-release mechanism; see
    /// `PyAtomicRef::swap_to_temporary_refs`).
    pub fn set_class(&self, typ: PyTypeRef, vm: &VirtualMachine) {
        self.0.typ.swap_to_temporary_refs(typ, vm);
    }

    /// Payload access gated on an *exact* class match (no subclasses).
    #[deprecated(note = "use downcast_ref_if_exact instead")]
    #[inline(always)]
    pub fn payload_if_exact<T: PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
        if self.class().is(T::class(&vm.ctx)) {
            #[allow(deprecated)]
            self.payload()
        } else {
            None
        }
    }
1452
1453 #[inline(always)]
1454 fn instance_dict(&self) -> Option<&InstanceDict> {
1455 self.0.ext_ref().and_then(|ext| ext.dict.as_ref())
1456 }
1457
1458 #[inline(always)]
1459 pub fn dict(&self) -> Option<PyDictRef> {
1460 self.instance_dict().map(|d| d.get())
1461 }
1462
1463 pub fn set_dict(&self, dict: PyDictRef) -> Result<(), PyDictRef> {
1466 match self.instance_dict() {
1467 Some(d) => {
1468 d.set(dict);
1469 Ok(())
1470 }
1471 None => Err(dict),
1472 }
1473 }
1474
1475 #[deprecated(note = "use downcast_ref instead")]
1476 #[inline(always)]
1477 pub fn payload_if_subclass<T: crate::PyPayload>(&self, vm: &VirtualMachine) -> Option<&T> {
1478 if self.class().fast_issubclass(T::class(&vm.ctx)) {
1479 #[allow(deprecated)]
1480 self.payload()
1481 } else {
1482 None
1483 }
1484 }
1485
    /// The `TypeId` of the concrete payload, as recorded in the vtable.
    #[inline]
    pub(crate) fn typeid(&self) -> TypeId {
        self.0.vtable.typeid
    }
1490
    /// Whether this object can be downcast to `T`: the payload type id must
    /// match, and `T`'s extra validation hook must accept the object.
    #[inline(always)]
    pub fn downcastable<T: PyPayload>(&self) -> bool {
        // SAFETY: validate_downcastable_from is only called after the type id
        // check confirms the payload is a T.
        self.typeid() == T::PAYLOAD_TYPE_ID && unsafe { T::validate_downcastable_from(self) }
    }
1496
    /// Downcasts to `&Py<T>`, raising a Python-level error (via
    /// `T::try_downcast_from`) instead of returning `None` on mismatch.
    pub fn try_downcast_ref<'a, T: PyPayload>(
        &'a self,
        vm: &VirtualMachine,
    ) -> PyResult<&'a Py<T>> {
        T::try_downcast_from(self, vm)?;
        // SAFETY: try_downcast_from succeeded, so the payload is a T.
        Ok(unsafe { self.downcast_unchecked_ref::<T>() })
    }
1505
1506 #[inline(always)]
1508 pub fn downcast_ref<T: PyPayload>(&self) -> Option<&Py<T>> {
1509 if self.downcastable::<T>() {
1510 Some(unsafe { self.downcast_unchecked_ref::<T>() })
1513 } else {
1514 None
1515 }
1516 }
1517
1518 #[inline(always)]
1519 pub fn downcast_ref_if_exact<T: PyPayload>(&self, vm: &VirtualMachine) -> Option<&Py<T>> {
1520 self.class()
1521 .is(T::class(&vm.ctx))
1522 .then(|| unsafe { self.downcast_unchecked_ref::<T>() })
1523 }
1524
    /// Reinterprets this object as `&Py<T>` without checking.
    ///
    /// # Safety
    /// The caller must ensure `self.downcastable::<T>()` holds.
    #[inline(always)]
    pub unsafe fn downcast_unchecked_ref<T: PyPayload>(&self) -> &Py<T> {
        debug_assert!(self.downcastable::<T>());
        // Both types are repr(transparent) wrappers over PyInner.
        unsafe { &*(self as *const Self as *const Py<T>) }
    }
1533
    /// Current strong reference count (`sys.getrefcount`-style value).
    #[inline(always)]
    pub fn strong_count(&self) -> usize {
        self.0.ref_count.get()
    }
1538
1539 #[inline]
1540 pub fn weak_count(&self) -> Option<usize> {
1541 self.weak_ref_list().map(|wrl| wrl.count(self))
1542 }
1543
    /// Raw pointer to this object; does not affect the reference count.
    #[inline(always)]
    pub const fn as_raw(&self) -> *const Self {
        self
    }
1548
1549 #[inline]
1552 pub(crate) fn gc_finalized(&self) -> bool {
1553 GcBits::from_bits_retain(self.0.gc_bits.load(Ordering::Relaxed)).contains(GcBits::FINALIZED)
1554 }
1555
    /// Marks the object as finalized so `__del__` runs at most once.
    #[inline]
    pub(crate) fn set_gc_finalized(&self) {
        self.set_gc_bit(GcBits::FINALIZED);
    }
1562
    /// Atomically ORs the given flag(s) into the GC bit field.
    #[inline]
    pub(crate) fn set_gc_bit(&self, bit: GcBits) {
        self.0.gc_bits.fetch_or(bit.bits(), Ordering::Relaxed);
    }
1568
    /// Current GC generation index for this object.
    #[inline]
    pub(crate) fn gc_generation(&self) -> u8 {
        self.0.gc_generation.load(Ordering::Relaxed)
    }
1574
    /// Moves this object into the given GC generation.
    #[inline]
    pub(crate) fn set_gc_generation(&self, generation: u8) {
        self.0.gc_generation.store(generation, Ordering::Relaxed);
    }
1581
    /// Marks the object as tracked by the cycle collector.
    #[inline]
    pub(crate) fn set_gc_tracked(&self) {
        self.set_gc_bit(GcBits::TRACKED);
    }
1587
1588 #[inline]
1590 pub(crate) fn clear_gc_tracked(&self) {
1591 self.0
1592 .gc_bits
1593 .fetch_and(!GcBits::TRACKED.bits(), Ordering::Relaxed);
1594 }
1595
    /// Runs finalization (`__del__` slot, weakref clearing) when the last
    /// strong reference goes away. Returns `Err(())` if `__del__` resurrected
    /// the object (refcount did not return to zero), in which case the caller
    /// must not deallocate it.
    #[inline(always)] fn drop_slow_inner(&self) -> Result<(), ()> {
        // Out-of-line and cold: most objects never define `__del__`.
        #[inline(never)]
        #[cold]
        fn call_slot_del(
            zelf: &PyObject,
            slot_del: fn(&PyObject, &VirtualMachine) -> PyResult<()>,
        ) -> Result<(), ()> {
            let ret = crate::vm::thread::with_vm(zelf, |vm| {
                // Temporarily revive the object (0 -> 2) so `__del__` may take
                // and drop references to it without re-triggering destruction.
                zelf.0.ref_count.inc_by(2);

                if let Err(e) = slot_del(zelf, vm) {
                    // NOTE(review): assumes a loaded `del` slot implies a
                    // reachable `__del__` attribute — unwrap panics otherwise.
                    let del_method = zelf.get_class_attr(identifier!(vm, __del__)).unwrap();
                    vm.run_unraisable(e, None, del_method);
                }

                // Undo both increments; the second dec reports whether the
                // count is back at zero (i.e. no resurrection happened).
                let _ = zelf.0.ref_count.dec();
                zelf.0.ref_count.dec()
            });
            match ret {
                Some(true) => Ok(()),   // dropped back to zero: keep destroying
                Some(false) => Err(()), // resurrected by __del__: abort drop
                None => Ok(()),         // no VM on this thread: proceed
            }
        }

        // Run `__del__` at most once per object, guarded by the FINALIZED bit.
        let del = self.class().slots.del.load();
        if let Some(slot_del) = del
            && !self.gc_finalized()
        {
            self.set_gc_finalized();
            call_slot_del(self, slot_del)?;
        }

        // Invalidate outstanding weak references before the memory goes away.
        if let Some(wrl) = self.weak_ref_list() {
            wrl.clear(self);
        }

        Ok(())
    }
1651
    /// Deallocates the object through its vtable's type-specific dealloc fn.
    ///
    /// # Safety
    /// `ptr` must point to a live object whose refcount has reached zero.
    #[inline(never)]
    unsafe fn drop_slow(ptr: NonNull<Self>) {
        // Read the dealloc fn before the object is freed out from under us.
        let dealloc = unsafe { ptr.as_ref().0.vtable.dealloc };
        unsafe { dealloc(ptr.as_ptr()) }
    }
1658
    /// Marks this object as interned: its refcount is leaked so it is never
    /// deallocated for the lifetime of the process.
    ///
    /// # Safety
    /// Caller must ensure the object genuinely lives forever (interned).
    pub(crate) unsafe fn mark_intern(&self) {
        self.0.ref_count.leak();
    }
1664
    /// Whether this object has been interned (refcount leaked).
    pub(crate) fn is_interned(&self) -> bool {
        self.0.ref_count.is_leaked()
    }
1668
    /// Reads the `__slots__` member at `offset`. Panics if the object has no
    /// extension header — callers must only use offsets valid for this type.
    pub(crate) fn get_slot(&self, offset: usize) -> Option<PyObjectRef> {
        self.0.ext_ref().unwrap().slots[offset].read().clone()
    }
1672
    /// Writes the `__slots__` member at `offset` (None deletes the value).
    /// Panics if the object has no extension header.
    pub(crate) fn set_slot(&self, offset: usize, value: Option<PyObjectRef>) {
        *self.0.ext_ref().unwrap().slots[offset].write() = value;
    }
1676
1677 pub fn is_gc_tracked(&self) -> bool {
1679 GcBits::from_bits_retain(self.0.gc_bits.load(Ordering::Relaxed)).contains(GcBits::TRACKED)
1680 }
1681
1682 pub fn gc_get_referents(&self) -> Vec<PyObjectRef> {
1685 let mut result = Vec::new();
1686 self.0.traverse(&mut |child: &PyObject| {
1687 result.push(child.to_owned());
1688 });
1689 result
1690 }
1691
    /// Invokes `__del__` once (guarded by the FINALIZED bit) if the class
    /// defines one and a VM is available on this thread. Errors raised by the
    /// finalizer are reported through `sys.unraisablehook`, never propagated.
    pub fn try_call_finalizer(&self) {
        let del = self.class().slots.del.load();
        if let Some(slot_del) = del
            && !self.gc_finalized()
        {
            // Set the bit *before* calling, so a re-entrant drop can't run
            // the finalizer a second time.
            self.set_gc_finalized();
            let result = crate::vm::thread::with_vm(self, |vm| {
                if let Err(e) = slot_del(self, vm)
                    && let Some(del_method) = self.get_class_attr(identifier!(vm, __del__))
                {
                    vm.run_unraisable(e, None, del_method);
                }
            });
            // with_vm returns None when no VM is attached; nothing to do then.
            let _ = result;
        }
    }
1714
1715 pub fn gc_clear_weakrefs_collect_callbacks(&self) -> Vec<(PyRef<PyWeak>, PyObjectRef)> {
1720 if let Some(wrl) = self.weak_ref_list() {
1721 wrl.clear_for_gc_collect_callbacks(self)
1722 } else {
1723 vec![]
1724 }
1725 }
1726
    /// Collects raw pointers to every referent, without touching refcounts.
    ///
    /// # Safety
    /// The returned pointers borrow from live objects; the caller must not use
    /// them after any referent may have been freed.
    pub unsafe fn gc_get_referent_ptrs(&self) -> Vec<NonNull<PyObject>> {
        let mut result = Vec::new();
        self.0.traverse(&mut |child: &PyObject| {
            result.push(NonNull::from(child));
        });
        result
    }
1742
    /// tp_clear-style cycle breaking: drops the object's outgoing references
    /// (payload clear fn, instance `__dict__`, `__slots__` values) and returns
    /// them so the caller controls when they are actually dropped.
    ///
    /// # Safety
    /// `ptr` must point to a live object; no other thread may be mutating its
    /// ext header concurrently.
    pub unsafe fn gc_clear_raw(ptr: *mut PyObject) -> Vec<PyObjectRef> {
        let mut result = Vec::new();
        let obj = unsafe { &*ptr };

        // Type-specific clear hook, if the payload has one.
        if let Some(clear_fn) = obj.0.vtable.clear {
            unsafe { clear_fn(ptr, &mut result) };
        }

        // The ext header (dict/slots) lives *before* the PyInner allocation;
        // recompute its address from the type's layout flags.
        let (flags, member_count) = obj.0.read_type_flags();
        let has_ext = flags.has_feature(crate::types::PyTypeFlags::HAS_DICT) || member_count > 0;
        if has_ext {
            let has_weakref = flags.has_feature(crate::types::PyTypeFlags::HAS_WEAKREF);
            let offset = if has_weakref {
                WEAKREF_OFFSET + EXT_OFFSET
            } else {
                EXT_OFFSET
            };
            // NOTE(review): relies on the allocation's provenance having been
            // exposed at alloc time so with_exposed_provenance_mut is valid.
            let self_addr = (ptr as *const u8).addr();
            let ext_ptr =
                core::ptr::with_exposed_provenance_mut::<ObjExt>(self_addr.wrapping_sub(offset));
            let ext = unsafe { &mut *ext_ptr };
            // Hand the instance dict back to the caller instead of dropping
            // it here (dropping may run arbitrary Python code).
            if let Some(old_dict) = ext.dict.take() {
                let dict_ref = old_dict.into_inner();
                result.push(dict_ref.into());
            }
            // Same for each populated __slots__ member.
            for slot in ext.slots.iter() {
                if let Some(val) = slot.write().take() {
                    result.push(val);
                }
            }
        }

        result
    }
1791
    /// Convenience wrapper around [`Self::gc_clear_raw`] for a borrowed object.
    ///
    /// # Safety
    /// Same contract as `gc_clear_raw`; the shared borrow is cast to a mutable
    /// pointer, so no other mutation may be happening concurrently.
    pub unsafe fn gc_clear(&self) -> Vec<PyObjectRef> {
        unsafe { Self::gc_clear_raw(self as *const _ as *mut PyObject) }
    }
1805
1806 pub fn gc_has_clear(&self) -> bool {
1809 self.0.vtable.clear.is_some()
1810 || self
1811 .0
1812 .ext_ref()
1813 .is_some_and(|ext| ext.dict.is_some() || !ext.slots.is_empty())
1814 }
1815}
1816
/// An owned reference borrows as the underlying object (deref coercion).
impl Borrow<PyObject> for PyObjectRef {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        self
    }
}
1823
/// An owned reference converts to `&PyObject` (deref coercion).
impl AsRef<PyObject> for PyObjectRef {
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self
    }
}
1830
/// Erases the payload type of a borrowed `Py<T>`, yielding `&PyObject`.
impl<'a, T: PyPayload> From<&'a Py<T>> for &'a PyObject {
    #[inline(always)]
    fn from(py_ref: &'a Py<T>) -> Self {
        py_ref.as_object()
    }
}
1837
/// Dropping an owned reference decrements the refcount and, when it reaches
/// zero, deallocates the object through its vtable.
impl Drop for PyObjectRef {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `dec` returning true means we held the last reference.
        if self.0.ref_count.dec() {
            unsafe { PyObject::drop_slow(self.ptr) }
        }
    }
}
1846
/// Debug formatting dispatches to the payload-specific formatter in the vtable.
impl fmt::Debug for PyObject {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // SAFETY: the vtable's debug fn matches this object's payload type.
        unsafe { (self.0.vtable.debug)(self, f) }
    }
}
1854
1855impl fmt::Debug for PyObjectRef {
1856 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1857 self.as_object().fmt(f)
1858 }
1859}
1860
/// Low pointer bit used to tag a `PyStackRef` as borrowed (pointers are at
/// least 2-aligned, so bit 0 is free).
const STACKREF_BORROW_TAG: usize = 1;

/// A tagged-pointer reference used on the evaluation stack: either an owned
/// strong reference (untagged bits) or a non-owning borrow (bit 0 set).
/// `NonZeroUsize` gives `Option<PyStackRef>` the pointer niche for free.
#[repr(transparent)]
pub struct PyStackRef {
    bits: NonZeroUsize,
}
1878
1879impl PyStackRef {
1880 #[inline(always)]
1883 pub fn new_owned(obj: PyObjectRef) -> Self {
1884 let ptr = obj.into_raw();
1885 let bits = ptr.as_ptr() as usize;
1886 debug_assert!(
1887 bits & STACKREF_BORROW_TAG == 0,
1888 "PyObject pointer must be aligned"
1889 );
1890 Self {
1891 bits: unsafe { NonZeroUsize::new_unchecked(bits) },
1893 }
1894 }
1895
1896 #[inline(always)]
1904 pub unsafe fn new_borrowed(obj: &PyObject) -> Self {
1905 let bits = (obj as *const PyObject as usize) | STACKREF_BORROW_TAG;
1906 Self {
1907 bits: unsafe { NonZeroUsize::new_unchecked(bits) },
1909 }
1910 }
1911
1912 #[inline(always)]
1914 pub fn is_borrowed(&self) -> bool {
1915 self.bits.get() & STACKREF_BORROW_TAG != 0
1916 }
1917
1918 #[inline(always)]
1920 pub fn as_object(&self) -> &PyObject {
1921 unsafe { &*((self.bits.get() & !STACKREF_BORROW_TAG) as *const PyObject) }
1922 }
1923
1924 #[inline(always)]
1929 pub fn to_pyobj(self) -> PyObjectRef {
1930 let obj = if self.is_borrowed() {
1931 self.as_object().to_owned() } else {
1933 let ptr = unsafe { NonNull::new_unchecked(self.bits.get() as *mut PyObject) };
1934 unsafe { PyObjectRef::from_raw(ptr) }
1935 };
1936 core::mem::forget(self); obj
1938 }
1939
1940 #[inline(always)]
1943 pub fn promote(&mut self) {
1944 if self.is_borrowed() {
1945 self.as_object().0.ref_count.inc();
1946 self.bits =
1948 unsafe { NonZeroUsize::new_unchecked(self.bits.get() & !STACKREF_BORROW_TAG) };
1949 }
1950 }
1951}
1952
1953impl Drop for PyStackRef {
1954 #[inline]
1955 fn drop(&mut self) {
1956 if !self.is_borrowed() {
1957 let ptr = unsafe { NonNull::new_unchecked(self.bits.get() as *mut PyObject) };
1959 drop(unsafe { PyObjectRef::from_raw(ptr) });
1960 }
1961 }
1963}
1964
/// A stack ref dereferences to the object it points at, owned or borrowed.
impl core::ops::Deref for PyStackRef {
    type Target = PyObject;

    #[inline(always)]
    fn deref(&self) -> &PyObject {
        self.as_object()
    }
}
1973
/// Cloning always produces an *owned* stack ref (a new strong reference),
/// even when the original was a borrow.
impl Clone for PyStackRef {
    #[inline(always)]
    fn clone(&self) -> Self {
        Self::new_owned(self.as_object().to_owned())
    }
}
1981
1982impl fmt::Debug for PyStackRef {
1983 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1984 if self.is_borrowed() {
1985 write!(f, "PyStackRef(borrowed, ")?;
1986 } else {
1987 write!(f, "PyStackRef(owned, ")?;
1988 }
1989 self.as_object().fmt(f)?;
1990 write!(f, ")")
1991 }
1992}
1993
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY(review): assumed sound because PyStackRef is a (possibly
        // tagged) pointer to a PyObject with an atomic refcount — confirm
        // borrowed refs are never shared past their referent's lifetime.
        unsafe impl Send for PyStackRef {}
        unsafe impl Sync for PyStackRef {}
    }
}
2000
// Compile-time checks: the NonZeroUsize niche must make Option<PyStackRef>
// exactly pointer-sized, matching Option<PyObjectRef>.
const _: () = assert!(
    core::mem::size_of::<Option<PyStackRef>>() == core::mem::size_of::<Option<PyObjectRef>>()
);
const _: () =
    assert!(core::mem::size_of::<Option<PyStackRef>>() == core::mem::size_of::<PyStackRef>());
2007
/// A borrowed, payload-typed view of an object; `repr(transparent)` over
/// `PyInner<T>` so it can be pointer-cast to/from `PyObject`.
#[repr(transparent)]
pub struct Py<T>(PyInner<T>);
2010
2011impl<T: PyPayload> Py<T> {
2012 pub fn downgrade(
2013 &self,
2014 callback: Option<PyObjectRef>,
2015 vm: &VirtualMachine,
2016 ) -> PyResult<PyWeakRef<T>> {
2017 Ok(PyWeakRef {
2018 weak: self.as_object().downgrade(callback, vm)?,
2019 _marker: PhantomData,
2020 })
2021 }
2022
2023 #[inline]
2024 pub fn payload(&self) -> &T {
2025 &self.0.payload
2026 }
2027}
2028
/// Taking ownership of a borrowed `Py<T>` bumps the refcount and yields a
/// `PyRef<T>`.
impl<T> ToOwned for Py<T> {
    type Owned = PyRef<T>;

    #[inline(always)]
    fn to_owned(&self) -> Self::Owned {
        self.0.ref_count.inc();
        PyRef {
            ptr: NonNull::from(self),
        }
    }
}
2040
/// `Py<T>` dereferences straight to its payload.
impl<T> Deref for Py<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0.payload
    }
}
2049
/// Type-erases a typed borrow into a `&PyObject`.
impl<T: PyPayload> Borrow<PyObject> for Py<T> {
    #[inline(always)]
    fn borrow(&self) -> &PyObject {
        // SAFETY: PyObject is repr(transparent) over PyInner<Erased>, which
        // shares a layout prefix with PyInner<T>.
        unsafe { &*(&self.0 as *const PyInner<T> as *const PyObject) }
    }
}
2056
2057impl<T> core::hash::Hash for Py<T>
2058where
2059 T: core::hash::Hash + PyPayload,
2060{
2061 #[inline]
2062 fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
2063 self.deref().hash(state)
2064 }
2065}
2066
2067impl<T> PartialEq for Py<T>
2068where
2069 T: PartialEq + PyPayload,
2070{
2071 #[inline]
2072 fn eq(&self, other: &Self) -> bool {
2073 self.deref().eq(other.deref())
2074 }
2075}
2076
2077impl<T> Eq for Py<T> where T: Eq + PyPayload {}
2078
/// Type-erased view, routed through the `Borrow` impl above.
impl<T> AsRef<PyObject> for Py<T>
where
    T: PyPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}
2088
2089impl<T: PyPayload + core::fmt::Debug> fmt::Debug for Py<T> {
2090 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2091 (**self).fmt(f)
2092 }
2093}
2094
/// An owning, payload-typed strong reference; `repr(transparent)` over the
/// pointer, so `Option<PyRef<T>>` is pointer-sized via the NonNull niche.
#[repr(transparent)]
pub struct PyRef<T> {
    ptr: NonNull<Py<T>>,
}
2108
cfg_if::cfg_if! {
    if #[cfg(feature = "threading")] {
        // SAFETY(review): assumed sound because the refcount is atomic under
        // the threading feature — confirm payloads are themselves Sync-safe.
        unsafe impl<T> Send for PyRef<T> {}
        unsafe impl<T> Sync for PyRef<T> {}
    }
}
2115
2116impl<T: fmt::Debug> fmt::Debug for PyRef<T> {
2117 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2118 (**self).fmt(f)
2119 }
2120}
2121
/// Dropping a typed strong reference decrements the refcount and deallocates
/// the object when it was the last one.
impl<T> Drop for PyRef<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `dec` returning true means we held the last reference; the
        // cast is valid because Py<T> and PyObject share a layout prefix.
        if self.0.ref_count.dec() {
            unsafe { PyObject::drop_slow(self.ptr.cast::<PyObject>()) }
        }
    }
}
2130
2131impl<T> Clone for PyRef<T> {
2132 #[inline(always)]
2133 fn clone(&self) -> Self {
2134 (**self).to_owned()
2135 }
2136}
2137
impl<T: PyPayload> PyRef<T> {
    /// Adopts a raw pointer as an owned reference.
    ///
    /// # Safety
    /// `ptr` must point to a live `Py<T>` and carry one strong reference that
    /// this `PyRef` takes ownership of.
    #[inline(always)]
    pub(crate) const unsafe fn from_non_null(ptr: NonNull<Py<T>>) -> Self {
        Self { ptr }
    }

    /// Adopts a raw pointer as an owned reference.
    ///
    /// # Safety
    /// Same as [`Self::from_non_null`]; additionally `raw` must be non-null.
    #[inline(always)]
    pub(crate) const unsafe fn from_raw(raw: *const Py<T>) -> Self {
        unsafe { Self::from_non_null(NonNull::new_unchecked(raw as *mut _)) }
    }

    /// Re-types an erased owned reference without checking.
    ///
    /// # Safety
    /// The caller must ensure the object's payload really is a `T`.
    #[inline(always)]
    unsafe fn from_obj_unchecked(obj: PyObjectRef) -> Self {
        debug_assert!(obj.downcast_ref::<T>().is_some());
        // ManuallyDrop transfers the strong reference instead of dropping it.
        let obj = ManuallyDrop::new(obj);
        Self {
            ptr: obj.ptr.cast(),
        }
    }

    /// Leaks the strong reference, yielding a `'static` borrow; the object
    /// will never be deallocated through this reference.
    pub const fn leak(pyref: Self) -> &'static Py<T> {
        let ptr = pyref.ptr;
        core::mem::forget(pyref);
        // SAFETY: the reference was just leaked, so the object lives forever.
        unsafe { ptr.as_ref() }
    }
}
2174
impl<T: PyPayload + crate::object::MaybeTraverse + core::fmt::Debug> PyRef<T> {
    /// Allocates a new object with the given payload, class and optional
    /// instance dict, reusing a freelist slot when possible, and registers it
    /// with the GC when the type can participate in cycles.
    #[inline(always)]
    pub fn new_ref(payload: T, typ: crate::builtins::PyTypeRef, dict: Option<PyDictRef>) -> Self {
        let has_dict = dict.is_some();
        let is_heaptype = typ.heaptype_ext.is_some();

        // The freelist only caches the plain layout (no dict, static type).
        let cached = if !has_dict && !is_heaptype {
            unsafe { T::freelist_pop(&payload) }
        } else {
            None
        };

        let ptr = if let Some(cached) = cached {
            // Reinitialize the recycled allocation in place.
            let inner = cached.as_ptr() as *mut PyInner<T>;
            unsafe {
                core::ptr::write(&mut (*inner).ref_count, RefCount::new());
                // NOTE(review): gc_bits is reset here but gc_generation is
                // not — confirm the GC re-tracks recycled objects correctly.
                (*inner).gc_bits.store(0, Ordering::Relaxed);
                core::ptr::drop_in_place(&mut (*inner).payload);
                core::ptr::write(&mut (*inner).payload, payload);
                // Avoid an atomic swap when the cached slot already holds the
                // same type object.
                let cached_typ: *const Py<PyType> = &*(*inner).typ;
                if core::ptr::eq(cached_typ, &*typ) {
                    drop(typ);
                } else {
                    let _old = (*inner).typ.swap(typ);
                }
            }
            unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) }
        } else {
            // Fresh allocation with the full requested layout.
            let inner = PyInner::new(payload, typ, dict);
            unsafe { NonNull::new_unchecked(inner.cast::<Py<T>>()) }
        };

        // Only traversable / dict-carrying / heap-type objects can be part of
        // reference cycles, so only those are handed to the collector.
        if <T as crate::object::MaybeTraverse>::HAS_TRAVERSE || has_dict || is_heaptype {
            let gc = crate::gc_state::gc_state();
            unsafe {
                gc.track_object(ptr.cast());
            }
            gc.maybe_collect();
        }

        Self { ptr }
    }
}
2227
impl<T: crate::class::PySubclass + core::fmt::Debug> PyRef<T>
where
    T::Base: core::fmt::Debug,
{
    /// Converts an owned reference to its declared base-class reference.
    #[inline]
    pub fn into_base(self) -> PyRef<T::Base> {
        let obj: PyObjectRef = self.into();
        match obj.downcast() {
            Ok(base_ref) => base_ref,
            // SAFETY: T: PySubclass<Base = T::Base> guarantees the downcast
            // to the base payload always succeeds.
            Err(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }
    /// Converts to a reference of ancestor type `U`; `U` must actually be a
    /// supertype (checked in debug builds only).
    #[inline]
    pub fn upcast<U: PyPayload + StaticType>(self) -> PyRef<U>
    where
        T: StaticType,
    {
        debug_assert!(T::static_type().is_subtype(U::static_type()));
        let obj: PyObjectRef = self.into();
        match obj.downcast::<U>() {
            Ok(upcast_ref) => upcast_ref,
            // SAFETY: caller-guaranteed (debug-asserted) subtype relation.
            Err(_) => unsafe { core::hint::unreachable_unchecked() },
        }
    }
}
2256
impl<T: crate::class::PySubclass> Py<T> {
    /// Reborrows as the declared base-class view.
    #[inline]
    pub fn to_base(&self) -> &Py<T::Base> {
        debug_assert!(self.as_object().downcast_ref::<T::Base>().is_some());
        // SAFETY: a PySubclass payload embeds its Base as a layout prefix,
        // and both wrappers are repr(transparent).
        unsafe { &*(self as *const Py<T> as *const Py<T::Base>) }
    }

    /// Reborrows as an ancestor type `U` (subtype relation debug-asserted).
    #[inline]
    pub fn upcast_ref<U: PyPayload + StaticType>(&self) -> &Py<U>
    where
        T: StaticType,
    {
        debug_assert!(T::static_type().is_subtype(U::static_type()));
        // SAFETY: caller-guaranteed (debug-asserted) subtype relation.
        unsafe { &*(self as *const Py<T> as *const Py<U>) }
    }
}
2278
2279impl<T> Borrow<PyObject> for PyRef<T>
2280where
2281 T: PyPayload,
2282{
2283 #[inline(always)]
2284 fn borrow(&self) -> &PyObject {
2285 (**self).as_object()
2286 }
2287}
2288
/// Type-erased view, routed through the `Borrow` impl above.
impl<T> AsRef<PyObject> for PyRef<T>
where
    T: PyPayload,
{
    #[inline(always)]
    fn as_ref(&self) -> &PyObject {
        self.borrow()
    }
}
2298
/// Type-erases an owned reference; ManuallyDrop transfers the strong
/// reference into the erased handle instead of dropping it.
impl<T> From<PyRef<T>> for PyObjectRef {
    #[inline]
    fn from(value: PyRef<T>) -> Self {
        let me = ManuallyDrop::new(value);
        Self { ptr: me.ptr.cast() }
    }
}
2306
/// An owned reference borrows as the typed view (deref coercion).
impl<T> Borrow<Py<T>> for PyRef<T> {
    #[inline(always)]
    fn borrow(&self) -> &Py<T> {
        self
    }
}
2313
/// An owned reference converts to the typed view (deref coercion).
impl<T> AsRef<Py<T>> for PyRef<T> {
    #[inline(always)]
    fn as_ref(&self) -> &Py<T> {
        self
    }
}
2320
/// `PyRef<T>` dereferences to the typed view of the object it owns.
impl<T> Deref for PyRef<T> {
    type Target = Py<T>;

    #[inline(always)]
    fn deref(&self) -> &Py<T> {
        // SAFETY: the pointer is valid for as long as this strong ref lives.
        unsafe { self.ptr.as_ref() }
    }
}
2329
2330impl<T> core::hash::Hash for PyRef<T>
2331where
2332 T: core::hash::Hash + PyPayload,
2333{
2334 #[inline]
2335 fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
2336 self.deref().hash(state)
2337 }
2338}
2339
2340impl<T> PartialEq for PyRef<T>
2341where
2342 T: PartialEq + PyPayload,
2343{
2344 #[inline]
2345 fn eq(&self, other: &Self) -> bool {
2346 self.deref().eq(other.deref())
2347 }
2348}
2349
2350impl<T> Eq for PyRef<T> where T: Eq + PyPayload {}
2351
/// A weak reference that remembers the payload type of its referent; wraps a
/// plain `PyRef<PyWeak>` with a zero-sized type marker.
#[repr(transparent)]
pub struct PyWeakRef<T: PyPayload> {
    weak: PyRef<PyWeak>,
    _marker: PhantomData<T>,
}
2357
impl<T: PyPayload> PyWeakRef<T> {
    /// Attempts to turn the weak reference back into a typed strong one;
    /// returns `None` when the referent is already dead.
    pub fn upgrade(&self) -> Option<PyRef<T>> {
        // SAFETY: the weakref was created from a Py<T>, so the upgraded
        // object's payload is known to be a T.
        self.weak
            .upgrade()
            .map(|obj| unsafe { PyRef::from_obj_unchecked(obj) })
    }
}
2366
2367macro_rules! partially_init {
2370 (
2371 $ty:path {$($init_field:ident: $init_value:expr),*$(,)?},
2372 Uninit { $($uninit_field:ident),*$(,)? }$(,)?
2373 ) => {{
2374 #[allow(clippy::diverging_sub_expression, reason = "intentional compile-time field check in an unreachable branch")]
2377 if false {
2378 #[allow(invalid_value, dead_code, unreachable_code)]
2379 let _ = {$ty {
2380 $($init_field: $init_value,)*
2381 $($uninit_field: unreachable!(),)*
2382 }};
2383 }
2384 let mut m = ::core::mem::MaybeUninit::<$ty>::uninit();
2385 #[allow(unused_unsafe)]
2386 unsafe {
2387 $(::core::ptr::write(&mut (*m.as_mut_ptr()).$init_field, $init_value);)*
2388 }
2389 m
2390 }};
2391}
2392
/// Bootstraps the circular `type` / `object` / `weakref` type hierarchy.
///
/// `type` is an instance of itself and a subclass of `object`, so both have
/// to be allocated manually (with their `typ` field left uninitialized) and
/// wired together afterwards with raw pointer writes.
pub(crate) fn init_type_hierarchy() -> (PyTypeRef, PyTypeRef, PyTypeRef) {
    use crate::{builtins::object, class::PyClassImpl};
    use core::mem::MaybeUninit;

    let (type_type, object_type) = {
        // MaybeUninit must be layout-identical so the later pointer casts hold.
        static_assertions::assert_eq_size!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);
        static_assertions::assert_eq_align!(MaybeUninit<PyInner<PyType>>, PyInner<PyType>);

        let type_payload = PyType {
            base: None,
            bases: PyRwLock::default(),
            mro: PyRwLock::default(),
            subclasses: PyRwLock::default(),
            attributes: PyRwLock::new(Default::default()),
            slots: PyType::make_slots(),
            heaptype_ext: None,
            tp_version_tag: core::sync::atomic::AtomicU32::new(0),
        };
        let object_payload = PyType {
            base: None,
            bases: PyRwLock::default(),
            mro: PyRwLock::default(),
            subclasses: PyRwLock::default(),
            attributes: PyRwLock::new(Default::default()),
            slots: object::PyBaseObject::make_slots(),
            heaptype_ext: None,
            tp_version_tag: core::sync::atomic::AtomicU32::new(0),
        };
        // Allocates [ObjExt | WeakRefList | PyInner<PyType>] as one block and
        // returns a pointer to the PyInner part, matching the layout that
        // ext_ref()/weak_ref_list() later reconstruct by offset.
        let alloc_type_with_prefixes = || -> *mut MaybeUninit<PyInner<PyType>> {
            let inner_layout = core::alloc::Layout::new::<MaybeUninit<PyInner<PyType>>>();
            let ext_layout = core::alloc::Layout::new::<ObjExt>();
            let weakref_layout = core::alloc::Layout::new::<WeakRefList>();

            let (layout, weakref_offset) = ext_layout.extend(weakref_layout).unwrap();
            let (combined, inner_offset) = layout.extend(inner_layout).unwrap();
            let combined = combined.pad_to_align();

            let alloc_ptr = unsafe { alloc::alloc::alloc(combined) };
            if alloc_ptr.is_null() {
                alloc::alloc::handle_alloc_error(combined);
            }
            // Expose provenance so with_exposed_provenance_mut-based offset
            // recovery (e.g. in gc_clear_raw) stays valid; result discarded
            // deliberately.
            alloc_ptr.expose_provenance();

            unsafe {
                let ext_ptr = alloc_ptr as *mut ObjExt;
                ext_ptr.write(ObjExt::new(None, 0));

                let weakref_ptr = alloc_ptr.add(weakref_offset) as *mut WeakRefList;
                weakref_ptr.write(WeakRefList::new());

                alloc_ptr.add(inner_offset) as *mut MaybeUninit<PyInner<PyType>>
            }
        };

        // Write both inners with every field except `typ`, which cannot exist
        // yet because the types reference each other.
        let type_type_ptr = alloc_type_with_prefixes();
        unsafe {
            type_type_ptr.write(partially_init!(
                PyInner::<PyType> {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<PyType>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    payload: type_payload,
                },
                Uninit { typ }
            ));
        }

        let object_type_ptr = alloc_type_with_prefixes();
        unsafe {
            object_type_ptr.write(partially_init!(
                PyInner::<PyType> {
                    ref_count: RefCount::new(),
                    vtable: PyObjVTable::of::<PyType>(),
                    gc_bits: Radium::new(0),
                    gc_generation: Radium::new(GC_UNTRACKED),
                    gc_pointers: Pointers::new(),
                    payload: object_payload,
                },
                Uninit { typ },
            ));
        }

        let object_type_ptr = object_type_ptr as *mut PyInner<PyType>;
        let type_type_ptr = type_type_ptr as *mut PyInner<PyType>;

        unsafe {
            // Each from_raw adopts one reference, so the count is bumped
            // manually before every adoption to keep the bookkeeping balanced.
            (*type_type_ptr).ref_count.inc();
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            ptr::write(&mut (*object_type_ptr).typ, PyAtomicRef::from(type_type));
            (*type_type_ptr).ref_count.inc();
            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            // `type` is an instance of itself.
            ptr::write(&mut (*type_type_ptr).typ, PyAtomicRef::from(type_type));

            let object_type = PyTypeRef::from_raw(object_type_ptr.cast());
            (*object_type_ptr).payload.mro = PyRwLock::new(vec![object_type.clone()]);

            (*type_type_ptr).payload.bases = PyRwLock::new(vec![object_type.clone()]);
            (*type_type_ptr).payload.base = Some(object_type.clone());

            let type_type = PyTypeRef::from_raw(type_type_ptr.cast());
            (*type_type_ptr).payload.mro =
                PyRwLock::new(vec![type_type.clone(), object_type.clone()]);

            (type_type, object_type)
        }
    };

    // weakref can now be created normally, since type/object exist.
    let weakref_type = PyType {
        base: Some(object_type.clone()),
        bases: PyRwLock::new(vec![object_type.clone()]),
        mro: PyRwLock::new(vec![object_type.clone()]),
        subclasses: PyRwLock::default(),
        attributes: PyRwLock::default(),
        slots: PyWeak::make_slots(),
        heaptype_ext: None,
        tp_version_tag: core::sync::atomic::AtomicU32::new(0),
    };
    let weakref_type = PyRef::new_ref(weakref_type, type_type.clone(), None);
    // These bootstrap types are immortal; keep them out of the collector.
    unsafe {
        crate::gc_state::gc_state()
            .untrack_object(core::ptr::NonNull::from(weakref_type.as_object()));
    }
    weakref_type.as_object().clear_gc_tracked();
    weakref_type.mro.write().insert(0, weakref_type.clone());

    // Register type and weakref as subclasses of object, via weak references
    // so the subclass lists don't keep types alive.
    object_type.subclasses.write().push(
        type_type
            .as_object()
            .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
            .unwrap(),
    );

    object_type.subclasses.write().push(
        weakref_type
            .as_object()
            .downgrade_with_weakref_typ_opt(None, weakref_type.clone())
            .unwrap(),
    );

    (type_type, object_type, weakref_type)
}
2549
#[cfg(test)]
mod tests {
    use super::*;

    /// Runs the hand-built type/object/weakref bootstrap so Miri can check
    /// the manual allocation and pointer wiring for UB.
    #[test]
    fn miri_test_type_initialization() {
        let _ = init_type_hierarchy();
    }

    /// Allocates and drops a simple object so Miri can check the refcount
    /// and dealloc paths.
    #[test]
    fn miri_test_drop() {
        let ctx = crate::Context::genesis();
        let obj = ctx.new_bytes(b"dfghjkl".to_vec());
        drop(obj);
    }
}