1use std::any::type_name;
2use std::any::Any;
3use std::borrow::Borrow;
4use std::borrow::BorrowMut;
5use std::convert::identity;
6use std::convert::AsMut;
7use std::convert::AsRef;
8use std::convert::TryFrom;
9use std::fmt::{self, Debug, Formatter};
10use std::marker::PhantomData;
11use std::mem::align_of;
12use std::mem::forget;
13use std::mem::needs_drop;
14use std::mem::size_of;
15use std::mem::take;
16use std::mem::transmute_copy;
17use std::ops::Deref;
18use std::ops::DerefMut;
19use std::ptr::drop_in_place;
20use std::ptr::null_mut;
21use std::ptr::NonNull;
22use std::rc::Rc;
23use std::sync::Arc;
24use std::thread::yield_now;
25use std::time::Duration;
26use std::time::Instant;
27
// C-compatible integer aliases; `non_camel_case_types` is allowed so the
// familiar C spellings can be kept.
#[allow(non_camel_case_types)]
pub type intptr_t = isize;

#[allow(non_camel_case_types)]
pub type size_t = usize;

// Re-export the raw C types under short names so FFI signatures elsewhere
// can be written simply as `char`, `int` and `long`.
pub use std::os::raw::c_char as char;
pub use std::os::raw::c_int as int;
pub use std::os::raw::c_long as long;

// Zero-sized placeholder for C++ objects whose layout is not visible to
// Rust; only ever used behind pointers.
pub type Opaque = [u8; 0];
43
/// Pointer-sized, nullable owning pointer, analogous to a C++
/// `std::unique_ptr<T>`. `None` represents the null state;
/// `repr(transparent)` plus the `NonNull` niche keep it exactly one pointer
/// wide (see `assert_unique_ptr_layout_compatible`).
#[repr(transparent)]
#[derive(Debug)]
pub struct UniquePtr<T: ?Sized>(Option<UniqueRef<T>>);
48
49impl<T: ?Sized> UniquePtr<T> {
50 pub fn is_null(&self) -> bool {
51 self.0.is_none()
52 }
53
54 pub fn as_ref(&self) -> Option<&UniqueRef<T>> {
55 self.0.as_ref()
56 }
57
58 pub fn as_mut(&mut self) -> Option<&mut UniqueRef<T>> {
59 self.0.as_mut()
60 }
61
62 pub fn take(&mut self) -> Option<UniqueRef<T>> {
63 take(&mut self.0)
64 }
65
66 pub fn unwrap(self) -> UniqueRef<T> {
67 self.0.unwrap()
68 }
69}
70
71impl<T> UniquePtr<T> {
72 pub unsafe fn from_raw(ptr: *mut T) -> Self {
73 assert_unique_ptr_layout_compatible::<Self, T>();
74 Self(UniqueRef::try_from_raw(ptr))
75 }
76
77 pub fn into_raw(self) -> *mut T {
78 self
79 .0
80 .map_or_else(null_mut, |unique_ref| unique_ref.into_raw())
81 }
82}
83
84impl<T: Shared> UniquePtr<T> {
85 pub fn make_shared(self) -> SharedPtr<T> {
86 self.into()
87 }
88}
89
impl<T> Default for UniquePtr<T> {
  /// Returns the null `UniquePtr`.
  fn default() -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(None)
  }
}

impl<T> From<UniqueRef<T>> for UniquePtr<T> {
  /// Wraps a non-null owning reference into a nullable `UniquePtr`.
  fn from(unique_ref: UniqueRef<T>) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(Some(unique_ref))
  }
}
103
/// Non-null owning pointer: the counterpart of `UniquePtr<T>` that is known
/// to hold a referent. Dropping it runs the referent's destructor in place.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniqueRef<T: ?Sized>(NonNull<T>);
108
impl<T> UniqueRef<T> {
  /// Assumes ownership of `ptr`; returns `None` when `ptr` is null.
  ///
  /// Safety: `ptr` must be null or point to a live, uniquely-owned `T`.
  pub(crate) unsafe fn try_from_raw(ptr: *mut T) -> Option<Self> {
    assert_unique_ptr_layout_compatible::<Self, T>();
    NonNull::new(ptr).map(Self)
  }

  /// Assumes ownership of a non-null `ptr`.
  ///
  /// Safety: as for `try_from_raw`. Panics when `ptr` is null.
  pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self::try_from_raw(ptr).unwrap()
  }

  /// Releases ownership and returns the raw, non-null pointer; the caller
  /// becomes responsible for eventually dropping the referent.
  pub fn into_raw(self) -> *mut T {
    let ptr = self.0.as_ptr();
    forget(self); // Skip `Drop` so the referent is not dropped here.
    ptr
  }
}
126
127impl<T: Shared> UniqueRef<T> {
128 pub fn make_shared(self) -> SharedRef<T> {
129 self.into()
130 }
131}
132
impl<T: ?Sized> Drop for UniqueRef<T> {
  fn drop(&mut self) {
    // Runs `T`'s destructor on the referent. `drop_in_place` itself does
    // not free the backing allocation; presumably the pointee's own `Drop`
    // impl releases whatever resources/storage it owns — TODO confirm
    // against the concrete types used with `UniqueRef` in this crate.
    unsafe { drop_in_place(self.0.as_ptr()) }
  }
}
138
// Standard smart-pointer plumbing: expose the referent of a `UniqueRef`
// through the usual dereference and borrowing traits.

impl<T: ?Sized> Deref for UniqueRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: the pointer is non-null by construction and uniquely owned,
    // so a shared borrow tied to `&self` is sound.
    unsafe { self.0.as_ref() }
  }
}

impl<T: ?Sized> DerefMut for UniqueRef<T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: as for `deref`; exclusivity follows from `&mut self`.
    unsafe { self.0.as_mut() }
  }
}

impl<T: ?Sized> AsRef<T> for UniqueRef<T> {
  fn as_ref(&self) -> &T {
    self
  }
}

impl<T: ?Sized> AsMut<T> for UniqueRef<T> {
  fn as_mut(&mut self) -> &mut T {
    self
  }
}

impl<T: ?Sized> Borrow<T> for UniqueRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}

impl<T: ?Sized> BorrowMut<T> for UniqueRef<T> {
  fn borrow_mut(&mut self) -> &mut T {
    self
  }
}
175
/// Sanity checks performed whenever a `UniquePtr`/`UniqueRef` (type `U`) is
/// created for pointee type `T`.
fn assert_unique_ptr_layout_compatible<U, T>() {
  // `U` must have exactly the layout of a raw pointer, so it can cross an
  // FFI boundary where the other side expects a plain `T*`.
  assert_eq!(size_of::<U>(), size_of::<*mut T>());
  assert_eq!(align_of::<U>(), align_of::<*mut T>());

  // `T` must have drop glue; if it doesn't, wrapping it in a
  // UniquePtr/UniqueRef would add nothing over an ordinary reference.
  assert!(needs_drop::<T>());
}
186
/// Implemented for types that can be held in a C++-style shared pointer.
/// `SharedPtrBase<Self>` carries the raw two-word storage; these methods
/// provide the reference-counting operations on it.
pub trait Shared
where
  Self: Sized,
{
  /// Returns a new handle sharing ownership with `shared_ptr`.
  fn clone(shared_ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self>;
  /// Consumes a (possibly null) unique pointer, producing shared storage.
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self>;
  /// Returns the raw pointee pointer; null for an empty handle.
  fn get(shared_ptr: &SharedPtrBase<Self>) -> *const Self;
  /// Releases `shared_ptr`'s ownership, leaving it empty.
  fn reset(shared_ptr: &mut SharedPtrBase<Self>);
  /// Returns the current reference count for `shared_ptr`.
  fn use_count(shared_ptr: &SharedPtrBase<Self>) -> long;
}
197
/// Raw storage for a shared pointer: two pointer-sized words, `repr(C)` so
/// it can be passed to/from foreign code. The all-zero `Default` value
/// represents the empty (null) handle.
#[repr(C)]
#[derive(Eq, Debug, PartialEq)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

// NOTE(review): `Send` is gated on `T: Sync` only (not `T: Send`). That is
// consistent with handles that only ever yield `&T`, but confirm it matches
// the guarantees of the underlying shared-pointer implementation.
unsafe impl<T: Shared + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Sync> Sync for SharedPtrBase<T> {}

impl<T: Shared> Default for SharedPtrBase<T> {
  /// Returns the empty (null) shared-pointer representation.
  fn default() -> Self {
    Self([0usize; 2], PhantomData)
  }
}

impl<T: Shared> Drop for SharedPtrBase<T> {
  fn drop(&mut self) {
    // Release this handle's ownership via the trait-provided `reset`.
    <T as Shared>::reset(self);
  }
}
218
/// Nullable, reference-counted pointer to a `Shared` object — the analogue
/// of a C++ `std::shared_ptr<T>`. See `SharedRef<T>` for the known-non-null
/// counterpart.
#[repr(C)]
#[derive(Debug)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);
223
224impl<T: Shared> SharedPtr<T> {
225 #[track_caller]
233 pub fn assert_use_count_eq(&self, expected: usize) {
234 assert_shared_ptr_use_count_eq("SharedPtr", &self.0, expected);
235 }
236
237 pub fn is_null(&self) -> bool {
238 <T as Shared>::get(&self.0).is_null()
239 }
240
241 pub fn take(&mut self) -> Option<SharedRef<T>> {
242 if self.is_null() {
243 None
244 } else {
245 let base = take(&mut self.0);
246 Some(SharedRef(base))
247 }
248 }
249
250 pub fn unwrap(self) -> SharedRef<T> {
251 assert!(!self.is_null());
252 SharedRef(self.0)
253 }
254}
255
impl<T: Shared> Clone for SharedPtr<T> {
  /// Creates another handle sharing ownership, via `Shared::clone`.
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> Default for SharedPtr<T> {
  /// Returns the null shared pointer.
  fn default() -> Self {
    Self(Default::default())
  }
}

/// Converts anything convertible to a `UniquePtr<T>` (e.g. a `UniqueRef<T>`
/// or a `UniquePtr<T>` itself) into a shared pointer, transferring
/// ownership.
impl<T, U> From<U> for SharedPtr<T>
where
  T: Shared,
  U: Into<UniquePtr<T>>,
{
  fn from(unique_ptr: U) -> Self {
    let unique_ptr = unique_ptr.into();
    Self(<T as Shared>::from_unique_ptr(unique_ptr))
  }
}

impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
  fn from(mut shared_ref: SharedRef<T>) -> Self {
    // Steal the base out of the `SharedRef`, leaving it empty so its own
    // drop does not release the ownership this `SharedPtr` now holds.
    Self(take(&mut shared_ref.0))
  }
}
284
/// Non-null variant of `SharedPtr<T>`: a reference-counted handle that is
/// known to point at an object.
#[repr(C)]
#[derive(Debug)]
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);

impl<T: Shared> SharedRef<T> {
  /// Asserts that the reference count equals `expected`, retrying briefly
  /// in case the observed count has not settled yet.
  ///
  /// # Panics
  /// Panics when the count does not reach `expected`.
  #[track_caller]
  pub fn assert_use_count_eq(&self, expected: usize) {
    assert_shared_ptr_use_count_eq("SharedRef", &self.0, expected);
  }
}
304
impl<T: Shared> Clone for SharedRef<T> {
  /// Creates another handle sharing ownership, via `Shared::clone`.
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    // A `UniqueRef` is never null, so the resulting shared pointer should
    // be non-null and `unwrap` should not panic.
    SharedPtr::from(unique_ref).unwrap()
  }
}

impl<T: Shared> Deref for SharedRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: `SharedRef` is non-null by construction, and `self` keeps
    // the object alive for the lifetime of the returned borrow.
    unsafe { &*(<T as Shared>::get(&self.0)) }
  }
}

impl<T: Shared> AsRef<T> for SharedRef<T> {
  fn as_ref(&self) -> &T {
    self
  }
}

impl<T: Shared> Borrow<T> for SharedRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}
335
336#[track_caller]
337fn assert_shared_ptr_use_count_eq<T: Shared>(
338 wrapper_type_name: &str,
339 shared_ptr: &SharedPtrBase<T>,
340 expected: usize,
341) {
342 let mut actual = T::use_count(shared_ptr);
343 let ok = match long::try_from(expected) {
344 Err(_) => false, Ok(expected) if actual == expected => true, Ok(expected) => {
347 pub const RETRY_TIMEOUT: Duration = Duration::from_secs(1);
348 let start = Instant::now();
349 loop {
350 yield_now();
351 actual = T::use_count(shared_ptr);
352 if actual == expected {
353 break true;
354 } else if start.elapsed() > RETRY_TIMEOUT {
355 break false;
356 }
357 }
358 }
359 };
360 assert!(
361 ok,
362 "assertion failed: `{wrapper_type_name}<{}>` reference count does not match expectation\
363 \n actual: {actual}\
364 \n expected: {expected}",
365 type_name::<T>(),
366 );
367}
368
/// Any `'static` owner of a `T` that can lend out `&T`: `Box<T>`, `Rc<T>`,
/// `Arc<T>`, `UniqueRef<T>`, `&'static T`, or any other type satisfying the
/// bounds.
pub trait Allocated<T: ?Sized>:
  Deref<Target = T> + Borrow<T> + 'static
{
}
// Blanket impl: every type meeting the bounds is automatically `Allocated`.
impl<A, T: ?Sized> Allocated<T> for A where
  A: Deref<Target = T> + Borrow<T> + 'static
{
}
381
/// Concrete storage behind an owner of a `T`. `Allocation::of` downcasts a
/// generic owner into one of the specifically-handled variants, falling
/// back to the type-erased, boxed `Other` variant for everything else.
pub(crate) enum Allocation<T: ?Sized + 'static> {
  Static(&'static T),
  Arc(Arc<T>),
  Box(Box<T>),
  Rc(Rc<T>),
  UniqueRef(UniqueRef<T>),
  Other(Box<dyn Borrow<T> + 'static>),
}
393
impl<T: ?Sized + 'static> Allocation<T> {
  /// Reinterprets `value` as `Concrete` and wraps it with `wrap`.
  ///
  /// Safety: the caller must guarantee that `Abstract` and `Concrete` are
  /// in fact the same type; the size assertion is only a sanity check.
  unsafe fn transmute_wrap<Abstract, Concrete>(
    value: Abstract,
    wrap: fn(Concrete) -> Self,
  ) -> Self {
    assert_eq!(size_of::<Abstract>(), size_of::<Concrete>());
    let wrapped = wrap(transmute_copy(&value));
    forget(value); // `value` now lives on inside `wrapped`; avoid a double drop.
    wrapped
  }

  /// Wraps `value` with `wrap` if its runtime type is `Concrete`; otherwise
  /// hands the value back unchanged in `Err`.
  fn try_wrap<Abstract: 'static, Concrete: 'static>(
    value: Abstract,
    wrap: fn(Concrete) -> Self,
  ) -> Result<Self, Abstract> {
    if <dyn Any>::is::<Concrete>(&value) {
      // SAFETY: the `Any` check above proved `Abstract` == `Concrete`.
      Ok(unsafe { Self::transmute_wrap(value, wrap) })
    } else {
      Err(value)
    }
  }

  /// Converts an arbitrary eligible owner into the most specific
  /// `Allocation` variant, probing each known concrete type in turn and
  /// falling back to the type-erased `Other` box.
  pub fn of<Abstract: Deref<Target = T> + Borrow<T> + 'static>(
    a: Abstract,
  ) -> Self {
    Self::try_wrap(a, identity) // Already an `Allocation<T>`?
      .or_else(|a| Self::try_wrap(a, Self::Static))
      .or_else(|a| Self::try_wrap(a, Self::Arc))
      .or_else(|a| Self::try_wrap(a, Self::Box))
      .or_else(|a| Self::try_wrap(a, Self::Rc))
      .or_else(|a| Self::try_wrap(a, Self::UniqueRef))
      .unwrap_or_else(|a| Self::Other(Box::from(a)))
  }
}
428
429impl<T: Debug + ?Sized> Debug for Allocation<T> {
430 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
431 match self {
432 Allocation::Arc(r) => f.debug_tuple("Arc").field(&r).finish(),
433 Allocation::Box(b) => f.debug_tuple("Box").field(&b).finish(),
434 Allocation::Other(_) => f.debug_tuple("Other").finish(),
435 Allocation::Rc(r) => f.debug_tuple("Rc").field(&r).finish(),
436 Allocation::Static(s) => f.debug_tuple("Static").field(&s).finish(),
437 Allocation::UniqueRef(u) => f.debug_tuple("UniqueRef").field(&u).finish(),
438 }
439 }
440}
441
impl<T: ?Sized> Deref for Allocation<T> {
  type Target = T;
  /// Borrows the stored value, whichever variant holds it.
  fn deref(&self) -> &Self::Target {
    match self {
      Self::Static(v) => v.borrow(),
      Self::Arc(v) => v.borrow(),
      Self::Box(v) => v.borrow(),
      Self::Rc(v) => v.borrow(),
      Self::UniqueRef(v) => v.borrow(),
      // Double deref: first through the box, then through the type-erased
      // `Borrow<T>` object inside it.
      Self::Other(v) => (**v).borrow(),
    }
  }
}

impl<T: ?Sized> AsRef<T> for Allocation<T> {
  fn as_ref(&self) -> &T {
    self
  }
}

impl<T: ?Sized> Borrow<T> for Allocation<T> {
  fn borrow(&self) -> &T {
    self
  }
}
467
/// C-compatible tri-state boolean (`repr(C)` with fixed discriminants),
/// convertible to and from `Option<bool>`.
#[repr(C)]
#[derive(Debug, PartialEq, Eq)]
pub enum MaybeBool {
  JustFalse = 0,
  JustTrue = 1,
  Nothing = 2,
}

impl From<MaybeBool> for Option<bool> {
  fn from(b: MaybeBool) -> Self {
    match b {
      MaybeBool::Nothing => None,
      MaybeBool::JustTrue => Some(true),
      MaybeBool::JustFalse => Some(false),
    }
  }
}

impl From<Option<bool>> for MaybeBool {
  fn from(option: Option<bool>) -> Self {
    option.map_or(MaybeBool::Nothing, |value| {
      if value {
        MaybeBool::JustTrue
      } else {
        MaybeBool::JustFalse
      }
    })
  }
}
495
/// Pointer to a C++ virtual-function table.
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct CxxVTable(pub *const Opaque);

/// Pointer to a Rust trait-object vtable for trait object type `DynT`.
#[allow(unused)]
#[derive(Copy, Clone, Debug)]
pub struct RustVTable<DynT>(pub *const Opaque, pub PhantomData<DynT>);
503
/// Byte offset of a field of type `F` inside some embedder struct `E`,
/// computed once from a concrete instance and then reusable to map a field
/// reference back to a reference to its containing struct.
#[derive(Debug)]
pub struct FieldOffset<F>(usize, PhantomData<F>);

unsafe impl<F> Send for FieldOffset<F> where F: Send {}
unsafe impl<F> Sync for FieldOffset<F> where F: Sync {}

impl<F> Copy for FieldOffset<F> {}

impl<F> Clone for FieldOffset<F> {
  fn clone(&self) -> Self {
    *self
  }
}

impl<F> FieldOffset<F> {
  /// Computes the offset of the field at `field_ptr` within the embedder
  /// object at `embedder_ptr`.
  ///
  /// # Panics
  /// Panics when the field does not lie entirely inside the embedder
  /// object's bounds.
  pub fn from_ptrs<E>(embedder_ptr: *const E, field_ptr: *const F) -> Self {
    let embedder_addr = embedder_ptr as usize;
    let field_addr = field_ptr as usize;
    assert!(field_addr >= embedder_addr);
    assert!((field_addr + size_of::<F>()) <= (embedder_addr + size_of::<E>()));
    Self(field_addr - embedder_addr, PhantomData)
  }

  /// Maps a field reference back to a reference to its embedder object.
  ///
  /// # Safety
  /// `field` must actually be located at this offset inside a live `E`.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder<E>(self, field: &F) -> &E {
    let embedder_addr = (field as *const F as usize) - self.0;
    (embedder_addr as *const E).as_ref().unwrap()
  }

  /// Mutable variant of `to_embedder`.
  ///
  /// # Safety
  /// As for `to_embedder`; additionally, `field` must be the only live
  /// reference into the embedder object.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder_mut<E>(self, field: &mut F) -> &mut E {
    let embedder_addr = (field as *mut F as usize) - self.0;
    (embedder_addr as *mut E).as_mut().unwrap()
  }
}
541
/// A value paired with a validity flag, laid out `repr(C)` so it can cross
/// an FFI boundary; convertible into `Option<T>`.
#[repr(C)]
#[derive(Debug, Default)]
pub struct Maybe<T> {
  has_value: bool,
  value: T,
}

impl<T> From<Maybe<T>> for Option<T> {
  fn from(maybe: Maybe<T>) -> Self {
    // The payload is only meaningful when the flag is set.
    match maybe {
      Maybe { has_value: true, value } => Some(value),
      Maybe { has_value: false, .. } => None,
    }
  }
}
558
/// Marker for zero-sized `Copy` types (typically non-capturing closures or
/// fn items) whose unique value can be materialized out of thin air with
/// `get()`.
pub trait UnitType
where
  Self: Copy + Sized,
{
  #[inline(always)]
  fn get() -> Self {
    UnitValue::<Self>::get()
  }
}

// Blanket impl; the zero-size requirement is enforced at compile time by
// `UnitValue`, not by these bounds.
impl<T> UnitType for T where T: Copy + Sized {}
570
/// Helper that materializes the single value of a zero-sized type `T`.
#[derive(Copy, Clone, Debug)]
struct UnitValue<T>(PhantomData<T>)
where
  Self: Sized;

impl<T> UnitValue<T>
where
  Self: Copy + Sized,
{
  const SELF: Self = Self::new_checked();

  /// Const-evaluated constructor that fails to compile when `T` is not
  /// zero-sized: indexing the one-element array with `size_of::<T>()`
  /// panics during const evaluation unless the size is 0.
  const fn new_checked() -> Self {
    let size_must_be_0 = size_of::<T>();
    let s = Self(PhantomData::<T>);
    [s][size_must_be_0]
  }

  /// Produces the value of `T`. A zero-sized type has exactly one value,
  /// so a zeroed `MaybeUninit` is a valid (indeed the only) instance.
  #[inline(always)]
  fn get_checked(self) -> T {
    // Runtime restatement of the compile-time size check above.
    assert_eq!(size_of::<T>(), 0);
    unsafe { std::mem::MaybeUninit::<T>::zeroed().assume_init() }
  }

  #[inline(always)]
  pub fn get() -> T {
    Self::SELF.get_checked()
  }
}
604
/// Default tag for `MapFnFrom`/`MapFnTo` conversions.
#[derive(Debug)]
pub struct DefaultTag;

/// Tag selecting the no-op conversion where the source and target function
/// types are identical.
#[derive(Debug)]
pub struct IdenticalConversionTag;
610
/// Builds a `Self` out of the zero-sized function item type `F`. The `Tag`
/// parameter disambiguates impls that would otherwise overlap.
pub trait MapFnFrom<F, Tag = DefaultTag>
where
  F: UnitType,
  Self: Sized,
{
  /// Produces the mapped function value.
  fn mapping() -> Self;

  /// Convenience wrapper; the argument only serves to name the type `F`.
  #[inline(always)]
  fn map_fn_from(_: F) -> Self {
    Self::mapping()
  }
}

// Identity conversion: a unit-type function maps to itself.
impl<F> MapFnFrom<F, IdenticalConversionTag> for F
where
  Self: UnitType,
{
  #[inline(always)]
  fn mapping() -> Self {
    Self::get()
  }
}
633
/// The flip side of `MapFnFrom`: maps `self` (a zero-sized function item)
/// into the target function type `T`.
pub trait MapFnTo<T, Tag = DefaultTag>
where
  Self: UnitType,
  T: Sized,
{
  /// Produces the mapped function value.
  fn mapping() -> T;

  /// Convenience wrapper; `self` only serves to name the source type.
  #[inline(always)]
  fn map_fn_to(self) -> T {
    Self::mapping()
  }
}

// Blanket impl: delegate to whichever `MapFnFrom` impl matches `Tag`.
impl<F, T, Tag> MapFnTo<T, Tag> for F
where
  Self: UnitType,
  T: MapFnFrom<F, Tag>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::map_fn_from(F::get())
  }
}
657
/// Builds an `extern "C" fn` value of type `Self` from the zero-sized Rust
/// function item type `F`. Implementations are generated below by the
/// `impl_c_fn_from!` macro for arities 0 through 7.
pub trait CFnFrom<F>
where
  Self: Sized,
  F: UnitType,
{
  /// Produces the C-ABI function pointer.
  fn mapping() -> Self;

  /// Convenience wrapper; the argument only serves to name the type `F`.
  #[inline(always)]
  fn c_fn_from(_: F) -> Self {
    Self::mapping()
  }
}
670
// Generates a `CFnFrom` impl for one arity: it turns a zero-sized `Fn` item
// `F` (e.g. a non-capturing closure) into a plain `extern "C" fn` pointer.
// The generated shim recreates the callee with `F::get()` — possible only
// because `F` is zero-sized — and forwards all arguments to it.
macro_rules! impl_c_fn_from {
  ($($arg:ident: $ty:ident),*) => {
    impl<F, R, $($ty),*> CFnFrom<F> for extern "C" fn($($ty),*) -> R
    where
      F: UnitType + Fn($($ty),*) -> R,
    {
      #[inline(always)]
      fn mapping() -> Self {
        extern "C" fn c_fn<F, R, $($ty),*>($($arg: $ty),*) -> R
        where
          F: UnitType + Fn($($ty),*) -> R,
        {
          (F::get())($($arg),*)
        }
        c_fn::<F, R, $($ty),*>
      }
    }
  };
}

// Instantiate the conversion for every arity from 0 through 7 arguments.
impl_c_fn_from!();
impl_c_fn_from!(a0: A0);
impl_c_fn_from!(a0: A0, a1: A1);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6);
699
/// Converts `self` (a zero-sized function item) into the C-ABI function
/// pointer type `T`. See `CFnFrom` for the underlying mechanism.
pub trait ToCFn<T>
where
  Self: UnitType,
  T: Sized,
{
  /// Produces the C-ABI function pointer.
  fn mapping() -> T;

  /// Convenience wrapper; `self` only serves to name the source type.
  #[inline(always)]
  fn to_c_fn(self) -> T {
    Self::mapping()
  }
}

// Blanket impl: delegate to the macro-generated `CFnFrom` impls.
impl<F, T> ToCFn<T> for F
where
  Self: UnitType,
  T: CFnFrom<F>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::c_fn_from(F::get())
  }
}
723
// Unit tests for the smart-pointer wrappers and the `Allocation` owner
// abstraction defined above.
#[cfg(test)]
mod tests {
  use super::*;
  use std::ptr::null;
  use std::sync::atomic::AtomicBool;
  use std::sync::atomic::Ordering;

  // Hand-rolled `Shared` impl whose `SharedPtrBase` contents and use counts
  // are fixed constants, so SharedPtr/SharedRef behavior can be exercised
  // without any real reference-counted backend behind it.
  #[derive(Eq, PartialEq)]
  struct MockSharedObj {
    pub inner: u32,
  }

  impl MockSharedObj {
    const INSTANCE_A: Self = Self { inner: 11111 };
    const INSTANCE_B: Self = Self { inner: 22222 };

    const SHARED_PTR_BASE_A: SharedPtrBase<Self> =
      SharedPtrBase([1, 1], PhantomData);
    const SHARED_PTR_BASE_B: SharedPtrBase<Self> =
      SharedPtrBase([2, 2], PhantomData);
  }

  impl Shared for MockSharedObj {
    fn clone(_: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn from_unique_ptr(_: UniquePtr<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn get(p: &SharedPtrBase<Self>) -> *const Self {
      match p {
        &Self::SHARED_PTR_BASE_A => &Self::INSTANCE_A,
        &Self::SHARED_PTR_BASE_B => &Self::INSTANCE_B,
        // The all-zero default base represents the null pointer.
        p if p == &Default::default() => null(),
        _ => unreachable!(),
      }
    }

    fn reset(p: &mut SharedPtrBase<Self>) {
      // Swap in the default value and forget the old one so that dropping
      // it does not recurse back into `reset`.
      forget(take(p));
    }

    fn use_count(p: &SharedPtrBase<Self>) -> long {
      match p {
        &Self::SHARED_PTR_BASE_A => 1,
        &Self::SHARED_PTR_BASE_B => 2,
        p if p == &Default::default() => 0,
        _ => unreachable!(),
      }
    }
  }

  #[test]
  fn shared_ptr_and_shared_ref() {
    // Mock object A reports a use count of 1.
    let mut shared_ptr_a1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_A);
    assert!(!shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(1);

    let shared_ref_a: SharedRef<_> = shared_ptr_a1.take().unwrap();
    assert_eq!(shared_ref_a.inner, 11111);
    shared_ref_a.assert_use_count_eq(1);

    // `take()` leaves the original pointer null with a zero count.
    assert!(shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(0);

    let shared_ptr_a2: SharedPtr<_> = shared_ref_a.into();
    assert!(!shared_ptr_a2.is_null());
    shared_ptr_a2.assert_use_count_eq(1);
    assert_eq!(shared_ptr_a2.unwrap().inner, 11111);

    // Mock object B reports a use count of 2.
    let mut shared_ptr_b1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_B);
    assert!(!shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(2);

    let shared_ref_b: SharedRef<_> = shared_ptr_b1.take().unwrap();
    assert_eq!(shared_ref_b.inner, 22222);
    shared_ref_b.assert_use_count_eq(2);

    assert!(shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(0);

    let shared_ptr_b2: SharedPtr<_> = shared_ref_b.into();
    assert!(!shared_ptr_b2.is_null());
    shared_ptr_b2.assert_use_count_eq(2);
    assert_eq!(shared_ptr_b2.unwrap().inner, 22222);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
                 `SharedPtr<v8::support::tests::MockSharedObj>` reference count \
                 does not match expectation")]
  fn shared_ptr_use_count_assertion_failed() {
    let shared_ptr: SharedPtr<MockSharedObj> = Default::default();
    shared_ptr.assert_use_count_eq(3);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
                 `SharedRef<v8::support::tests::MockSharedObj>` reference count \
                 does not match expectation")]
  fn shared_ref_use_count_assertion_failed() {
    let shared_ref = SharedRef(MockSharedObj::SHARED_PTR_BASE_B);
    shared_ref.assert_use_count_eq(7);
  }

  // Set to `true` by `TestObj::drop`; the drop impl also asserts that the
  // flag was `false` before, so a double drop is detected as well.
  static TEST_OBJ_DROPPED: AtomicBool = AtomicBool::new(false);

  struct TestObj {
    pub id: u32,
  }

  impl Drop for TestObj {
    fn drop(&mut self) {
      assert!(!TEST_OBJ_DROPPED.swap(true, Ordering::SeqCst));
    }
  }

  // An owner type that is none of the specially-handled `Allocation`
  // variants, forcing `Allocation::of` to fall back to `Other`.
  struct TestObjRef(TestObj);

  impl Deref for TestObjRef {
    type Target = TestObj;

    fn deref(&self) -> &TestObj {
      &self.0
    }
  }

  impl Borrow<TestObj> for TestObjRef {
    fn borrow(&self) -> &TestObj {
      self
    }
  }

  #[test]
  fn allocation() {
    // A static is merely borrowed: dropping its Allocation drops nothing.
    static STATIC_OBJ: TestObj = TestObj { id: 1 };
    let owner = Allocation::of(&STATIC_OBJ);
    match owner {
      Allocation::Static(_) => assert_eq!(owner.id, 1),
      _ => panic!(),
    }
    drop(owner);
    assert!(!TEST_OBJ_DROPPED.load(Ordering::SeqCst));

    let owner = Allocation::of(Arc::new(TestObj { id: 2 }));
    match owner {
      Allocation::Arc(_) => assert_eq!(owner.id, 2),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    let owner = Allocation::of(Box::new(TestObj { id: 3 }));
    match owner {
      Allocation::Box(_) => assert_eq!(owner.id, 3),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    let owner = Allocation::of(Rc::new(TestObj { id: 4 }));
    match owner {
      Allocation::Rc(_) => assert_eq!(owner.id, 4),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    let owner = Allocation::of(TestObjRef(TestObj { id: 5 }));
    match owner {
      Allocation::Other(_) => assert_eq!(owner.id, 5),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Wrapping a Vec in `Allocation::Other` boxes the Vec itself but leaves
    // its heap buffer in place: element addresses must be unchanged.
    let vec = vec![1u8, 2, 3, 5, 8, 13, 21];
    let vec_element_ptrs =
      vec.iter().map(|i| i as *const u8).collect::<Vec<_>>();
    let owner = Allocation::of(vec);
    match owner {
      Allocation::Other(_) => {}
      _ => panic!(),
    }
    owner
      .iter()
      .map(|i| i as *const u8)
      .zip(vec_element_ptrs)
      .for_each(|(p1, p2)| assert_eq!(p1, p2));
  }
}