v8/
support.rs

1use std::any::Any;
2use std::any::type_name;
3use std::borrow::Borrow;
4use std::borrow::BorrowMut;
5use std::convert::AsMut;
6use std::convert::AsRef;
7use std::convert::TryFrom;
8use std::convert::identity;
9use std::fmt::{self, Debug, Formatter};
10use std::marker::PhantomData;
11use std::mem::align_of;
12use std::mem::forget;
13use std::mem::needs_drop;
14use std::mem::size_of;
15use std::mem::take;
16use std::mem::transmute_copy;
17use std::ops::Deref;
18use std::ops::DerefMut;
19use std::ptr::NonNull;
20use std::ptr::drop_in_place;
21use std::ptr::null_mut;
22use std::rc::Rc;
23use std::sync::Arc;
24use std::thread::yield_now;
25use std::time::Duration;
26use std::time::Instant;
27
// C ABI integer type aliases, named after their C counterparts so that FFI
// signatures in this crate read like the corresponding C/C++ declarations.

// TODO use libc::intptr_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.intptr_t.html
#[allow(non_camel_case_types)]
pub type intptr_t = isize;

// TODO use libc::size_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.size_t.html
#[allow(non_camel_case_types)]
pub type size_t = usize;

// Re-export the raw C primitive types under their bare C names for the same
// reason.
pub use std::os::raw::c_char as char;
pub use std::os::raw::c_int as int;
pub use std::os::raw::c_long as long;

// Stand-in for an opaque C++ type: zero-sized, so values of this type can
// only ever be used behind a pointer or reference.
pub type Opaque = [u8; 0];
43
/// Pointer to object allocated on the C++ heap. The pointer may be null.
///
/// `#[repr(transparent)]` over `Option<UniqueRef<T>>` (a `NonNull` wrapper)
/// gives this type exactly the layout of a raw pointer — `None` occupies the
/// null niche — so it can be passed across the FFI boundary by value. The
/// layout is double-checked at run time by `assert_unique_ptr_layout_compatible`.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniquePtr<T: ?Sized>(Option<UniqueRef<T>>);
48
49impl<T: ?Sized> UniquePtr<T> {
50  pub fn is_null(&self) -> bool {
51    self.0.is_none()
52  }
53
54  pub fn as_ref(&self) -> Option<&UniqueRef<T>> {
55    self.0.as_ref()
56  }
57
58  pub fn as_mut(&mut self) -> Option<&mut UniqueRef<T>> {
59    self.0.as_mut()
60  }
61
62  pub fn take(&mut self) -> Option<UniqueRef<T>> {
63    take(&mut self.0)
64  }
65
66  pub fn unwrap(self) -> UniqueRef<T> {
67    self.0.unwrap()
68  }
69}
70
impl<T> UniquePtr<T> {
  /// Constructs a `UniquePtr` that takes ownership of `ptr`.
  ///
  /// # Safety
  ///
  /// `ptr` must be either null or point to a live `T` that is not owned
  /// elsewhere; the pointee will be dropped in place when the resulting
  /// `UniquePtr` (or inner `UniqueRef`) is dropped.
  pub unsafe fn from_raw(ptr: *mut T) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(unsafe { UniqueRef::try_from_raw(ptr) })
  }

  /// Releases ownership and returns the raw pointer (null when `self` was
  /// null). The caller becomes responsible for dropping the pointee.
  pub fn into_raw(self) -> *mut T {
    self
      .0
      .map_or_else(null_mut, |unique_ref| unique_ref.into_raw())
  }
}

impl<T: Shared> UniquePtr<T> {
  /// Moves this unique pointer into a C++ `std::shared_ptr`.
  pub fn make_shared(self) -> SharedPtr<T> {
    self.into()
  }
}
89
impl<T> Default for UniquePtr<T> {
  /// Returns a null `UniquePtr`.
  fn default() -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(None)
  }
}

impl<T> From<UniqueRef<T>> for UniquePtr<T> {
  /// Wraps a (by definition non-null) `UniqueRef` into a `UniquePtr`.
  fn from(unique_ref: UniqueRef<T>) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(Some(unique_ref))
  }
}
103
/// Pointer to object allocated on the C++ heap. The pointer may not be null.
///
/// Owns the pointee: dropping a `UniqueRef` runs the pointee's `Drop` in
/// place (see the `Drop` impl below). `#[repr(transparent)]` over `NonNull`
/// keeps the layout identical to a raw pointer.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniqueRef<T: ?Sized>(NonNull<T>);
108
impl<T> UniqueRef<T> {
  /// Takes ownership of `ptr`, returning `None` when it is null.
  ///
  /// # Safety
  ///
  /// When non-null, `ptr` must point to a live `T` that is not owned
  /// elsewhere; the resulting `UniqueRef` will drop it in place.
  pub(crate) unsafe fn try_from_raw(ptr: *mut T) -> Option<Self> {
    assert_unique_ptr_layout_compatible::<Self, T>();
    NonNull::new(ptr).map(Self)
  }

  /// Takes ownership of `ptr`. Panics when `ptr` is null.
  ///
  /// # Safety
  ///
  /// Same requirements as [`Self::try_from_raw`].
  pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    unsafe { Self::try_from_raw(ptr).unwrap() }
  }

  /// Releases ownership and returns the raw pointer without dropping the
  /// pointee (`forget` suppresses this type's `Drop` impl).
  pub fn into_raw(self) -> *mut T {
    let ptr = self.0.as_ptr();
    forget(self);
    ptr
  }
}

impl<T: Shared> UniqueRef<T> {
  /// Moves this unique reference into a C++ `std::shared_ptr`.
  pub fn make_shared(self) -> SharedRef<T> {
    self.into()
  }
}
132
impl<T: ?Sized> Drop for UniqueRef<T> {
  fn drop(&mut self) {
    // SAFETY: the pointer is non-null and uniquely owned. `drop_in_place`
    // runs `T`'s destructor; freeing the underlying C++ allocation is
    // presumably the responsibility of that destructor — confirm per type.
    unsafe { drop_in_place(self.0.as_ptr()) }
  }
}

// Smart-pointer conveniences: a `UniqueRef<T>` can be used wherever a `&T`
// or `&mut T` is expected.

impl<T: ?Sized> Deref for UniqueRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: the wrapped pointer is non-null and owned by `self`.
    unsafe { self.0.as_ref() }
  }
}

impl<T: ?Sized> DerefMut for UniqueRef<T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: exclusive access to `self` implies exclusive access to the
    // pointee.
    unsafe { self.0.as_mut() }
  }
}

impl<T: ?Sized> AsRef<T> for UniqueRef<T> {
  fn as_ref(&self) -> &T {
    self // Deref coercion: &UniqueRef<T> -> &T.
  }
}

impl<T: ?Sized> AsMut<T> for UniqueRef<T> {
  fn as_mut(&mut self) -> &mut T {
    self
  }
}

impl<T: ?Sized> Borrow<T> for UniqueRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}

impl<T: ?Sized> BorrowMut<T> for UniqueRef<T> {
  fn borrow_mut(&mut self) -> &mut T {
    self
  }
}
175
/// Asserts at run time that `U` (a `UniqueRef<T>` or `UniquePtr<T>`) has the
/// same memory layout as a raw C pointer, so it can legally be passed across
/// the FFI boundary by value.
///
/// The assertion messages name the offending types, so a failure is
/// diagnosable without a debugger (the plain `assert_eq!`/`assert!` forms
/// previously gave no indication of which instantiation failed).
fn assert_unique_ptr_layout_compatible<U, T>() {
  // `U` must have exactly the size and alignment of a raw pointer.
  assert_eq!(
    size_of::<U>(),
    size_of::<*mut T>(),
    "size of `{}` differs from `*mut {}`",
    type_name::<U>(),
    type_name::<T>()
  );
  assert_eq!(
    align_of::<U>(),
    align_of::<*mut T>(),
    "alignment of `{}` differs from `*mut {}`",
    type_name::<U>(),
    type_name::<T>()
  );

  // Assert that `T` (probably) implements `Drop`. If it doesn't, a regular
  // reference should be used instead of UniquePtr/UniqueRef.
  assert!(
    needs_drop::<T>(),
    "`{}` does not need drop; use a regular reference instead of \
     UniquePtr/UniqueRef",
    type_name::<T>()
  );
}
186
/// Implemented (via generated bindings) by types that can live inside a C++
/// `std::shared_ptr`. Each method operates on the raw two-word
/// `SharedPtrBase` handle and forwards to the corresponding `shared_ptr`
/// operation on the C++ side.
pub trait Shared
where
  Self: Sized,
{
  /// Returns a second owning handle to the same object.
  fn clone(shared_ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self>;
  /// Converts an owning unique pointer into a shared pointer handle.
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self>;
  /// Returns the stored raw pointer (null for an empty handle).
  fn get(shared_ptr: &SharedPtrBase<Self>) -> *const Self;
  /// Clears the handle, releasing this handle's ownership share.
  fn reset(shared_ptr: &mut SharedPtrBase<Self>);
  /// Returns the current reference count, as reported by C++ `use_count()`.
  fn use_count(shared_ptr: &SharedPtrBase<Self>) -> long;
}
197
/// Private base type which is shared by the `SharedPtr` and `SharedRef`
/// implementations.
///
/// Two pointer-sized words, sized to hold a C++ `std::shared_ptr` (typically
/// an object pointer plus a control-block pointer). An all-zero value
/// represents the empty/null pointer (see `Default` below).
#[repr(C)]
#[derive(Eq, Debug, PartialEq)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

// SAFETY: a `shared_ptr`'s reference count is managed thread-safely on the
// C++ side, so handles may be sent/shared across threads whenever the
// pointee itself is `Sync`.
unsafe impl<T: Shared + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Sync> Sync for SharedPtrBase<T> {}

impl<T: Shared> Default for SharedPtrBase<T> {
  /// Returns the empty (null) shared pointer handle.
  fn default() -> Self {
    Self([0usize; 2], PhantomData)
  }
}

impl<T: Shared> Drop for SharedPtrBase<T> {
  fn drop(&mut self) {
    // Release this handle's ownership share; may destroy the pointee.
    <T as Shared>::reset(self);
  }
}
218
/// Wrapper around a C++ shared_ptr. A shared_ptr may be null.
#[repr(C)]
#[derive(Debug)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);
223
224impl<T: Shared> SharedPtr<T> {
225  /// Asserts that the number of references to the shared inner value is equal
226  /// to the `expected` count.
227  ///
228  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
229  /// which usually performs a relaxed load. This function will repeatedly call
230  /// `use_count()` until it returns the expected value, for up to one second.
231  /// Therefore it should probably not be used in performance critical code.
232  #[track_caller]
233  pub fn assert_use_count_eq(&self, expected: usize) {
234    assert_shared_ptr_use_count_eq("SharedPtr", &self.0, expected);
235  }
236
237  pub fn is_null(&self) -> bool {
238    <T as Shared>::get(&self.0).is_null()
239  }
240
241  pub fn take(&mut self) -> Option<SharedRef<T>> {
242    if self.is_null() {
243      None
244    } else {
245      let base = take(&mut self.0);
246      Some(SharedRef(base))
247    }
248  }
249
250  pub fn unwrap(self) -> SharedRef<T> {
251    assert!(!self.is_null());
252    SharedRef(self.0)
253  }
254}
255
impl<T: Shared> Clone for SharedPtr<T> {
  fn clone(&self) -> Self {
    // Obtains a second owning handle via the C++ side.
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> Default for SharedPtr<T> {
  /// Returns a null `SharedPtr`.
  fn default() -> Self {
    Self(Default::default())
  }
}

/// Conversion from anything that can become a `UniquePtr<T>` (e.g. a
/// `UniqueRef<T>`); ownership moves into a new C++ `shared_ptr`.
impl<T, U> From<U> for SharedPtr<T>
where
  T: Shared,
  U: Into<UniquePtr<T>>,
{
  fn from(unique_ptr: U) -> Self {
    let unique_ptr = unique_ptr.into();
    Self(<T as Shared>::from_unique_ptr(unique_ptr))
  }
}

impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
  fn from(mut shared_ref: SharedRef<T>) -> Self {
    // `take` leaves a default (null) base behind, so the `SharedRef`'s own
    // drop will not release the ownership share we are moving out.
    Self(take(&mut shared_ref.0))
  }
}
284
/// Wrapper around a C++ shared_ptr. The shared_ptr is assumed to contain a
/// value and may not be null.
///
/// Obtained via `SharedPtr::take()`/`SharedPtr::unwrap()` or converted from
/// a `UniqueRef`.
#[repr(C)]
#[derive(Debug)]
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);
290
impl<T: Shared> SharedRef<T> {
  /// Asserts that the number of references to the shared inner value is equal
  /// to the `expected` count.
  ///
  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
  /// which usually performs a relaxed load. This function will repeatedly call
  /// `use_count()` until it returns the expected value, for up to one second.
  /// Therefore it should probably not be used in performance critical code.
  ///
  /// Panics (with `#[track_caller]` pointing at the call site) if the count
  /// never matches within the timeout.
  #[track_caller]
  pub fn assert_use_count_eq(&self, expected: usize) {
    assert_shared_ptr_use_count_eq("SharedRef", &self.0, expected);
  }
}
304
impl<T: Shared> Clone for SharedRef<T> {
  fn clone(&self) -> Self {
    // Obtains a second owning handle via the C++ side.
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    // A `UniqueRef` is never null, so this `unwrap()` cannot panic.
    SharedPtr::from(unique_ref).unwrap()
  }
}

impl<T: Shared> Deref for SharedRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: a `SharedRef` is non-null by construction and keeps the
    // pointee alive for as long as it exists.
    unsafe { &*(<T as Shared>::get(&self.0)) }
  }
}

impl<T: Shared> AsRef<T> for SharedRef<T> {
  fn as_ref(&self) -> &T {
    self // Deref coercion: &SharedRef<T> -> &T.
  }
}

impl<T: Shared> Borrow<T> for SharedRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}
335
336#[track_caller]
337fn assert_shared_ptr_use_count_eq<T: Shared>(
338  wrapper_type_name: &str,
339  shared_ptr: &SharedPtrBase<T>,
340  expected: usize,
341) {
342  let mut actual = T::use_count(shared_ptr);
343  let ok = match long::try_from(expected) {
344    Err(_) => false, // Non-`long` value can never match actual use count.
345    Ok(expected) if actual == expected => true, // Fast path.
346    Ok(expected) => {
347      pub const RETRY_TIMEOUT: Duration = Duration::from_secs(1);
348      let start = Instant::now();
349      loop {
350        yield_now();
351        actual = T::use_count(shared_ptr);
352        if actual == expected {
353          break true;
354        } else if start.elapsed() > RETRY_TIMEOUT {
355          break false;
356        }
357      }
358    }
359  };
360  assert!(
361    ok,
362    "assertion failed: `{wrapper_type_name}<{}>` reference count does not match expectation\
363       \n   actual: {actual}\
364       \n expected: {expected}",
365    type_name::<T>(),
366  );
367}
368
/// A trait for values with static lifetimes that are allocated at a fixed
/// address in memory. Practically speaking, that means they're either a
/// `&'static` reference, or they're heap-allocated in a `Arc`, `Box`, `Rc`,
/// `UniqueRef`, `SharedRef` or `Vec`.
pub trait Allocated<T: ?Sized>:
  Deref<Target = T> + Borrow<T> + 'static
{
}
// Blanket impl: any `'static` type that derefs and borrows as `T` qualifies.
impl<A, T: ?Sized> Allocated<T> for A where
  A: Deref<Target = T> + Borrow<T> + 'static
{
}

/// Owner of an allocated `T`, discriminated by how the value is held. Keeps
/// the value alive while erasing the concrete smart-pointer type.
pub(crate) enum Allocation<T: ?Sized + 'static> {
  Static(&'static T),
  Arc(Arc<T>),
  Box(Box<T>),
  Rc(Rc<T>),
  UniqueRef(UniqueRef<T>),
  // Fallback: any other owner, boxed and type-erased.
  Other(Box<dyn Borrow<T> + 'static>),
  // Note: it would be nice to add `SharedRef` to this list, but it
  // requires the `T: Shared` bound, and it's unfortunately not possible
  // to set bounds on individual enum variants.
}
393
impl<T: ?Sized + 'static> Allocation<T> {
  /// Moves `value` (statically typed `Abstract`, known at run time to really
  /// be a `Concrete`) into the variant produced by `wrap`.
  ///
  /// # Safety
  ///
  /// The caller must guarantee `value`'s dynamic type is `Concrete`; the
  /// size assertion is only a sanity check. `transmute_copy` plus `forget`
  /// together perform a by-value move without running `Abstract`'s
  /// destructor, so ownership transfers exactly once.
  unsafe fn transmute_wrap<Abstract, Concrete>(
    value: Abstract,
    wrap: fn(Concrete) -> Self,
  ) -> Self {
    assert_eq!(size_of::<Abstract>(), size_of::<Concrete>());
    let wrapped = wrap(unsafe { transmute_copy(&value) });
    forget(value);
    wrapped
  }

  /// Wraps `value` if its dynamic type is `Concrete`; otherwise returns it
  /// unchanged in `Err` so the next candidate variant can be tried.
  fn try_wrap<Abstract: 'static, Concrete: 'static>(
    value: Abstract,
    wrap: fn(Concrete) -> Self,
  ) -> Result<Self, Abstract> {
    if <dyn Any>::is::<Concrete>(&value) {
      // SAFETY: the `Any` check above proved the dynamic type matches.
      Ok(unsafe { Self::transmute_wrap(value, wrap) })
    } else {
      Err(value)
    }
  }

  /// Classifies `a` into the most specific `Allocation` variant: first an
  /// `Allocation<T>` itself (identity), then `&'static T`, `Arc`, `Box`,
  /// `Rc`, `UniqueRef`, and finally the type-erased `Other` fallback.
  pub fn of<Abstract: Deref<Target = T> + Borrow<T> + 'static>(
    a: Abstract,
  ) -> Self {
    Self::try_wrap(a, identity)
      .or_else(|a| Self::try_wrap(a, Self::Static))
      .or_else(|a| Self::try_wrap(a, Self::Arc))
      .or_else(|a| Self::try_wrap(a, Self::Box))
      .or_else(|a| Self::try_wrap(a, Self::Rc))
      .or_else(|a| Self::try_wrap(a, Self::UniqueRef))
      .unwrap_or_else(|a| Self::Other(Box::from(a)))
  }
}
428
429impl<T: Debug + ?Sized> Debug for Allocation<T> {
430  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
431    match self {
432      Allocation::Arc(r) => f.debug_tuple("Arc").field(&r).finish(),
433      Allocation::Box(b) => f.debug_tuple("Box").field(&b).finish(),
434      Allocation::Other(_) => f.debug_tuple("Other").finish(),
435      Allocation::Rc(r) => f.debug_tuple("Rc").field(&r).finish(),
436      Allocation::Static(s) => f.debug_tuple("Static").field(&s).finish(),
437      Allocation::UniqueRef(u) => f.debug_tuple("UniqueRef").field(&u).finish(),
438    }
439  }
440}
441
impl<T: ?Sized> Deref for Allocation<T> {
  type Target = T;
  /// Borrows the owned value, whichever variant currently holds it.
  fn deref(&self) -> &Self::Target {
    match self {
      Self::Static(v) => v.borrow(),
      Self::Arc(v) => v.borrow(),
      Self::Box(v) => v.borrow(),
      Self::Rc(v) => v.borrow(),
      Self::UniqueRef(v) => v.borrow(),
      Self::Other(v) => (**v).borrow(),
    }
  }
}

impl<T: ?Sized> AsRef<T> for Allocation<T> {
  fn as_ref(&self) -> &T {
    self // Deref coercion: &Allocation<T> -> &T.
  }
}

impl<T: ?Sized> Borrow<T> for Allocation<T> {
  fn borrow(&self) -> &T {
    self
  }
}
467
/// ABI-compatible representation of C++'s `v8::Maybe<bool>`: either a
/// definite boolean value or "nothing".
#[repr(C)]
#[derive(Debug, PartialEq, Eq)]
pub enum MaybeBool {
  JustFalse = 0,
  JustTrue = 1,
  Nothing = 2,
}

impl From<MaybeBool> for Option<bool> {
  fn from(b: MaybeBool) -> Self {
    match b {
      MaybeBool::Nothing => None,
      MaybeBool::JustFalse => Some(false),
      MaybeBool::JustTrue => Some(true),
    }
  }
}

impl From<Option<bool>> for MaybeBool {
  fn from(option: Option<bool>) -> Self {
    match option {
      None => MaybeBool::Nothing,
      Some(b) => {
        if b {
          MaybeBool::JustTrue
        } else {
          MaybeBool::JustFalse
        }
      }
    }
  }
}
495
/// Wrapper around a raw pointer to a C++ vtable.
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct CxxVTable(pub *const Opaque);

/// Wrapper around a raw pointer to a Rust vtable for the trait object type
/// `DynT` (the `PhantomData` ties the vtable to that trait).
#[allow(unused)]
#[derive(Copy, Clone, Debug)]
pub struct RustVTable<DynT>(pub *const Opaque, pub PhantomData<DynT>);
503
/// Records the byte offset of a field of type `F` within some embedder
/// struct, so that a reference to the field can later be converted back
/// into a reference to the embedder.
#[derive(Debug)]
pub struct FieldOffset<F>(usize, PhantomData<F>);

// SAFETY: a `FieldOffset` is just a plain integer; it is as freely sendable
// and shareable as the field type it describes.
unsafe impl<F> Send for FieldOffset<F> where F: Send {}
unsafe impl<F> Sync for FieldOffset<F> where F: Sync {}

impl<F> Copy for FieldOffset<F> {}

impl<F> Clone for FieldOffset<F> {
  fn clone(&self) -> Self {
    Self(self.0, PhantomData)
  }
}

impl<F> FieldOffset<F> {
  /// Computes the offset from a pair of (embedder, field) pointers,
  /// asserting that the field lies entirely inside the embedder struct.
  pub fn from_ptrs<E>(embedder_ptr: *const E, field_ptr: *const F) -> Self {
    let embedder_addr = embedder_ptr as usize;
    let field_addr = field_ptr as usize;
    assert!(field_addr >= embedder_addr);
    assert!((field_addr + size_of::<F>()) <= (embedder_addr + size_of::<E>()));
    Self(field_addr - embedder_addr, PhantomData)
  }

  /// Recovers a reference to the embedder from a reference to its field.
  ///
  /// # Safety
  ///
  /// `field` must be the exact field of a live `E` that this offset was
  /// computed for; otherwise the returned reference is invalid.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder<E>(self, field: &F) -> &E {
    let embedder_addr = (field as *const F as usize) - self.0;
    unsafe { (embedder_addr as *const E).as_ref().unwrap() }
  }

  /// Mutable counterpart of [`Self::to_embedder`].
  ///
  /// # Safety
  ///
  /// Same requirements as [`Self::to_embedder`]; the exclusive borrow of
  /// `field` stands in for an exclusive borrow of the whole embedder.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder_mut<E>(self, field: &mut F) -> &mut E {
    let embedder_addr = (field as *mut F as usize) - self.0;
    unsafe { (embedder_addr as *mut E).as_mut().unwrap() }
  }
}
545
/// Binary-compatible mirror of the C++ `v8::Maybe<T>` template: a `has_value`
/// flag plus a value slot that is only meaningful when the flag is set.
#[repr(C)]
#[derive(Debug, Default)]
pub struct Maybe<T> {
  has_value: bool,
  value: T,
}

impl<T> From<Maybe<T>> for Option<T> {
  /// Yields `Some(value)` only when the `has_value` flag is set.
  fn from(maybe: Maybe<T>) -> Self {
    let Maybe { has_value, value } = maybe;
    has_value.then(|| value)
  }
}
562
/// Trait implemented (blanket impl below) by every `Copy + Sized` type;
/// `get()` materializes an instance out of thin air, which is only actually
/// usable for zero-sized types — `UnitValue` enforces that at compile time.
pub trait UnitType
where
  Self: Copy + Sized,
{
  #[inline(always)]
  fn get() -> Self {
    UnitValue::<Self>::get()
  }
}

impl<T> UnitType for T where T: Copy + Sized {}

/// Zero-sized helper that statically verifies `T` is itself zero-sized and
/// can therefore be conjured from its type alone.
#[derive(Copy, Clone, Debug)]
struct UnitValue<T>(PhantomData<T>)
where
  Self: Sized;

impl<T> UnitValue<T>
where
  Self: Copy + Sized,
{
  const SELF: Self = Self::new_checked();

  const fn new_checked() -> Self {
    // Statically assert that T is indeed a unit type.
    // Indexing a one-element array with `size_of::<T>()` fails const
    // evaluation unless the size is exactly zero.
    let size_must_be_0 = size_of::<T>();
    let s = Self(PhantomData::<T>);
    [s][size_must_be_0]
  }

  #[inline(always)]
  fn get_checked(self) -> T {
    // This run-time check serves just as a backup for the compile-time
    // check when Self::SELF is initialized.
    assert_eq!(size_of::<T>(), 0);
    // SAFETY: `T` is zero-sized (asserted above), so a zeroed value is a
    // trivially valid instance.
    unsafe { std::mem::MaybeUninit::<T>::zeroed().assume_init() }
  }

  #[inline(always)]
  pub fn get() -> T {
    // Accessing the Self::SELF is necessary to make the compile-time type check
    // work.
    Self::SELF.get_checked()
  }
}
608
/// Default tag type for `MapFnFrom`/`MapFnTo` conversions.
#[derive(Debug)]
pub struct DefaultTag;

/// Tag type selecting the trivial `F -> F` conversion below.
#[derive(Debug)]
pub struct IdenticalConversionTag;

/// Conversion from a zero-sized function type `F` into `Self`. The `Tag`
/// parameter keeps otherwise-overlapping impls apart.
pub trait MapFnFrom<F, Tag = DefaultTag>
where
  F: UnitType,
  Self: Sized,
{
  fn mapping() -> Self;

  #[inline(always)]
  fn map_fn_from(_: F) -> Self {
    Self::mapping()
  }
}

// Every unit function type maps to itself.
impl<F> MapFnFrom<F, IdenticalConversionTag> for F
where
  Self: UnitType,
{
  #[inline(always)]
  fn mapping() -> Self {
    Self::get()
  }
}
637
/// Inverse direction of `MapFnFrom`: converts `self` (a unit function type)
/// into `T`. Blanket-implemented below in terms of `MapFnFrom`, so types
/// only ever implement `MapFnFrom` directly.
pub trait MapFnTo<T, Tag = DefaultTag>
where
  Self: UnitType,
  T: Sized,
{
  fn mapping() -> T;

  #[inline(always)]
  fn map_fn_to(self) -> T {
    Self::mapping()
  }
}

impl<F, T, Tag> MapFnTo<T, Tag> for F
where
  Self: UnitType,
  T: MapFnFrom<F, Tag>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::map_fn_from(F::get())
  }
}
661
/// Conversion from a zero-sized Rust callable type `F` into a plain
/// `extern "C"` function pointer with a matching signature; implemented by
/// the `impl_c_fn_from!` macro below for arities 0 through 7.
pub trait CFnFrom<F>
where
  Self: Sized,
  F: UnitType,
{
  fn mapping() -> Self;

  #[inline(always)]
  fn c_fn_from(_: F) -> Self {
    Self::mapping()
  }
}

// Generates a `CFnFrom` impl for `extern "C"` function pointers of a given
// arity. The emitted C-ABI shim recreates the zero-sized callable with
// `F::get()` (free, since `F` is zero-sized) and forwards all arguments.
macro_rules! impl_c_fn_from {
  ($($arg:ident: $ty:ident),*) => {
    impl<F, R, $($ty),*> CFnFrom<F> for unsafe extern "C" fn($($ty),*) -> R
    where
      F: UnitType + Fn($($ty),*) -> R,
    {
      #[inline(always)]
      fn mapping() -> Self {
        unsafe extern "C" fn c_fn<F, R, $($ty),*>($($arg: $ty),*) -> R
        where
          F: UnitType + Fn($($ty),*) -> R,
        {
          (F::get())($($arg),*)
        }
        c_fn::<F, R, $($ty),*>
      }
    }
  };
}

// Impls for arities 0 through 7.
impl_c_fn_from!();
impl_c_fn_from!(a0: A0);
impl_c_fn_from!(a0: A0, a1: A1);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6);
703
/// Ergonomic entry point for `CFnFrom`: `callable.to_c_fn()` yields the
/// corresponding `extern "C"` function pointer.
pub trait ToCFn<T>
where
  Self: UnitType,
  T: Sized,
{
  fn mapping() -> T;

  #[inline(always)]
  fn to_c_fn(self) -> T {
    Self::mapping()
  }
}

impl<F, T> ToCFn<T> for F
where
  Self: UnitType,
  T: CFnFrom<F>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::c_fn_from(F::get())
  }
}
727
/// Unit tests for the smart-pointer wrappers and `Allocation` classifier.
#[cfg(test)]
mod tests {
  use super::*;
  use std::ptr::null;
  use std::sync::atomic::AtomicBool;
  use std::sync::atomic::Ordering;

  // Stand-in for a C++-heap object; `Shared` is implemented by hand below
  // so SharedPtr/SharedRef can be exercised without FFI.
  #[derive(Eq, PartialEq)]
  struct MockSharedObj {
    pub inner: u32,
  }

  impl MockSharedObj {
    const INSTANCE_A: Self = Self { inner: 11111 };
    const INSTANCE_B: Self = Self { inner: 22222 };

    // Sentinel `SharedPtrBase` bit patterns mapped to the two instances.
    const SHARED_PTR_BASE_A: SharedPtrBase<Self> =
      SharedPtrBase([1, 1], PhantomData);
    const SHARED_PTR_BASE_B: SharedPtrBase<Self> =
      SharedPtrBase([2, 2], PhantomData);
  }

  impl Shared for MockSharedObj {
    fn clone(_: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn from_unique_ptr(_: UniquePtr<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn get(p: &SharedPtrBase<Self>) -> *const Self {
      match p {
        &Self::SHARED_PTR_BASE_A => &Self::INSTANCE_A,
        &Self::SHARED_PTR_BASE_B => &Self::INSTANCE_B,
        p if p == &Default::default() => null(),
        _ => unreachable!(),
      }
    }

    fn reset(p: &mut SharedPtrBase<Self>) {
      // Forget the old base instead of dropping it: dropping a
      // `SharedPtrBase` would invoke `reset` again.
      forget(take(p));
    }

    fn use_count(p: &SharedPtrBase<Self>) -> long {
      match p {
        &Self::SHARED_PTR_BASE_A => 1,
        &Self::SHARED_PTR_BASE_B => 2,
        p if p == &Default::default() => 0,
        _ => unreachable!(),
      }
    }
  }

  // Round-trips SharedPtr <-> SharedRef and checks null/use-count state at
  // every step.
  #[test]
  fn shared_ptr_and_shared_ref() {
    let mut shared_ptr_a1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_A);
    assert!(!shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(1);

    let shared_ref_a: SharedRef<_> = shared_ptr_a1.take().unwrap();
    assert_eq!(shared_ref_a.inner, 11111);
    shared_ref_a.assert_use_count_eq(1);

    assert!(shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(0);

    let shared_ptr_a2: SharedPtr<_> = shared_ref_a.into();
    assert!(!shared_ptr_a2.is_null());
    shared_ptr_a2.assert_use_count_eq(1);
    assert_eq!(shared_ptr_a2.unwrap().inner, 11111);

    let mut shared_ptr_b1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_B);
    assert!(!shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(2);

    let shared_ref_b: SharedRef<_> = shared_ptr_b1.take().unwrap();
    assert_eq!(shared_ref_b.inner, 22222);
    shared_ref_b.assert_use_count_eq(2);

    assert!(shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(0);

    let shared_ptr_b2: SharedPtr<_> = shared_ref_b.into();
    assert!(!shared_ptr_b2.is_null());
    shared_ptr_b2.assert_use_count_eq(2);
    assert_eq!(shared_ptr_b2.unwrap().inner, 22222);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedPtr<v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ptr_use_count_assertion_failed() {
    let shared_ptr: SharedPtr<MockSharedObj> = Default::default();
    shared_ptr.assert_use_count_eq(3);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedRef<v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ref_use_count_assertion_failed() {
    let shared_ref = SharedRef(MockSharedObj::SHARED_PTR_BASE_B);
    shared_ref.assert_use_count_eq(7);
  }

  // Flipped to `true` by `TestObj::drop`; each test step swaps it back to
  // `false` to verify exactly one drop happened.
  static TEST_OBJ_DROPPED: AtomicBool = AtomicBool::new(false);

  struct TestObj {
    pub id: u32,
  }

  impl Drop for TestObj {
    fn drop(&mut self) {
      // A double drop would observe `true` here and fail.
      assert!(!TEST_OBJ_DROPPED.swap(true, Ordering::SeqCst));
    }
  }

  // Deref/Borrow-able wrapper that is none of the known `Allocation`
  // variants, forcing classification into `Allocation::Other`.
  struct TestObjRef(TestObj);

  impl Deref for TestObjRef {
    type Target = TestObj;

    fn deref(&self) -> &TestObj {
      &self.0
    }
  }

  impl Borrow<TestObj> for TestObjRef {
    fn borrow(&self) -> &TestObj {
      self
    }
  }

  // Verifies `Allocation::of` picks the right variant for each owner kind
  // and that dropping the `Allocation` drops the owned value exactly once.
  #[test]
  fn allocation() {
    // Static.
    static STATIC_OBJ: TestObj = TestObj { id: 1 };
    let owner = Allocation::of(&STATIC_OBJ);
    match owner {
      Allocation::Static(_) => assert_eq!(owner.id, 1),
      _ => panic!(),
    }
    drop(owner);
    assert!(!TEST_OBJ_DROPPED.load(Ordering::SeqCst));

    // Arc.
    let owner = Allocation::of(Arc::new(TestObj { id: 2 }));
    match owner {
      Allocation::Arc(_) => assert_eq!(owner.id, 2),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Box.
    let owner = Allocation::of(Box::new(TestObj { id: 3 }));
    match owner {
      Allocation::Box(_) => assert_eq!(owner.id, 3),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Rc.
    let owner = Allocation::of(Rc::new(TestObj { id: 4 }));
    match owner {
      Allocation::Rc(_) => assert_eq!(owner.id, 4),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Other.
    let owner = Allocation::of(TestObjRef(TestObj { id: 5 }));
    match owner {
      Allocation::Other(_) => assert_eq!(owner.id, 5),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Contents of Vec should not be moved.
    let vec = vec![1u8, 2, 3, 5, 8, 13, 21];
    let vec_element_ptrs =
      vec.iter().map(|i| i as *const u8).collect::<Vec<_>>();
    let owner = Allocation::of(vec);
    match owner {
      Allocation::Other(_) => {}
      _ => panic!(),
    }
    owner
      .iter()
      .map(|i| i as *const u8)
      .zip(vec_element_ptrs)
      .for_each(|(p1, p2)| assert_eq!(p1, p2));
  }
}