// rusty_v8/support.rs

1use std::any::type_name;
2use std::any::Any;
3use std::any::TypeId;
4use std::borrow::Borrow;
5use std::borrow::BorrowMut;
6use std::convert::identity;
7use std::convert::AsMut;
8use std::convert::AsRef;
9use std::convert::TryFrom;
10use std::fmt::{self, Debug, Formatter};
11use std::hash::BuildHasher;
12use std::hash::Hasher;
13use std::marker::PhantomData;
14use std::mem::align_of;
15use std::mem::forget;
16use std::mem::needs_drop;
17use std::mem::size_of;
18use std::mem::take;
19use std::mem::transmute_copy;
20use std::ops::Deref;
21use std::ops::DerefMut;
22use std::ptr::drop_in_place;
23use std::ptr::null_mut;
24use std::ptr::NonNull;
25use std::rc::Rc;
26use std::sync::Arc;
27use std::thread::yield_now;
28use std::time::Duration;
29use std::time::Instant;
30
// TODO use libc::intptr_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.intptr_t.html
// Signed integer type wide enough to hold a pointer (C's `intptr_t`).
#[allow(non_camel_case_types)]
pub type intptr_t = isize;

// Re-export C primitive types under their lowercase C names so FFI
// signatures elsewhere in the crate read like the C++ headers they mirror.
pub use std::os::raw::c_char as char;
pub use std::os::raw::c_int as int;
pub use std::os::raw::c_long as long;

// Zero-sized stand-in for a C++ type whose layout is unknown on the Rust
// side; only ever used behind a pointer.
pub type Opaque = [u8; 0];
41
/// Pointer to object allocated on the C++ heap. The pointer may be null.
///
/// `repr(transparent)` over `Option<UniqueRef<T>>`: because `UniqueRef` wraps
/// `NonNull<T>`, the `None` case occupies the null-pointer niche, making this
/// type ABI-compatible with a raw C pointer.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniquePtr<T: ?Sized>(Option<UniqueRef<T>>);
46
47impl<T: ?Sized> UniquePtr<T> {
48  pub fn is_null(&self) -> bool {
49    self.0.is_none()
50  }
51
52  pub fn as_ref(&self) -> Option<&UniqueRef<T>> {
53    self.0.as_ref()
54  }
55
56  pub fn as_mut(&mut self) -> Option<&mut UniqueRef<T>> {
57    self.0.as_mut()
58  }
59
60  pub fn take(&mut self) -> Option<UniqueRef<T>> {
61    take(&mut self.0)
62  }
63
64  pub fn unwrap(self) -> UniqueRef<T> {
65    self.0.unwrap()
66  }
67}
68
69impl<T> UniquePtr<T> {
70  pub unsafe fn from_raw(ptr: *mut T) -> Self {
71    assert_unique_ptr_layout_compatible::<Self, T>();
72    Self(UniqueRef::try_from_raw(ptr))
73  }
74
75  pub fn into_raw(self) -> *mut T {
76    self
77      .0
78      .map(|unique_ref| unique_ref.into_raw())
79      .unwrap_or_else(null_mut)
80  }
81}
82
impl<T: Shared> UniquePtr<T> {
  /// Converts into a C++ `shared_ptr`, transferring ownership.
  pub fn make_shared(self) -> SharedPtr<T> {
    self.into()
  }
}

/// The default `UniquePtr` is the null pointer.
impl<T> Default for UniquePtr<T> {
  fn default() -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(None)
  }
}

impl<T> From<UniqueRef<T>> for UniquePtr<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(Some(unique_ref))
  }
}
102
/// Pointer to object allocated on the C++ heap. The pointer may not be null.
///
/// `repr(transparent)` over `NonNull<T>`, so it is ABI-compatible with a raw
/// (non-null) C pointer.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniqueRef<T: ?Sized>(NonNull<T>);
107
impl<T> UniqueRef<T> {
  /// Wraps `ptr` unless it is null, in which case `None` is returned.
  ///
  /// SAFETY: a non-null `ptr` must point to a live `T` that this `UniqueRef`
  /// may exclusively own and eventually drop in place.
  pub(crate) unsafe fn try_from_raw(ptr: *mut T) -> Option<Self> {
    assert_unique_ptr_layout_compatible::<Self, T>();
    NonNull::new(ptr).map(Self)
  }

  /// Like `try_from_raw`, but panics when `ptr` is null.
  pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self::try_from_raw(ptr).unwrap()
  }

  /// Releases ownership and returns the raw pointer. `forget` suppresses this
  /// type's `Drop` impl so the pointee is not destroyed here.
  pub fn into_raw(self) -> *mut T {
    let ptr = self.0.as_ptr();
    forget(self);
    ptr
  }
}
125
impl<T: Shared> UniqueRef<T> {
  /// Converts this unique reference into a C++ `shared_ptr`-backed
  /// `SharedRef`, transferring ownership.
  pub fn make_shared(self) -> SharedRef<T> {
    self.into()
  }
}

impl<T: ?Sized> Drop for UniqueRef<T> {
  fn drop(&mut self) {
    // SAFETY: the pointer is non-null and exclusively owned, so running the
    // pointee's destructor exactly once is sound. Only drop glue runs here;
    // releasing the underlying allocation is the pointee's `Drop` impl's
    // responsibility.
    unsafe { drop_in_place(self.0.as_ptr()) }
  }
}
137
impl<T: ?Sized> Deref for UniqueRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: the pointer is non-null and uniquely owned; the borrow is tied
    // to `&self`.
    unsafe { self.0.as_ref() }
  }
}

impl<T: ?Sized> DerefMut for UniqueRef<T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: as above, with exclusivity guaranteed by `&mut self`.
    unsafe { self.0.as_mut() }
  }
}

// The conversion traits below all just re-borrow through `Deref`/`DerefMut`.
impl<T: ?Sized> AsRef<T> for UniqueRef<T> {
  fn as_ref(&self) -> &T {
    &**self
  }
}

impl<T: ?Sized> AsMut<T> for UniqueRef<T> {
  fn as_mut(&mut self) -> &mut T {
    &mut **self
  }
}

impl<T: ?Sized> Borrow<T> for UniqueRef<T> {
  fn borrow(&self) -> &T {
    &**self
  }
}

impl<T: ?Sized> BorrowMut<T> for UniqueRef<T> {
  fn borrow_mut(&mut self) -> &mut T {
    &mut **self
  }
}
174
/// Verifies at run time that the wrapper type `U` (a `UniqueRef<T>` or
/// `UniquePtr<T>`) has the same memory layout as a raw C pointer, so it can
/// be passed straight through FFI.
///
/// # Panics
/// Panics when `U` does not have the exact size/alignment of `*mut T`, or
/// when `T` has no drop glue — in which case a regular reference should be
/// used instead of `UniquePtr`/`UniqueRef`. Assertion messages include the
/// offending type name to make failures diagnosable.
fn assert_unique_ptr_layout_compatible<U, T>() {
  assert_eq!(
    size_of::<U>(),
    size_of::<*mut T>(),
    "`{}` is not pointer-sized",
    type_name::<U>()
  );
  assert_eq!(
    align_of::<U>(),
    align_of::<*mut T>(),
    "`{}` is not pointer-aligned",
    type_name::<U>()
  );

  // Assert that `T` (probably) implements `Drop`. If it doesn't, a regular
  // reference should be used instead of UniquePtr/UniqueRef.
  assert!(
    needs_drop::<T>(),
    "`{}` has no drop glue; use a plain reference instead",
    type_name::<T>()
  );
}
185
/// Interface for types that can live inside a C++ `std::shared_ptr`. Each
/// method maps onto the matching `shared_ptr` operation on the C++ side.
/// NOTE(review): no implementations are visible in this file — presumably
/// they come from generated FFI bindings; confirm at the use sites.
pub trait Shared
where
  Self: Sized,
{
  fn clone(shared_ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self>;
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self>;
  fn get(shared_ptr: &SharedPtrBase<Self>) -> *const Self;
  fn reset(shared_ptr: &mut SharedPtrBase<Self>);
  fn use_count(shared_ptr: &SharedPtrBase<Self>) -> long;
}
196
/// Private base type which is shared by the `SharedPtr` and `SharedRef`
/// implementations.
///
/// Two pointer-sized words — assumed to match the layout of a C++
/// `std::shared_ptr` (object pointer + control-block pointer); TODO confirm
/// against the C++ ABI in use.
#[repr(C)]
#[derive(Eq, Debug, PartialEq)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

// SAFETY: sending or sharing a shared_ptr handle across threads exposes the
// pointee to concurrent access, hence both impls require `T: Sync`.
unsafe impl<T: Shared + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Sync> Sync for SharedPtrBase<T> {}

impl<T: Shared> Default for SharedPtrBase<T> {
  fn default() -> Self {
    // All-zero bits are treated as the empty/null shared_ptr (see the mock
    // `Shared` impl in the tests below).
    Self([0usize; 2], PhantomData)
  }
}
211
impl<T: Shared> Drop for SharedPtrBase<T> {
  fn drop(&mut self) {
    // Hand the handle back to the `Shared` implementation; `reset` is
    // expected to release this reference (and destroy the pointee when it
    // was the last one).
    <T as Shared>::reset(self);
  }
}
217
/// Wrapper around a C++ shared_ptr. A shared_ptr may be null.
#[repr(C)]
#[derive(Debug)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);
222
223impl<T: Shared> SharedPtr<T> {
224  /// Asserts that the number of references to the shared inner value is equal
225  /// to the `expected` count.
226  ///
227  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
228  /// which usually performs a relaxed load. This function will repeatedly call
229  /// `use_count()` until it returns the expected value, for up to one second.
230  /// Therefore it should probably not be used in performance critical code.
231  #[track_caller]
232  pub fn assert_use_count_eq(&self, expected: usize) {
233    assert_shared_ptr_use_count_eq("SharedPtr", &self.0, expected);
234  }
235
236  pub fn is_null(&self) -> bool {
237    <T as Shared>::get(&self.0).is_null()
238  }
239
240  pub fn take(&mut self) -> Option<SharedRef<T>> {
241    if self.is_null() {
242      None
243    } else {
244      let base = take(&mut self.0);
245      Some(SharedRef(base))
246    }
247  }
248
249  pub fn unwrap(self) -> SharedRef<T> {
250    assert!(!self.is_null());
251    SharedRef(self.0)
252  }
253}
254
/// Clones the underlying C++ shared_ptr handle (expected to bump the
/// reference count on the C++ side).
impl<T: Shared> Clone for SharedPtr<T> {
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

/// The default `SharedPtr` is the null pointer.
impl<T: Shared> Default for SharedPtr<T> {
  fn default() -> Self {
    Self(Default::default())
  }
}

/// Converts anything `UniquePtr`-convertible, transferring ownership into a
/// shared_ptr control block via `Shared::from_unique_ptr`.
impl<T, U> From<U> for SharedPtr<T>
where
  T: Shared,
  U: Into<UniquePtr<T>>,
{
  fn from(unique_ptr: U) -> Self {
    let unique_ptr = unique_ptr.into();
    Self(<T as Shared>::from_unique_ptr(unique_ptr))
  }
}

/// Demotes a non-null `SharedRef` to a (possibly null) `SharedPtr`.
impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
  fn from(mut shared_ref: SharedRef<T>) -> Self {
    // `take` leaves a default (empty) base behind in `shared_ref`, so the
    // transferred contents are not released twice.
    Self(take(&mut shared_ref.0))
  }
}
283
/// Wrapper around a C++ shared_ptr. The shared_ptr is assumed to contain a
/// value and may not be null.
///
/// The non-null invariant is established at construction time (see
/// `SharedPtr::take`/`unwrap`) and relied upon by the `Deref` impl below.
#[repr(C)]
#[derive(Debug)]
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);
289
impl<T: Shared> SharedRef<T> {
  /// Asserts that the number of references to the shared inner value is equal
  /// to the `expected` count.
  ///
  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
  /// which usually performs a relaxed load. This function will repeatedly call
  /// `use_count()` until it returns the expected value, for up to one second.
  /// Therefore it should probably not be used in performance critical code.
  #[track_caller]
  pub fn assert_use_count_eq(&self, expected: usize) {
    assert_shared_ptr_use_count_eq("SharedRef", &self.0, expected);
  }
}

/// Clones the underlying C++ shared_ptr handle (expected to bump the
/// reference count on the C++ side).
impl<T: Shared> Clone for SharedRef<T> {
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}
309
impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    // Route through `SharedPtr`; `unwrap` cannot panic here because the
    // input `UniqueRef` is non-null by construction.
    SharedPtr::from(unique_ref).unwrap()
  }
}

impl<T: Shared> Deref for SharedRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: a `SharedRef` is never null, so `get()` yields a valid pointer
    // for as long as `self` keeps the shared_ptr alive.
    unsafe { &*(<T as Shared>::get(&self.0)) }
  }
}

impl<T: Shared> AsRef<T> for SharedRef<T> {
  fn as_ref(&self) -> &T {
    &**self
  }
}

impl<T: Shared> Borrow<T> for SharedRef<T> {
  fn borrow(&self) -> &T {
    &**self
  }
}
334
335#[track_caller]
336fn assert_shared_ptr_use_count_eq<T: Shared>(
337  wrapper_type_name: &str,
338  shared_ptr: &SharedPtrBase<T>,
339  expected: usize,
340) {
341  let mut actual = T::use_count(shared_ptr);
342  let ok = match long::try_from(expected) {
343    Err(_) => false, // Non-`long` value can never match actual use count.
344    Ok(expected) if actual == expected => true, // Fast path.
345    Ok(expected) => {
346      pub const RETRY_TIMEOUT: Duration = Duration::from_secs(1);
347      let start = Instant::now();
348      loop {
349        yield_now();
350        actual = T::use_count(shared_ptr);
351        if actual == expected {
352          break true;
353        } else if start.elapsed() > RETRY_TIMEOUT {
354          break false;
355        }
356      }
357    }
358  };
359  assert!(
360    ok,
361    "assertion failed: `{}<{}>` reference count does not match expectation\
362       \n   actual: {}\
363       \n expected: {}",
364    wrapper_type_name,
365    type_name::<T>(),
366    actual,
367    expected
368  );
369}
370
/// A trait for values with static lifetimes that are allocated at a fixed
/// address in memory. Practically speaking, that means they're either a
/// `&'static` reference, or they're heap-allocated in a `Arc`, `Box`, `Rc`,
/// `UniqueRef`, `SharedRef` or `Vec`.
pub trait Allocated<T: ?Sized>:
  Deref<Target = T> + Borrow<T> + 'static
{
}
// Blanket impl: anything that can deref/borrow to `T` and lives for
// `'static` counts as an allocation of `T`.
impl<A, T: ?Sized> Allocated<T> for A where
  A: Deref<Target = T> + Borrow<T> + 'static
{
}
383
/// Owner of a statically- or heap-allocated `T`, abstracting over the
/// concrete container so that borrowers get a stable address for as long as
/// the `Allocation` lives.
pub(crate) enum Allocation<T: ?Sized + 'static> {
  Static(&'static T),
  Arc(Arc<T>),
  Box(Box<T>),
  Rc(Rc<T>),
  UniqueRef(UniqueRef<T>),
  // Fallback for any other owner that can lend out a `&T`.
  Other(Box<dyn Borrow<T> + 'static>),
  // Note: it would be nice to add `SharedRef` to this list, but it
  // requires the `T: Shared` bound, and it's unfortunately not possible
  // to set bounds on individual enum variants.
}
395
impl<T: ?Sized + 'static> Allocation<T> {
  /// Moves `value` into the variant produced by `wrap`, reinterpreting its
  /// bits as the `Concrete` type.
  ///
  /// SAFETY: the caller must guarantee that `Abstract` and `Concrete` are in
  /// fact the same type (as `try_wrap` does with a `TypeId` check); the size
  /// assertion is only a backstop. `forget(value)` prevents a double drop
  /// after the bits were duplicated by `transmute_copy`.
  unsafe fn transmute_wrap<Abstract, Concrete>(
    value: Abstract,
    wrap: fn(Concrete) -> Self,
  ) -> Self {
    assert_eq!(size_of::<Abstract>(), size_of::<Concrete>());
    let wrapped = wrap(transmute_copy(&value));
    forget(value);
    wrapped
  }

  /// Wraps `value` if its type is exactly `Concrete`; otherwise returns it
  /// unchanged so the next candidate variant can be tried.
  fn try_wrap<Abstract: 'static, Concrete: 'static>(
    value: Abstract,
    wrap: fn(Concrete) -> Self,
  ) -> Result<Self, Abstract> {
    if <dyn Any>::is::<Concrete>(&value) {
      // SAFETY: the `TypeId` check just above proved Abstract == Concrete.
      Ok(unsafe { Self::transmute_wrap(value, wrap) })
    } else {
      Err(value)
    }
  }

  /// Classifies an arbitrary owner `a` into the most specific variant,
  /// falling back to boxing it as `Other`.
  pub fn of<Abstract: Deref<Target = T> + Borrow<T> + 'static>(
    a: Abstract,
  ) -> Self {
    // First candidate: `a` may already be an `Allocation<T>` (`identity`).
    Self::try_wrap(a, identity)
      .or_else(|a| Self::try_wrap(a, Self::Static))
      .or_else(|a| Self::try_wrap(a, Self::Arc))
      .or_else(|a| Self::try_wrap(a, Self::Box))
      .or_else(|a| Self::try_wrap(a, Self::Rc))
      .or_else(|a| Self::try_wrap(a, Self::UniqueRef))
      .unwrap_or_else(|a| Self::Other(Box::from(a)))
  }
}
430
431impl<T: Debug + ?Sized> Debug for Allocation<T> {
432  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
433    match self {
434      Allocation::Arc(r) => f.debug_tuple("Arc").field(&r).finish(),
435      Allocation::Box(b) => f.debug_tuple("Box").field(&b).finish(),
436      Allocation::Other(_) => f.debug_tuple("Other").finish(),
437      Allocation::Rc(r) => f.debug_tuple("Rc").field(&r).finish(),
438      Allocation::Static(s) => f.debug_tuple("Static").field(&s).finish(),
439      Allocation::UniqueRef(u) => f.debug_tuple("UniqueRef").field(&u).finish(),
440    }
441  }
442}
443
444impl<T: ?Sized> Deref for Allocation<T> {
445  type Target = T;
446  fn deref(&self) -> &Self::Target {
447    match self {
448      Self::Static(v) => v.borrow(),
449      Self::Arc(v) => v.borrow(),
450      Self::Box(v) => v.borrow(),
451      Self::Rc(v) => v.borrow(),
452      Self::UniqueRef(v) => v.borrow(),
453      Self::Other(v) => (&**v).borrow(),
454    }
455  }
456}
457
impl<T: ?Sized> AsRef<T> for Allocation<T> {
  fn as_ref(&self) -> &T {
    // Delegates to the `Deref` impl.
    &**self
  }
}

impl<T: ?Sized> Borrow<T> for Allocation<T> {
  fn borrow(&self) -> &T {
    &**self
  }
}
469
/// C-ABI tri-state boolean; the explicit `repr(C)` discriminants let it
/// cross the FFI boundary by value.
#[repr(C)]
#[derive(Debug, PartialEq)]
pub enum MaybeBool {
  JustFalse = 0,
  JustTrue = 1,
  Nothing = 2,
}

impl From<MaybeBool> for Option<bool> {
  fn from(b: MaybeBool) -> Self {
    match b {
      MaybeBool::Nothing => None,
      MaybeBool::JustTrue => Some(true),
      MaybeBool::JustFalse => Some(false),
    }
  }
}

impl From<Option<bool>> for MaybeBool {
  fn from(option: Option<bool>) -> Self {
    match option {
      None => MaybeBool::Nothing,
      Some(true) => MaybeBool::JustTrue,
      Some(false) => MaybeBool::JustFalse,
    }
  }
}
497
/// Pointer to a C++ vtable. NOTE(review): exact usage is determined by the
/// call sites outside this file — confirm there.
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct CxxVTable(pub *const Opaque);

/// Pointer to a Rust vtable; `PhantomData` records which trait-object type
/// the vtable belongs to.
#[derive(Copy, Clone, Debug)]
pub struct RustVTable<DynT>(pub *const Opaque, pub PhantomData<DynT>);
504
/// Byte offset of a field of type `F` inside some embedder struct, used to
/// recover a reference to the embedder from a reference to the field.
#[derive(Debug)]
pub struct FieldOffset<F>(usize, PhantomData<F>);

// SAFETY: the struct only stores a `usize`; these bounds mirror what an
// auto-derived `PhantomData<F>` marker would imply.
unsafe impl<F> Send for FieldOffset<F> where F: Send {}
unsafe impl<F> Sync for FieldOffset<F> where F: Sync {}

// Manual `Copy`/`Clone` impls avoid the `F: Copy`/`F: Clone` bounds that a
// `derive` would add.
impl<F> Copy for FieldOffset<F> {}

impl<F> Clone for FieldOffset<F> {
  fn clone(&self) -> Self {
    Self(self.0, self.1)
  }
}
518
impl<F> FieldOffset<F> {
  /// Computes the offset of `field_ptr` within the embedder object at
  /// `embedder_ptr`, asserting that the field lies entirely inside it.
  pub fn from_ptrs<E>(embedder_ptr: *const E, field_ptr: *const F) -> Self {
    let embedder_addr = embedder_ptr as usize;
    let field_addr = field_ptr as usize;
    assert!(field_addr >= embedder_addr);
    assert!((field_addr + size_of::<F>()) <= (embedder_addr + size_of::<E>()));
    Self(field_addr - embedder_addr, PhantomData)
  }

  /// Maps a field reference back to its embedder.
  ///
  /// SAFETY: `field` must actually be the `F` field at this offset inside a
  /// live `E`; otherwise the produced reference is invalid.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder<E>(self, field: &F) -> &E {
    (((field as *const _ as usize) - self.0) as *const E)
      .as_ref()
      .unwrap()
  }

  /// Mutable variant of `to_embedder`; same safety requirements, plus the
  /// usual exclusivity rules for `&mut` references.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder_mut<E>(self, field: &mut F) -> &mut E {
    (((field as *mut _ as usize) - self.0) as *mut E)
      .as_mut()
      .unwrap()
  }
}
542
/// A special hasher that is optimized for hashing `std::any::TypeId` values.
/// It can't be used for anything else.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // The single `u64` received via `write_u64`; `None` until then.
  state: Option<u64>,
}
549
550impl Hasher for TypeIdHasher {
551  fn write(&mut self, _bytes: &[u8]) {
552    panic!("TypeIdHasher::write() called unexpectedly");
553  }
554
555  fn write_u64(&mut self, value: u64) {
556    // `TypeId` values are actually 64-bit values which themselves come out of
557    // some hash function, so it's unnecessary to shuffle their bits further.
558    assert_eq!(size_of::<TypeId>(), size_of::<u64>());
559    assert_eq!(align_of::<TypeId>(), size_of::<u64>());
560    let prev_state = self.state.replace(value);
561    assert_eq!(prev_state, None);
562  }
563
564  fn finish(&self) -> u64 {
565    self.state.unwrap()
566  }
567}
568
/// Factory for instances of `TypeIdHasher`. This is the type that one would
/// pass to the constructor of some map/set type in order to make it use
/// `TypeIdHasher` instead of the default hasher implementation.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;

impl BuildHasher for BuildTypeIdHasher {
  type Hasher = TypeIdHasher;

  // Each hash operation starts from a fresh, empty hasher.
  fn build_hasher(&self) -> Self::Hasher {
    Default::default()
  }
}
582
/// FFI mirror of C++ `v8::Maybe<T>`: a `repr(C)` flag/value pair. `value` is
/// only meaningful when `has_value` is set.
#[repr(C)]
#[derive(Debug, Default)]
pub struct Maybe<T> {
  has_value: bool,
  value: T,
}

impl<T> From<Maybe<T>> for Option<T> {
  fn from(maybe: Maybe<T>) -> Self {
    match maybe {
      Maybe { has_value: true, value } => Some(value),
      Maybe { has_value: false, .. } => None,
    }
  }
}
599
/// Provides `Self::get()` to conjure an instance of a zero-sized ("unit")
/// type out of nothing; the zero-size requirement is enforced by `UnitValue`.
pub trait UnitType
where
  Self: Copy + Sized,
{
  #[inline(always)]
  fn get() -> Self {
    UnitValue::<Self>::get()
  }
}

// Blanket impl: any `Copy` type qualifies syntactically; a non-zero-sized
// type fails `UnitValue`'s compile-time size check when `get()` is used.
impl<T> UnitType for T where T: Copy + Sized {}
611
// Helper that materializes values of zero-sized types for `UnitType`.
#[derive(Copy, Clone, Debug)]
struct UnitValue<T>(PhantomData<T>)
where
  Self: Sized;

impl<T> UnitValue<T>
where
  Self: Copy + Sized,
{
  const SELF: Self = Self::new_checked();

  // Indexing the one-element array `[s]` with `size_of::<T>()` makes this
  // const fail to evaluate (out-of-bounds) for any `T` whose size is not 0,
  // turning the "T must be zero-sized" requirement into a compile-time error.
  const fn new_checked() -> Self {
    // Statically assert that T is indeed a unit type.
    let size_must_be_0 = size_of::<T>();
    let s = Self(PhantomData::<T>);
    [s][size_must_be_0]
  }

  #[inline(always)]
  fn get_checked(self) -> T {
    // This run-time check serves just as a backup for the compile-time
    // check when Self::SELF is initialized.
    assert_eq!(size_of::<T>(), 0);
    // SAFETY: `T` is zero-sized, so a zeroed (i.e. empty) value is valid.
    unsafe { std::mem::MaybeUninit::<T>::zeroed().assume_init() }
  }

  #[inline(always)]
  pub fn get() -> T {
    // Accessing the Self::SELF is necessary to make the compile-time type check
    // work.
    Self::SELF.get_checked()
  }
}
645
/// Default dispatch tag for the `MapFnFrom`/`MapFnTo` conversions.
#[derive(Debug)]
pub struct DefaultTag;

/// Tag selecting the no-op conversion of a function type to itself.
#[derive(Debug)]
pub struct IdenticalConversionTag;

/// Conversion from a zero-sized function/closure type `F` into `Self`.
/// The `Tag` parameter disambiguates otherwise-overlapping impls.
pub trait MapFnFrom<F, Tag = DefaultTag>
where
  F: UnitType,
  Self: Sized,
{
  fn mapping() -> Self;

  // Value-level entry point; the argument is only used for type inference.
  #[inline(always)]
  fn map_fn_from(_: F) -> Self {
    Self::mapping()
  }
}
664
/// A unit-type function maps to itself without any conversion.
impl<F> MapFnFrom<F, IdenticalConversionTag> for F
where
  Self: UnitType,
{
  #[inline(always)]
  fn mapping() -> Self {
    Self::get()
  }
}

/// The inverse direction of `MapFnFrom`, enabling `f.map_fn_to()` on the
/// zero-sized callable itself.
pub trait MapFnTo<T, Tag = DefaultTag>
where
  Self: UnitType,
  T: Sized,
{
  fn mapping() -> T;

  #[inline(always)]
  fn map_fn_to(self) -> T {
    Self::mapping()
  }
}

/// Every `MapFnFrom` impl automatically provides the matching `MapFnTo`.
impl<F, T, Tag> MapFnTo<T, Tag> for F
where
  Self: UnitType,
  T: MapFnFrom<F, Tag>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::map_fn_from(F::get())
  }
}
698
/// Conversion from a zero-sized Rust fn/closure type `F` into a C-ABI
/// function-pointer type (`Self`); impls are generated by `impl_c_fn_from!`.
pub trait CFnFrom<F>
where
  Self: Sized,
  F: UnitType,
{
  fn mapping() -> Self;

  // Value-level entry point; the argument is only used for type inference.
  #[inline(always)]
  fn c_fn_from(_: F) -> Self {
    Self::mapping()
  }
}
711
// Generates, for one arity, a `CFnFrom` impl that converts a zero-sized
// callable type `F` into a plain `extern "C" fn` pointer. The emitted shim
// `c_fn` materializes `F` via `UnitType::get()` (possible because `F` is
// zero-sized) and forwards the arguments.
macro_rules! impl_c_fn_from {
  ($($arg:ident: $ty:ident),*) => {
    impl<F, R, $($ty),*> CFnFrom<F> for extern "C" fn($($ty),*) -> R
    where
      F: UnitType + Fn($($ty),*) -> R,
    {
      #[inline(always)]
      fn mapping() -> Self {
        extern "C" fn c_fn<F, R, $($ty),*>($($arg: $ty),*) -> R
        where
          F: UnitType + Fn($($ty),*) -> R,
        {
          (F::get())($($arg),*)
        }
        c_fn::<F, R, $($ty),*>
      }
    }
  };
}

// Support callables of zero up to seven arguments.
impl_c_fn_from!();
impl_c_fn_from!(a0: A0);
impl_c_fn_from!(a0: A0, a1: A1);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6);
740
/// Value-level adapter: `f.to_c_fn()` converts the zero-sized callable `f`
/// into the corresponding `extern "C"` function pointer.
pub trait ToCFn<T>
where
  Self: UnitType,
  T: Sized,
{
  fn mapping() -> T;

  #[inline(always)]
  fn to_c_fn(self) -> T {
    Self::mapping()
  }
}

/// Blanket impl wiring `ToCFn` to the macro-generated `CFnFrom` conversions.
impl<F, T> ToCFn<T> for F
where
  Self: UnitType,
  T: CFnFrom<F>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::c_fn_from(F::get())
  }
}
764
765#[cfg(test)]
766mod tests {
767  use super::*;
768  use std::ptr::null;
769  use std::sync::atomic::AtomicBool;
770  use std::sync::atomic::Ordering;
771
  // Stand-in for a C++-heap object. The `SharedPtrBase` bit patterns below
  // act as sentinels that the mock `get`/`use_count` implementations map
  // back to the corresponding instance.
  #[derive(Eq, PartialEq)]
  struct MockSharedObj {
    pub inner: u32,
  }

  impl MockSharedObj {
    const INSTANCE_A: Self = Self { inner: 11111 };
    const INSTANCE_B: Self = Self { inner: 22222 };

    const SHARED_PTR_BASE_A: SharedPtrBase<Self> =
      SharedPtrBase([1, 1], PhantomData);
    const SHARED_PTR_BASE_B: SharedPtrBase<Self> =
      SharedPtrBase([2, 2], PhantomData);
  }
786
  // Only the operations exercised by these tests are implemented; `clone`
  // and `from_unique_ptr` are never called here.
  impl Shared for MockSharedObj {
    fn clone(_: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn from_unique_ptr(_: UniquePtr<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    // Maps the sentinel bit patterns to their mock instances; the default
    // (all-zero) pattern plays the role of the null shared_ptr.
    fn get(p: &SharedPtrBase<Self>) -> *const Self {
      match p {
        &Self::SHARED_PTR_BASE_A => &Self::INSTANCE_A,
        &Self::SHARED_PTR_BASE_B => &Self::INSTANCE_B,
        p if p == &Default::default() => null(),
        _ => unreachable!(),
      }
    }

    fn reset(p: &mut SharedPtrBase<Self>) {
      // Swap in the default value and forget the old one, so dropping it
      // does not re-enter `reset` via `SharedPtrBase::drop`.
      forget(take(p));
    }

    fn use_count(p: &SharedPtrBase<Self>) -> long {
      match p {
        &Self::SHARED_PTR_BASE_A => 1,
        &Self::SHARED_PTR_BASE_B => 2,
        p if p == &Default::default() => 0,
        _ => unreachable!(),
      }
    }
  }
818
  // Round-trips SharedPtr <-> SharedRef for two mock objects with different
  // use counts, checking nullness and counts at every step.
  #[test]
  fn shared_ptr_and_shared_ref() {
    // Object A: use count 1.
    let mut shared_ptr_a1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_A);
    assert!(!shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(1);

    let shared_ref_a: SharedRef<_> = shared_ptr_a1.take().unwrap();
    assert_eq!(shared_ref_a.inner, 11111);
    shared_ref_a.assert_use_count_eq(1);

    // After `take()` the original pointer must be empty.
    assert!(shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(0);

    let shared_ptr_a2: SharedPtr<_> = shared_ref_a.into();
    assert!(!shared_ptr_a2.is_null());
    shared_ptr_a2.assert_use_count_eq(1);
    assert_eq!(shared_ptr_a2.unwrap().inner, 11111);

    // Object B: same round trip with use count 2.
    let mut shared_ptr_b1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_B);
    assert!(!shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(2);

    let shared_ref_b: SharedRef<_> = shared_ptr_b1.take().unwrap();
    assert_eq!(shared_ref_b.inner, 22222);
    shared_ref_b.assert_use_count_eq(2);

    assert!(shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(0);

    let shared_ptr_b2: SharedPtr<_> = shared_ref_b.into();
    assert!(!shared_ptr_b2.is_null());
    shared_ptr_b2.assert_use_count_eq(2);
    assert_eq!(shared_ptr_b2.unwrap().inner, 22222);
  }
853
  // A use-count mismatch must panic with the documented message (these
  // `expected` strings pin the format produced by
  // `assert_shared_ptr_use_count_eq`).
  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedPtr<rusty_v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ptr_use_count_assertion_failed() {
    let shared_ptr: SharedPtr<MockSharedObj> = Default::default();
    shared_ptr.assert_use_count_eq(3);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedRef<rusty_v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ref_use_count_assertion_failed() {
    let shared_ref = SharedRef(MockSharedObj::SHARED_PTR_BASE_B);
    shared_ref.assert_use_count_eq(7);
  }
871
  // Set by `TestObj::drop`; the `allocation` test below both verifies and
  // resets it after every owner is dropped.
  static TEST_OBJ_DROPPED: AtomicBool = AtomicBool::new(false);

  struct TestObj {
    pub id: u32,
  }

  impl Drop for TestObj {
    fn drop(&mut self) {
      // Record the drop, and fail if any object is somehow dropped twice
      // without the flag being reset in between.
      assert!(!TEST_OBJ_DROPPED.swap(true, Ordering::SeqCst));
    }
  }

  // Owner type that is none of the specially-recognized containers, so
  // `Allocation::of` must classify it as `Other`.
  struct TestObjRef(TestObj);

  impl Deref for TestObjRef {
    type Target = TestObj;

    fn deref(&self) -> &TestObj {
      &self.0
    }
  }

  impl Borrow<TestObj> for TestObjRef {
    fn borrow(&self) -> &TestObj {
      &**self
    }
  }
899
  // Checks that `Allocation::of` picks the right variant for each owner
  // kind, that dropping the `Allocation` drops (or, for `Static`, does not
  // drop) the owned object, and that wrapping never moves heap contents.
  #[test]
  fn allocation() {
    // Static.
    static STATIC_OBJ: TestObj = TestObj { id: 1 };
    let owner = Allocation::of(&STATIC_OBJ);
    match owner {
      Allocation::Static(_) => assert_eq!(owner.id, 1),
      _ => panic!(),
    }
    drop(owner);
    // Dropping an `Allocation::Static` must not drop the referent.
    assert!(!TEST_OBJ_DROPPED.load(Ordering::SeqCst));

    // Arc.
    let owner = Allocation::of(Arc::new(TestObj { id: 2 }));
    match owner {
      Allocation::Arc(_) => assert_eq!(owner.id, 2),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Box.
    let owner = Allocation::of(Box::new(TestObj { id: 3 }));
    match owner {
      Allocation::Box(_) => assert_eq!(owner.id, 3),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Rc.
    let owner = Allocation::of(Rc::new(TestObj { id: 4 }));
    match owner {
      Allocation::Rc(_) => assert_eq!(owner.id, 4),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Other.
    let owner = Allocation::of(TestObjRef(TestObj { id: 5 }));
    match owner {
      Allocation::Other(_) => assert_eq!(owner.id, 5),
      _ => panic!(),
    }
    drop(owner);
    assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst));

    // Contents of Vec should not be moved.
    let vec = vec![1u8, 2, 3, 5, 8, 13, 21];
    let vec_element_ptrs =
      vec.iter().map(|i| i as *const u8).collect::<Vec<_>>();
    let owner = Allocation::of(vec);
    match owner {
      Allocation::Other(_) => {}
      _ => panic!(),
    }
    owner
      .iter()
      .map(|i| i as *const u8)
      .zip(vec_element_ptrs)
      .for_each(|(p1, p2)| assert_eq!(p1, p2));
962  }
963}