v8/support.rs

use std::any::type_name;
use std::borrow::Borrow;
use std::borrow::BorrowMut;
use std::convert::AsMut;
use std::convert::AsRef;
use std::convert::TryFrom;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::mem::align_of;
use std::mem::forget;
use std::mem::size_of;
use std::mem::take;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ptr::NonNull;
use std::ptr::drop_in_place;
use std::ptr::null_mut;
use std::thread::yield_now;
use std::time::Duration;
use std::time::Instant;

// TODO use libc::intptr_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.intptr_t.html
#[allow(non_camel_case_types)]
pub type intptr_t = isize;

// TODO use libc::size_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.size_t.html
#[allow(non_camel_case_types)]
pub type size_t = usize;

pub use std::os::raw::c_char as char;
pub use std::os::raw::c_int as int;
pub use std::os::raw::c_long as long;

pub type Opaque = [u8; 0];

/// Pointer to object allocated on the C++ heap. The pointer may be null.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniquePtr<T: ?Sized>(Option<UniqueRef<T>>);

impl<T: ?Sized> UniquePtr<T> {
  pub fn is_null(&self) -> bool {
    self.0.is_none()
  }

  pub fn as_ref(&self) -> Option<&UniqueRef<T>> {
    self.0.as_ref()
  }

  pub fn as_mut(&mut self) -> Option<&mut UniqueRef<T>> {
    self.0.as_mut()
  }

  pub fn take(&mut self) -> Option<UniqueRef<T>> {
    take(&mut self.0)
  }

  pub fn unwrap(self) -> UniqueRef<T> {
    self.0.unwrap()
  }
}

impl<T> UniquePtr<T> {
  pub unsafe fn from_raw(ptr: *mut T) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(unsafe { UniqueRef::try_from_raw(ptr) })
  }

  pub fn into_raw(self) -> *mut T {
    self
      .0
      .map_or_else(null_mut, |unique_ref| unique_ref.into_raw())
  }
}

impl<T: Shared> UniquePtr<T> {
  pub fn make_shared(self) -> SharedPtr<T> {
    self.into()
  }
}

impl<T> Default for UniquePtr<T> {
  fn default() -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(None)
  }
}

impl<T> From<UniqueRef<T>> for UniquePtr<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(Some(unique_ref))
  }
}

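// A minimal usage sketch, not part of the original file. It assumes a
// hypothetical type `Foo` and a hypothetical extern "C" binding
// `hypothetical_new_foo()` that returns an owned `*mut Foo` allocated on the
// C++ heap; the names are illustrative only.
//
//   let mut ptr: UniquePtr<Foo> = unsafe {
//     // A null return value yields an empty `UniquePtr` rather than a panic.
//     UniquePtr::from_raw(hypothetical_new_foo())
//   };
//   if !ptr.is_null() {
//     // `take()` turns the non-null case into a `UniqueRef<Foo>`, which
//     // derefs to `&Foo` / `&mut Foo` and runs `Foo`'s `Drop` impl in place
//     // when it goes out of scope.
//     let unique_ref: UniqueRef<Foo> = ptr.take().unwrap();
//     // Alternatively, release ownership back to the C++ side as a raw
//     // pointer.
//     let _raw: *mut Foo = unique_ref.into_raw();
//   }
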
/// Pointer to object allocated on the C++ heap. The pointer may not be null.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniqueRef<T: ?Sized>(NonNull<T>);

impl<T> UniqueRef<T> {
  pub(crate) unsafe fn try_from_raw(ptr: *mut T) -> Option<Self> {
    assert_unique_ptr_layout_compatible::<Self, T>();
    NonNull::new(ptr).map(Self)
  }

  pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
    assert_unique_ptr_layout_compatible::<Self, T>();
    unsafe { Self::try_from_raw(ptr).unwrap() }
  }

  pub fn into_raw(self) -> *mut T {
    let ptr = self.0.as_ptr();
    forget(self);
    ptr
  }

  pub(crate) fn as_ptr(&self) -> *mut T {
    self.0.as_ptr()
  }
}

impl<T: Shared> UniqueRef<T> {
  pub fn make_shared(self) -> SharedRef<T> {
    self.into()
  }
}

impl<T: ?Sized> Drop for UniqueRef<T> {
  fn drop(&mut self) {
    unsafe { drop_in_place(self.0.as_ptr()) }
  }
}

impl<T: ?Sized> Deref for UniqueRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    unsafe { self.0.as_ref() }
  }
}

impl<T: ?Sized> DerefMut for UniqueRef<T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    unsafe { self.0.as_mut() }
  }
}

impl<T: ?Sized> AsRef<T> for UniqueRef<T> {
  fn as_ref(&self) -> &T {
    self
  }
}

impl<T: ?Sized> AsMut<T> for UniqueRef<T> {
  fn as_mut(&mut self) -> &mut T {
    self
  }
}

impl<T: ?Sized> Borrow<T> for UniqueRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}

impl<T: ?Sized> BorrowMut<T> for UniqueRef<T> {
  fn borrow_mut(&mut self) -> &mut T {
    self
  }
}

fn assert_unique_ptr_layout_compatible<U, T>() {
  // Assert that `U` (a `UniqueRef` or `UniquePtr`) has the same memory layout
  // as a raw C pointer.
  assert_eq!(size_of::<U>(), size_of::<*mut T>());
  assert_eq!(align_of::<U>(), align_of::<*mut T>());
}

pub trait Shared
where
  Self: Sized,
{
  fn clone(shared_ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self>;
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self>;
  fn get(shared_ptr: &SharedPtrBase<Self>) -> *const Self;
  fn reset(shared_ptr: &mut SharedPtrBase<Self>);
  fn use_count(shared_ptr: &SharedPtrBase<Self>) -> long;
}

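// Sketch, not part of the original file, of how `Shared` is meant to be
// implemented: every method forwards to C++ glue code that operates on a real
// `std::shared_ptr<T>` stored in the two-word `SharedPtrBase`. The
// `hypothetical_shared_ptr_foo_*` functions below are placeholders for such
// FFI bindings, not actual APIs.
//
//   impl Shared for Foo {
//     fn clone(p: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
//       unsafe { hypothetical_shared_ptr_foo_clone(p) }
//     }
//     fn from_unique_ptr(u: UniquePtr<Self>) -> SharedPtrBase<Self> {
//       unsafe { hypothetical_shared_ptr_foo_from_unique(u.into_raw()) }
//     }
//     fn get(p: &SharedPtrBase<Self>) -> *const Self {
//       unsafe { hypothetical_shared_ptr_foo_get(p) }
//     }
//     fn reset(p: &mut SharedPtrBase<Self>) {
//       unsafe { hypothetical_shared_ptr_foo_reset(p) }
//     }
//     fn use_count(p: &SharedPtrBase<Self>) -> long {
//       unsafe { hypothetical_shared_ptr_foo_use_count(p) }
//     }
//   }
//
// The `MockSharedObj` in the test module at the bottom of this file
// implements the same trait without any FFI.
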
/// Private base type which is shared by the `SharedPtr` and `SharedRef`
/// implementations.
#[repr(C)]
#[derive(Eq, Debug, PartialEq)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

unsafe impl<T: Shared + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Sync> Sync for SharedPtrBase<T> {}

impl<T: Shared> Default for SharedPtrBase<T> {
  fn default() -> Self {
    Self([0usize; 2], PhantomData)
  }
}

impl<T: Shared> Drop for SharedPtrBase<T> {
  fn drop(&mut self) {
    <T as Shared>::reset(self);
  }
}

/// Wrapper around a C++ shared_ptr. A shared_ptr may be null.
#[repr(C)]
#[derive(Debug)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);

impl<T: Shared> SharedPtr<T> {
  /// Asserts that the number of references to the shared inner value is equal
  /// to the `expected` count.
  ///
  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
  /// which usually performs a relaxed load. This function will repeatedly call
  /// `use_count()` until it returns the expected value, for up to one second.
  /// Therefore it should probably not be used in performance-critical code.
  #[track_caller]
  pub fn assert_use_count_eq(&self, expected: usize) {
    assert_shared_ptr_use_count_eq("SharedPtr", &self.0, expected);
  }

  pub fn is_null(&self) -> bool {
    <T as Shared>::get(&self.0).is_null()
  }

  pub fn take(&mut self) -> Option<SharedRef<T>> {
    if self.is_null() {
      None
    } else {
      let base = take(&mut self.0);
      Some(SharedRef(base))
    }
  }

  pub fn unwrap(self) -> SharedRef<T> {
    assert!(!self.is_null());
    SharedRef(self.0)
  }
}

impl<T: Shared> Clone for SharedPtr<T> {
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> Default for SharedPtr<T> {
  fn default() -> Self {
    Self(Default::default())
  }
}

impl<T, U> From<U> for SharedPtr<T>
where
  T: Shared,
  U: Into<UniquePtr<T>>,
{
  fn from(unique_ptr: U) -> Self {
    let unique_ptr = unique_ptr.into();
    Self(<T as Shared>::from_unique_ptr(unique_ptr))
  }
}

impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
  fn from(mut shared_ref: SharedRef<T>) -> Self {
    Self(take(&mut shared_ref.0))
  }
}

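// Usage sketch, not part of the original file, assuming a hypothetical type
// `Foo: Shared` and some `unique_ptr: UniquePtr<Foo>`:
//
//   // Ownership moves into a C++ shared_ptr via `Shared::from_unique_ptr`.
//   let shared: SharedPtr<Foo> = unique_ptr.make_shared();
//   if !shared.is_null() {
//     // `unwrap()` (or `take()`) converts to the non-nullable `SharedRef`,
//     // which implements `Deref<Target = Foo>`.
//     let shared_ref: SharedRef<Foo> = shared.unwrap();
//     // Cloning bumps the reference count through `Shared::clone`.
//     let shared_ref2 = shared_ref.clone();
//     drop(shared_ref2);
//   }
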
/// Wrapper around a C++ shared_ptr. The shared_ptr is assumed to contain a
/// value and may not be null.
#[repr(C)]
#[derive(Debug)]
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);

impl<T: Shared> SharedRef<T> {
  /// Asserts that the number of references to the shared inner value is equal
  /// to the `expected` count.
  ///
  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
  /// which usually performs a relaxed load. This function will repeatedly call
  /// `use_count()` until it returns the expected value, for up to one second.
  /// Therefore it should probably not be used in performance-critical code.
  #[track_caller]
  pub fn assert_use_count_eq(&self, expected: usize) {
    assert_shared_ptr_use_count_eq("SharedRef", &self.0, expected);
  }
}

impl<T: Shared> Clone for SharedRef<T> {
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    SharedPtr::from(unique_ref).unwrap()
  }
}

impl<T: Shared> Deref for SharedRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    unsafe { &*(<T as Shared>::get(&self.0)) }
  }
}

impl<T: Shared> AsRef<T> for SharedRef<T> {
  fn as_ref(&self) -> &T {
    self
  }
}

impl<T: Shared> Borrow<T> for SharedRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}

#[track_caller]
fn assert_shared_ptr_use_count_eq<T: Shared>(
  wrapper_type_name: &str,
  shared_ptr: &SharedPtrBase<T>,
  expected: usize,
) {
  let mut actual = T::use_count(shared_ptr);
  let ok = match long::try_from(expected) {
    Err(_) => false, // Non-`long` value can never match actual use count.
    Ok(expected) if actual == expected => true, // Fast path.
    Ok(expected) => {
      pub const RETRY_TIMEOUT: Duration = Duration::from_secs(1);
      let start = Instant::now();
      loop {
        yield_now();
        actual = T::use_count(shared_ptr);
        if actual == expected {
          break true;
        } else if start.elapsed() > RETRY_TIMEOUT {
          break false;
        }
      }
    }
  };
  assert!(
    ok,
    "assertion failed: `{wrapper_type_name}<{}>` reference count does not match expectation\
       \n   actual: {actual}\
       \n expected: {expected}",
    type_name::<T>(),
  );
}

#[repr(C)]
#[derive(Debug, PartialEq, Eq)]
pub enum MaybeBool {
  JustFalse = 0,
  JustTrue = 1,
  Nothing = 2,
}

impl From<MaybeBool> for Option<bool> {
  fn from(b: MaybeBool) -> Self {
    match b {
      MaybeBool::JustFalse => Some(false),
      MaybeBool::JustTrue => Some(true),
      MaybeBool::Nothing => None,
    }
  }
}

impl From<Option<bool>> for MaybeBool {
  fn from(option: Option<bool>) -> Self {
    match option {
      Some(false) => MaybeBool::JustFalse,
      Some(true) => MaybeBool::JustTrue,
      None => MaybeBool::Nothing,
    }
  }
}

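// Conversion sketch, not part of the original file: `MaybeBool` is a
// C-compatible tri-state boolean, intended for passing an optional `bool`
// across the FFI boundary, and is normally converted to and from
// `Option<bool>` on the Rust side.
//
//   let maybe: MaybeBool = Some(true).into();
//   assert_eq!(maybe, MaybeBool::JustTrue);
//
//   let round_trip: Option<bool> = maybe.into();
//   assert_eq!(round_trip, Some(true));
//
//   let nothing: Option<bool> = MaybeBool::Nothing.into();
//   assert_eq!(nothing, None);
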
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct CxxVTable(pub *const Opaque);

#[allow(unused)]
#[derive(Copy, Clone, Debug)]
pub struct RustVTable<DynT>(pub *const Opaque, pub PhantomData<DynT>);

#[derive(Debug)]
pub struct FieldOffset<F>(usize, PhantomData<F>);

unsafe impl<F> Send for FieldOffset<F> where F: Send {}
unsafe impl<F> Sync for FieldOffset<F> where F: Sync {}

impl<F> Copy for FieldOffset<F> {}

impl<F> Clone for FieldOffset<F> {
  fn clone(&self) -> Self {
    *self
  }
}

impl<F> FieldOffset<F> {
  pub fn from_ptrs<E>(embedder_ptr: *const E, field_ptr: *const F) -> Self {
    let embedder_addr = embedder_ptr as usize;
    let field_addr = field_ptr as usize;
    assert!(field_addr >= embedder_addr);
    assert!((field_addr + size_of::<F>()) <= (embedder_addr + size_of::<E>()));
    Self(field_addr - embedder_addr, PhantomData)
  }

  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder<E>(self, field: &F) -> &E {
    unsafe {
      (((field as *const _ as usize) - self.0) as *const E)
        .as_ref()
        .unwrap()
    }
  }
}

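// Sketch, not part of the original file: `FieldOffset` records the byte
// offset of a field within an enclosing embedder struct, so that a pointer to
// the field can later be mapped back to the enclosing struct. The `Embedder`
// type below is hypothetical.
//
//   struct Embedder {
//     some_state: u64,
//     field: u32,
//   }
//
//   let embedder = Embedder { some_state: 0, field: 42 };
//   // Record the offset of `embedder.field` inside `Embedder`.
//   let offset = FieldOffset::from_ptrs(&embedder, &embedder.field);
//   // Later, recover `&Embedder` from a reference to the field. This is only
//   // sound if the reference really points into an `Embedder`.
//   let recovered: &Embedder = unsafe { offset.to_embedder(&embedder.field) };
//   assert_eq!(recovered.field, 42);
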
#[repr(C)]
#[derive(Debug, Default)]
pub struct Maybe<T> {
  has_value: bool,
  value: T,
}

impl<T> From<Maybe<T>> for Option<T> {
  fn from(maybe: Maybe<T>) -> Self {
    if maybe.has_value {
      Some(maybe.value)
    } else {
      None
    }
  }
}

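// Sketch, not part of the original file: `Maybe<T>` appears intended for
// returning an optional value by value over the C ABI (hence `#[repr(C)]` and
// the explicit `has_value` flag); Rust callers convert it to `Option<T>`
// right away.
//
//   let present = Maybe { has_value: true, value: 123i32 };
//   let opt: Option<i32> = present.into();
//   assert_eq!(opt, Some(123));
//
//   // When `has_value` is false, the (default-initialized) payload is
//   // discarded.
//   let absent: Option<i32> = Maybe { has_value: false, value: 0i32 }.into();
//   assert_eq!(absent, None);
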
pub trait UnitType
where
  Self: Copy + Sized,
{
  #[inline(always)]
  fn get() -> Self {
    UnitValue::<Self>::get()
  }
}

impl<T> UnitType for T where T: Copy + Sized {}

#[derive(Copy, Clone, Debug)]
struct UnitValue<T>(PhantomData<T>)
where
  Self: Sized;

impl<T> UnitValue<T>
where
  Self: Copy + Sized,
{
  const SELF: Self = Self::new_checked();

  const fn new_checked() -> Self {
    // Statically assert that T is indeed a unit type.
    let size_must_be_0 = size_of::<T>();
    let s = Self(PhantomData::<T>);
    [s][size_must_be_0]
  }

  #[inline(always)]
  fn get_checked(self) -> T {
    // This run-time check serves just as a backup for the compile-time
    // check when Self::SELF is initialized.
    assert_eq!(size_of::<T>(), 0);
    unsafe { std::mem::MaybeUninit::<T>::zeroed().assume_init() }
  }

  #[inline(always)]
  pub fn get() -> T {
    // Accessing `Self::SELF` is necessary to make the compile-time type check
    // work.
    Self::SELF.get_checked()
  }
}

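// Sketch, not part of the original file: `UnitType` lets zero-sized values,
// such as function items and non-capturing closures, be recreated from their
// type alone. This is what allows the `MapFnTo`/`ToCFn` machinery below to
// work without storing any state.
//
//   fn add(a: i32, b: i32) -> i32 {
//     a + b
//   }
//
//   // `add` has a zero-sized function item type, so `UnitType::get()` can
//   // conjure a value of it out of thin air; a non-zero-sized `T` fails the
//   // size check in `UnitValue::new_checked()`.
//   fn call_twice<F: UnitType + Fn(i32, i32) -> i32>(_f: F) -> i32 {
//     let f = F::get();
//     f(1, 2) + f(3, 4)
//   }
//
//   assert_eq!(call_twice(add), 10);
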
#[derive(Debug)]
pub struct DefaultTag;

#[derive(Debug)]
pub struct IdenticalConversionTag;

pub trait MapFnFrom<F, Tag = DefaultTag>
where
  F: UnitType,
  Self: Sized,
{
  fn mapping() -> Self;

  #[inline(always)]
  fn map_fn_from(_: F) -> Self {
    Self::mapping()
  }
}

impl<F> MapFnFrom<F, IdenticalConversionTag> for F
where
  Self: UnitType,
{
  #[inline(always)]
  fn mapping() -> Self {
    Self::get()
  }
}

pub trait MapFnTo<T, Tag = DefaultTag>
where
  Self: UnitType,
  T: Sized,
{
  fn mapping() -> T;

  #[inline(always)]
  fn map_fn_to(self) -> T {
    Self::mapping()
  }
}

impl<F, T, Tag> MapFnTo<T, Tag> for F
where
  Self: UnitType,
  T: MapFnFrom<F, Tag>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::map_fn_from(F::get())
  }
}

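// Sketch, not part of the original file: a target type opts in to being built
// from arbitrary unit-typed Rust functions by implementing `MapFnFrom`; the
// blanket `MapFnTo` impl then provides `.map_fn_to()` on the function value
// itself. `RawCallback` is a hypothetical wrapper type.
//
//   #[derive(Copy, Clone)]
//   struct RawCallback(unsafe extern "C" fn(i32) -> i32);
//
//   impl<F> MapFnFrom<F> for RawCallback
//   where
//     F: UnitType + Fn(i32) -> i32,
//   {
//     fn mapping() -> Self {
//       // Wrap the zero-sized Rust fn in an `extern "C"` trampoline; see
//       // `CFnFrom` below.
//       Self(F::get().to_c_fn())
//     }
//   }
//
//   let cb: RawCallback = (|x: i32| x * 2).map_fn_to();
//   assert_eq!(unsafe { (cb.0)(21) }, 42);
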
pub trait CFnFrom<F>
where
  Self: Sized,
  F: UnitType,
{
  fn mapping() -> Self;

  #[inline(always)]
  fn c_fn_from(_: F) -> Self {
    Self::mapping()
  }
}

macro_rules! impl_c_fn_from {
  ($($arg:ident: $ty:ident),*) => {
    impl<F, R, $($ty),*> CFnFrom<F> for unsafe extern "C" fn($($ty),*) -> R
    where
      F: UnitType + Fn($($ty),*) -> R,
    {
      #[inline(always)]
      fn mapping() -> Self {
        unsafe extern "C" fn c_fn<F, R, $($ty),*>($($arg: $ty),*) -> R
        where
          F: UnitType + Fn($($ty),*) -> R,
        {
          (F::get())($($arg),*)
        }
        c_fn::<F, R, $($ty),*>
      }
    }
  };
}

impl_c_fn_from!();
impl_c_fn_from!(a0: A0);
impl_c_fn_from!(a0: A0, a1: A1);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6);

pub trait ToCFn<T>
where
  Self: UnitType,
  T: Sized,
{
  fn mapping() -> T;

  #[inline(always)]
  fn to_c_fn(self) -> T {
    Self::mapping()
  }
}

impl<F, T> ToCFn<T> for F
where
  Self: UnitType,
  T: CFnFrom<F>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::c_fn_from(F::get())
  }
}

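// Sketch, not part of the original file: converting a non-capturing Rust
// closure into a plain `unsafe extern "C" fn` pointer. Because the closure is
// zero-sized, the `c_fn` trampoline generated by `impl_c_fn_from!` can
// recreate it with `UnitType::get()` instead of capturing any state.
//
//   let add_one: unsafe extern "C" fn(i32) -> i32 = (|x: i32| x + 1).to_c_fn();
//   assert_eq!(unsafe { add_one(41) }, 42);
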
macro_rules! assert_layout_subset {
  ($subset: ty, $superset: ty { $($field: ident),* $(,)? }) => {
    const _: () = {
      if std::mem::size_of::<$subset>() > std::mem::size_of::<$superset>() {
        panic!(concat!(
          "assertion failed: ",
          "size of `",
          stringify!($subset),
          "` is greater than `",
          stringify!($superset),
          "`"
        ));
      }
      if std::mem::align_of::<$subset>() != std::mem::align_of::<$superset>() {
        panic!(concat!(
          "assertion failed: `",
          stringify!($subset),
          "` and `",
          stringify!($superset),
          "` have different alignments"
        ));
      }
      $(
        if std::mem::offset_of!($subset, $field) != std::mem::offset_of!($superset, $field) {
          panic!(concat!(
            "assertion failed: `",
            stringify!($subset),
            "` and `",
            stringify!($superset),
            "` have different offsets for field `",
            stringify!($field),
            "`"
          ));
        }
      )*
    };
  };
}

pub(crate) use assert_layout_subset;

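// Usage sketch, not part of the original file: the macro is invoked at module
// scope with two `#[repr(C)]` types and fails compilation if the first one's
// size, alignment, or listed field offsets are incompatible with the second's.
// The struct names below are hypothetical.
//
//   #[repr(C)]
//   struct Header {
//     kind: u32,
//     len: u32,
//   }
//
//   #[repr(C)]
//   struct Packet {
//     kind: u32,
//     len: u32,
//     payload: [u8; 16],
//   }
//
//   assert_layout_subset!(Header, Packet { kind, len });
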
#[cfg(test)]
mod tests {
  use super::*;
  use std::ptr::null;

  #[derive(Eq, PartialEq)]
  struct MockSharedObj {
    pub inner: u32,
  }

  impl MockSharedObj {
    const INSTANCE_A: Self = Self { inner: 11111 };
    const INSTANCE_B: Self = Self { inner: 22222 };

    const SHARED_PTR_BASE_A: SharedPtrBase<Self> =
      SharedPtrBase([1, 1], PhantomData);
    const SHARED_PTR_BASE_B: SharedPtrBase<Self> =
      SharedPtrBase([2, 2], PhantomData);
  }

  impl Shared for MockSharedObj {
    fn clone(_: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn from_unique_ptr(_: UniquePtr<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn get(p: &SharedPtrBase<Self>) -> *const Self {
      match p {
        &Self::SHARED_PTR_BASE_A => &Self::INSTANCE_A,
        &Self::SHARED_PTR_BASE_B => &Self::INSTANCE_B,
        p if p == &Default::default() => null(),
        _ => unreachable!(),
      }
    }

    fn reset(p: &mut SharedPtrBase<Self>) {
      forget(take(p));
    }

    fn use_count(p: &SharedPtrBase<Self>) -> long {
      match p {
        &Self::SHARED_PTR_BASE_A => 1,
        &Self::SHARED_PTR_BASE_B => 2,
        p if p == &Default::default() => 0,
        _ => unreachable!(),
      }
    }
  }

  #[test]
  fn shared_ptr_and_shared_ref() {
    let mut shared_ptr_a1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_A);
    assert!(!shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(1);

    let shared_ref_a: SharedRef<_> = shared_ptr_a1.take().unwrap();
    assert_eq!(shared_ref_a.inner, 11111);
    shared_ref_a.assert_use_count_eq(1);

    assert!(shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(0);

    let shared_ptr_a2: SharedPtr<_> = shared_ref_a.into();
    assert!(!shared_ptr_a2.is_null());
    shared_ptr_a2.assert_use_count_eq(1);
    assert_eq!(shared_ptr_a2.unwrap().inner, 11111);

    let mut shared_ptr_b1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_B);
    assert!(!shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(2);

    let shared_ref_b: SharedRef<_> = shared_ptr_b1.take().unwrap();
    assert_eq!(shared_ref_b.inner, 22222);
    shared_ref_b.assert_use_count_eq(2);

    assert!(shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(0);

    let shared_ptr_b2: SharedPtr<_> = shared_ref_b.into();
    assert!(!shared_ptr_b2.is_null());
    shared_ptr_b2.assert_use_count_eq(2);
    assert_eq!(shared_ptr_b2.unwrap().inner, 22222);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedPtr<v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ptr_use_count_assertion_failed() {
    let shared_ptr: SharedPtr<MockSharedObj> = Default::default();
    shared_ptr.assert_use_count_eq(3);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedRef<v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ref_use_count_assertion_failed() {
    let shared_ref = SharedRef(MockSharedObj::SHARED_PTR_BASE_B);
    shared_ref.assert_use_count_eq(7);
  }
}
759}