v8/
support.rs

1use std::any::type_name;
2use std::borrow::Borrow;
3use std::borrow::BorrowMut;
4use std::convert::AsMut;
5use std::convert::AsRef;
6use std::convert::TryFrom;
7use std::fmt::Debug;
8use std::marker::PhantomData;
9use std::mem::align_of;
10use std::mem::forget;
11use std::mem::needs_drop;
12use std::mem::size_of;
13use std::mem::take;
14use std::ops::Deref;
15use std::ops::DerefMut;
16use std::ptr::NonNull;
17use std::ptr::drop_in_place;
18use std::ptr::null_mut;
19use std::thread::yield_now;
20use std::time::Duration;
21use std::time::Instant;
22
// TODO use libc::intptr_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.intptr_t.html
// Signed integer type wide enough to hold a pointer (C's `intptr_t`).
#[allow(non_camel_case_types)]
pub type intptr_t = isize;

// TODO use libc::size_t when stable.
// https://doc.rust-lang.org/1.7.0/libc/type.size_t.html
// Unsigned size type (C's `size_t`).
#[allow(non_camel_case_types)]
pub type size_t = usize;

// Re-export the C ABI integer types under their C names, so FFI signatures
// in this crate read like the C/C++ headers they bind to.
pub use std::os::raw::c_char as char;
pub use std::os::raw::c_int as int;
pub use std::os::raw::c_long as long;

// Zero-sized stand-in for a C++ type whose layout is not known on the Rust
// side; values of such types are only ever handled through pointers.
pub type Opaque = [u8; 0];
38
/// Pointer to object allocated on the C++ heap. The pointer may be null.
///
/// `#[repr(transparent)]` plus the `NonNull` niche inside `UniqueRef` make
/// this type the same size and ABI as a raw pointer (the layout is also
/// checked at runtime by `assert_unique_ptr_layout_compatible`), so it can
/// be passed directly across the FFI boundary.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniquePtr<T: ?Sized>(Option<UniqueRef<T>>);
43
44impl<T: ?Sized> UniquePtr<T> {
45  pub fn is_null(&self) -> bool {
46    self.0.is_none()
47  }
48
49  pub fn as_ref(&self) -> Option<&UniqueRef<T>> {
50    self.0.as_ref()
51  }
52
53  pub fn as_mut(&mut self) -> Option<&mut UniqueRef<T>> {
54    self.0.as_mut()
55  }
56
57  pub fn take(&mut self) -> Option<UniqueRef<T>> {
58    take(&mut self.0)
59  }
60
61  pub fn unwrap(self) -> UniqueRef<T> {
62    self.0.unwrap()
63  }
64}
65
66impl<T> UniquePtr<T> {
67  pub unsafe fn from_raw(ptr: *mut T) -> Self {
68    assert_unique_ptr_layout_compatible::<Self, T>();
69    Self(unsafe { UniqueRef::try_from_raw(ptr) })
70  }
71
72  pub fn into_raw(self) -> *mut T {
73    self
74      .0
75      .map_or_else(null_mut, |unique_ref| unique_ref.into_raw())
76  }
77}
78
79impl<T: Shared> UniquePtr<T> {
80  pub fn make_shared(self) -> SharedPtr<T> {
81    self.into()
82  }
83}
84
85impl<T> Default for UniquePtr<T> {
86  fn default() -> Self {
87    assert_unique_ptr_layout_compatible::<Self, T>();
88    Self(None)
89  }
90}
91
92impl<T> From<UniqueRef<T>> for UniquePtr<T> {
93  fn from(unique_ref: UniqueRef<T>) -> Self {
94    assert_unique_ptr_layout_compatible::<Self, T>();
95    Self(Some(unique_ref))
96  }
97}
98
/// Pointer to object allocated on the C++ heap. The pointer may not be null.
///
/// Owns the pointee: dropping a `UniqueRef` runs `drop_in_place` on it (see
/// the `Drop` impl below). `#[repr(transparent)]` over `NonNull` keeps it
/// ABI-compatible with a raw pointer.
#[repr(transparent)]
#[derive(Debug)]
pub struct UniqueRef<T: ?Sized>(NonNull<T>);
103
104impl<T> UniqueRef<T> {
105  pub(crate) unsafe fn try_from_raw(ptr: *mut T) -> Option<Self> {
106    assert_unique_ptr_layout_compatible::<Self, T>();
107    NonNull::new(ptr).map(Self)
108  }
109
110  pub(crate) unsafe fn from_raw(ptr: *mut T) -> Self {
111    assert_unique_ptr_layout_compatible::<Self, T>();
112    unsafe { Self::try_from_raw(ptr).unwrap() }
113  }
114
115  pub fn into_raw(self) -> *mut T {
116    let ptr = self.0.as_ptr();
117    forget(self);
118    ptr
119  }
120}
121
122impl<T: Shared> UniqueRef<T> {
123  pub fn make_shared(self) -> SharedRef<T> {
124    self.into()
125  }
126}
127
impl<T: ?Sized> Drop for UniqueRef<T> {
  fn drop(&mut self) {
    // Run the pointee's destructor in place. NOTE(review): freeing of the
    // underlying C++ allocation is presumably handled by `T`'s own `Drop`
    // impl (an FFI call) -- confirm for each `T` used with this type.
    // SAFETY: the pointer is non-null by construction and uniquely owned.
    unsafe { drop_in_place(self.0.as_ptr()) }
  }
}
133
impl<T: ?Sized> Deref for UniqueRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    // SAFETY: the pointer is non-null by construction; `&self` guarantees
    // the pointee is neither mutated nor dropped while the borrow lives.
    unsafe { self.0.as_ref() }
  }
}

impl<T: ?Sized> DerefMut for UniqueRef<T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: `&mut self` gives exclusive access to the uniquely-owned
    // pointee.
    unsafe { self.0.as_mut() }
  }
}

// The `AsRef`/`AsMut`/`Borrow`/`BorrowMut` impls below all delegate to the
// `Deref`/`DerefMut` impls above through deref coercion on `self`.
impl<T: ?Sized> AsRef<T> for UniqueRef<T> {
  fn as_ref(&self) -> &T {
    self
  }
}

impl<T: ?Sized> AsMut<T> for UniqueRef<T> {
  fn as_mut(&mut self) -> &mut T {
    self
  }
}

impl<T: ?Sized> Borrow<T> for UniqueRef<T> {
  fn borrow(&self) -> &T {
    self
  }
}

impl<T: ?Sized> BorrowMut<T> for UniqueRef<T> {
  fn borrow_mut(&mut self) -> &mut T {
    self
  }
}
170
/// Checks, at runtime, that wrapper type `U` (a `UniqueRef` or `UniquePtr`)
/// can be passed over FFI in place of a raw `*mut T`, and that wrapping `T`
/// is actually useful. Panics with the offending type names on failure --
/// the plain `assert_eq!` output alone did not say *which* instantiation
/// was broken.
fn assert_unique_ptr_layout_compatible<U, T>() {
  // Assert that `U` (a `UniqueRef` or `UniquePtr`) has the same memory layout
  // as a raw C pointer.
  assert_eq!(
    size_of::<U>(),
    size_of::<*mut T>(),
    "{} is not the size of a raw pointer",
    type_name::<U>(),
  );
  assert_eq!(
    align_of::<U>(),
    align_of::<*mut T>(),
    "{} is not aligned like a raw pointer",
    type_name::<U>(),
  );

  // Assert that `T` (probably) implements `Drop`. If it doesn't, a regular
  // reference should be used instead of UniquePtr/UniqueRef.
  assert!(
    needs_drop::<T>(),
    "{} needs no drop glue; use a plain reference instead of \
     UniquePtr/UniqueRef",
    type_name::<T>(),
  );
}
181
/// Rust-side interface to a C++ `std::shared_ptr<Self>`. The methods are
/// presumably implemented per concrete type via FFI bindings elsewhere in
/// the crate (no impls are visible in this file, other than the test mock).
pub trait Shared
where
  Self: Sized,
{
  /// Returns a new handle to the same object (used by the `Clone` impls of
  /// `SharedPtr`/`SharedRef`).
  fn clone(shared_ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self>;
  /// Converts an owning unique pointer into shared-pointer storage.
  fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self>;
  /// Returns the raw object pointer held by the shared_ptr (null if empty).
  fn get(shared_ptr: &SharedPtrBase<Self>) -> *const Self;
  /// Releases this handle's reference (used by `SharedPtrBase`'s `Drop`).
  fn reset(shared_ptr: &mut SharedPtrBase<Self>);
  /// Current reference count, like C++ `std::shared_ptr::use_count()`.
  fn use_count(shared_ptr: &SharedPtrBase<Self>) -> long;
}
192
/// Private base type which is shared by the `SharedPtr` and `SharedRef`
/// implementations.
///
/// Two pointer-sized words of opaque storage, matching the typical layout of
/// a C++ `std::shared_ptr` (object pointer + control-block pointer). The
/// all-zero value represents the empty state (see the `Default` impl).
#[repr(C)]
#[derive(Eq, Debug, PartialEq)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

// SAFETY: NOTE(review): both impls require only `T: Sync`. For `Send` this
// assumes the C++ control block is thread-safe and that moving a handle
// between threads never hands out `&mut T`; confirm whether `Send` should
// additionally require `T: Send`.
unsafe impl<T: Shared + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Sync> Sync for SharedPtrBase<T> {}
201
202impl<T: Shared> Default for SharedPtrBase<T> {
203  fn default() -> Self {
204    Self([0usize; 2], PhantomData)
205  }
206}
207
impl<T: Shared> Drop for SharedPtrBase<T> {
  fn drop(&mut self) {
    // Release this handle's reference through the type-specific FFI `reset`.
    <T as Shared>::reset(self);
  }
}
213
/// Wrapper around a C++ shared_ptr. A shared_ptr may be null.
///
/// For a shared_ptr that is statically known to be non-null, see
/// `SharedRef`.
#[repr(C)]
#[derive(Debug)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);
218
219impl<T: Shared> SharedPtr<T> {
220  /// Asserts that the number of references to the shared inner value is equal
221  /// to the `expected` count.
222  ///
223  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
224  /// which usually performs a relaxed load. This function will repeatedly call
225  /// `use_count()` until it returns the expected value, for up to one second.
226  /// Therefore it should probably not be used in performance critical code.
227  #[track_caller]
228  pub fn assert_use_count_eq(&self, expected: usize) {
229    assert_shared_ptr_use_count_eq("SharedPtr", &self.0, expected);
230  }
231
232  pub fn is_null(&self) -> bool {
233    <T as Shared>::get(&self.0).is_null()
234  }
235
236  pub fn take(&mut self) -> Option<SharedRef<T>> {
237    if self.is_null() {
238      None
239    } else {
240      let base = take(&mut self.0);
241      Some(SharedRef(base))
242    }
243  }
244
245  pub fn unwrap(self) -> SharedRef<T> {
246    assert!(!self.is_null());
247    SharedRef(self.0)
248  }
249}
250
251impl<T: Shared> Clone for SharedPtr<T> {
252  fn clone(&self) -> Self {
253    Self(<T as Shared>::clone(&self.0))
254  }
255}
256
257impl<T: Shared> Default for SharedPtr<T> {
258  fn default() -> Self {
259    Self(Default::default())
260  }
261}
262
263impl<T, U> From<U> for SharedPtr<T>
264where
265  T: Shared,
266  U: Into<UniquePtr<T>>,
267{
268  fn from(unique_ptr: U) -> Self {
269    let unique_ptr = unique_ptr.into();
270    Self(<T as Shared>::from_unique_ptr(unique_ptr))
271  }
272}
273
274impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
275  fn from(mut shared_ref: SharedRef<T>) -> Self {
276    Self(take(&mut shared_ref.0))
277  }
278}
279
/// Wrapper around a C++ shared_ptr. The shared_ptr is assumed to contain a
/// value and may not be null.
///
/// Because the pointer is assumed non-null, this type can implement `Deref`
/// (below), unlike `SharedPtr`.
#[repr(C)]
#[derive(Debug)]
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);
285
286impl<T: Shared> SharedRef<T> {
287  /// Asserts that the number of references to the shared inner value is equal
288  /// to the `expected` count.
289  ///
290  /// This function relies on the C++ method `std::shared_ptr::use_count()`,
291  /// which usually performs a relaxed load. This function will repeatedly call
292  /// `use_count()` until it returns the expected value, for up to one second.
293  /// Therefore it should probably not be used in performance critical code.
294  #[track_caller]
295  pub fn assert_use_count_eq(&self, expected: usize) {
296    assert_shared_ptr_use_count_eq("SharedRef", &self.0, expected);
297  }
298}
299
300impl<T: Shared> Clone for SharedRef<T> {
301  fn clone(&self) -> Self {
302    Self(<T as Shared>::clone(&self.0))
303  }
304}
305
306impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
307  fn from(unique_ref: UniqueRef<T>) -> Self {
308    SharedPtr::from(unique_ref).unwrap()
309  }
310}
311
312impl<T: Shared> Deref for SharedRef<T> {
313  type Target = T;
314  fn deref(&self) -> &Self::Target {
315    unsafe { &*(<T as Shared>::get(&self.0)) }
316  }
317}
318
319impl<T: Shared> AsRef<T> for SharedRef<T> {
320  fn as_ref(&self) -> &T {
321    self
322  }
323}
324
325impl<T: Shared> Borrow<T> for SharedRef<T> {
326  fn borrow(&self) -> &T {
327    self
328  }
329}
330
331#[track_caller]
332fn assert_shared_ptr_use_count_eq<T: Shared>(
333  wrapper_type_name: &str,
334  shared_ptr: &SharedPtrBase<T>,
335  expected: usize,
336) {
337  let mut actual = T::use_count(shared_ptr);
338  let ok = match long::try_from(expected) {
339    Err(_) => false, // Non-`long` value can never match actual use count.
340    Ok(expected) if actual == expected => true, // Fast path.
341    Ok(expected) => {
342      pub const RETRY_TIMEOUT: Duration = Duration::from_secs(1);
343      let start = Instant::now();
344      loop {
345        yield_now();
346        actual = T::use_count(shared_ptr);
347        if actual == expected {
348          break true;
349        } else if start.elapsed() > RETRY_TIMEOUT {
350          break false;
351        }
352      }
353    }
354  };
355  assert!(
356    ok,
357    "assertion failed: `{wrapper_type_name}<{}>` reference count does not match expectation\
358       \n   actual: {actual}\
359       \n expected: {expected}",
360    type_name::<T>(),
361  );
362}
363
/// ABI-compatible mirror of V8's `Maybe<bool>`-style tri-state boolean.
/// The explicit discriminants fix the values seen by the C++ side.
#[repr(C)]
#[derive(Debug, PartialEq, Eq)]
pub enum MaybeBool {
  JustFalse = 0,
  JustTrue = 1,
  Nothing = 2,
}

impl From<MaybeBool> for Option<bool> {
  fn from(b: MaybeBool) -> Self {
    match b {
      MaybeBool::Nothing => None,
      MaybeBool::JustTrue => Some(true),
      MaybeBool::JustFalse => Some(false),
    }
  }
}

impl From<Option<bool>> for MaybeBool {
  fn from(option: Option<bool>) -> Self {
    match option {
      None => MaybeBool::Nothing,
      Some(true) => MaybeBool::JustTrue,
      Some(false) => MaybeBool::JustFalse,
    }
  }
}
391
// Wrapper around a raw pointer into a C++ object's vtable -- presumably used
// by hand-rolled virtual dispatch elsewhere in the crate (no uses are
// visible in this file).
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct CxxVTable(pub *const Opaque);

// Same idea for a Rust trait object's vtable; the `PhantomData` records
// which trait object type the table belongs to.
#[allow(unused)]
#[derive(Copy, Clone, Debug)]
pub struct RustVTable<DynT>(pub *const Opaque, pub PhantomData<DynT>);
399
/// Byte offset of a field of type `F` inside some embedder struct, letting
/// code that only has `&F` recover a reference to the containing struct.
#[derive(Debug)]
pub struct FieldOffset<F>(usize, PhantomData<F>);

// SAFETY: a `FieldOffset` is just a plain byte offset; the `F: Send`/`Sync`
// bounds conservatively mirror the field type it describes.
unsafe impl<F> Send for FieldOffset<F> where F: Send {}
unsafe impl<F> Sync for FieldOffset<F> where F: Sync {}

impl<F> Copy for FieldOffset<F> {}

impl<F> Clone for FieldOffset<F> {
  fn clone(&self) -> Self {
    *self
  }
}

impl<F> FieldOffset<F> {
  /// Computes the offset of `field_ptr` inside `embedder_ptr`, asserting
  /// that the field actually lies within the embedder object.
  pub fn from_ptrs<E>(embedder_ptr: *const E, field_ptr: *const F) -> Self {
    let embedder_addr = embedder_ptr as usize;
    let field_addr = field_ptr as usize;
    assert!(field_addr >= embedder_addr);
    assert!((field_addr + size_of::<F>()) <= (embedder_addr + size_of::<E>()));
    Self(field_addr - embedder_addr, PhantomData)
  }

  /// Recovers a reference to the embedder struct from a field reference.
  ///
  /// # Safety
  /// `field` must actually live at this offset inside a live object of
  /// type `E`.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder<E>(self, field: &F) -> &E {
    let embedder_addr = (field as *const F as usize) - self.0;
    // SAFETY: per this function's contract the computed address is the start
    // of a live `E` that outlives `field`'s borrow.
    unsafe { (embedder_addr as *const E).as_ref() }.unwrap()
  }

  /// Mutable variant of [`Self::to_embedder`].
  ///
  /// # Safety
  /// Same contract as [`Self::to_embedder`], with `field` exclusively
  /// borrowed from the embedder object.
  #[allow(clippy::wrong_self_convention)]
  pub unsafe fn to_embedder_mut<E>(self, field: &mut F) -> &mut E {
    let embedder_addr = (field as *mut F as usize) - self.0;
    // SAFETY: see above; exclusivity follows from `&mut F`.
    unsafe { (embedder_addr as *mut E).as_mut() }.unwrap()
  }
}
441
/// ABI mirror of a C++ `Maybe<T>`-style value: a flag plus inline storage.
/// `#[repr(C)]` fixes the field order for the FFI boundary.
#[repr(C)]
#[derive(Debug, Default)]
pub struct Maybe<T> {
  has_value: bool,
  value: T,
}

impl<T> From<Maybe<T>> for Option<T> {
  fn from(maybe: Maybe<T>) -> Self {
    match maybe {
      Maybe { has_value: true, value } => Some(value),
      Maybe { has_value: false, .. } => None,
    }
  }
}
458
/// Trait for zero-sized types, whose single value can be conjured from the
/// type alone. Used below (`CFnFrom`, `MapFnFrom`) to pass capture-less
/// closures by type rather than by value.
pub trait UnitType
where
  Self: Copy + Sized,
{
  /// Materializes the unit type's only value.
  #[inline(always)]
  fn get() -> Self {
    UnitValue::<Self>::get()
  }
}

// Blanket impl for every `Copy + Sized` type; the zero-size requirement is
// enforced inside `UnitValue` (at compile time), not by these bounds.
impl<T> UnitType for T where T: Copy + Sized {}
470
// Private helper for `UnitType`: produces the only value of a zero-sized
// type `T`, with a compile-time guarantee that `T` really is zero-sized.
#[derive(Copy, Clone, Debug)]
struct UnitValue<T>(PhantomData<T>)
where
  Self: Sized;

impl<T> UnitValue<T>
where
  Self: Copy + Sized,
{
  // Evaluating this constant forces the compile-time size check in
  // `new_checked` for each concrete `T` this is instantiated with.
  const SELF: Self = Self::new_checked();

  const fn new_checked() -> Self {
    // Statically assert that T is indeed a unit type.
    // The `[s][size_must_be_0]` index is in bounds only when
    // `size_of::<T>() == 0`; any other size makes const evaluation of
    // `SELF` fail to compile.
    let size_must_be_0 = size_of::<T>();
    let s = Self(PhantomData::<T>);
    [s][size_must_be_0]
  }

  #[inline(always)]
  fn get_checked(self) -> T {
    // This run-time check serves just as a backup for the compile-time
    // check when Self::SELF is initialized.
    assert_eq!(size_of::<T>(), 0);
    // SAFETY: `T` is zero-sized (asserted above), so the zeroed bit pattern
    // is its one and only valid value.
    unsafe { std::mem::MaybeUninit::<T>::zeroed().assume_init() }
  }

  #[inline(always)]
  pub fn get() -> T {
    // Accessing the Self::SELF is necessary to make the compile-time type check
    // work.
    Self::SELF.get_checked()
  }
}
504
/// Default disambiguation marker for `MapFnFrom`/`MapFnTo` conversions.
#[derive(Debug)]
pub struct DefaultTag;

/// Marker for the trivial conversion where source and target types are the
/// same (see the `MapFnFrom` identity impl below).
#[derive(Debug)]
pub struct IdenticalConversionTag;
510
/// Conversion from a unit-type function/closure `F` into `Self`. The `Tag`
/// type parameter exists to keep otherwise-overlapping impls apart.
pub trait MapFnFrom<F, Tag = DefaultTag>
where
  F: UnitType,
  Self: Sized,
{
  /// Produces the converted value; implementations recover the source value
  /// itself via `F::get()` since `F` is zero-sized.
  fn mapping() -> Self;

  /// Convenience wrapper that accepts (and ignores) an `F` value.
  #[inline(always)]
  fn map_fn_from(_: F) -> Self {
    Self::mapping()
  }
}

// Identity conversion: any unit type maps to itself.
impl<F> MapFnFrom<F, IdenticalConversionTag> for F
where
  Self: UnitType,
{
  #[inline(always)]
  fn mapping() -> Self {
    Self::get()
  }
}
533
/// The dual of `MapFnFrom`: converts `Self` (a unit type) into `T`.
/// Blanket-implemented below for every `T: MapFnFrom<F, Tag>`, so only
/// `MapFnFrom` ever needs explicit impls.
pub trait MapFnTo<T, Tag = DefaultTag>
where
  Self: UnitType,
  T: Sized,
{
  /// Produces the converted value.
  fn mapping() -> T;

  /// Method-call form of the conversion; `self` is only used for its type.
  #[inline(always)]
  fn map_fn_to(self) -> T {
    Self::mapping()
  }
}

// Forward every `MapFnTo` to the corresponding `MapFnFrom` impl.
impl<F, T, Tag> MapFnTo<T, Tag> for F
where
  Self: UnitType,
  T: MapFnFrom<F, Tag>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::map_fn_from(F::get())
  }
}
557
/// Builds a C-ABI function pointer (`Self`) that forwards to the unit-type
/// closure `F`. Implementations for `unsafe extern "C"` fn pointers of
/// arities 0..=7 are generated below by `impl_c_fn_from!`.
pub trait CFnFrom<F>
where
  Self: Sized,
  F: UnitType,
{
  /// Produces the function pointer.
  fn mapping() -> Self;

  /// Convenience wrapper that accepts (and ignores) an `F` value.
  #[inline(always)]
  fn c_fn_from(_: F) -> Self {
    Self::mapping()
  }
}
570
// Implements `CFnFrom<F>` for an `unsafe extern "C"` function pointer with
// the given parameter list. The generated `c_fn` shim re-materializes the
// zero-sized closure with `F::get()` and forwards the arguments, so the
// resulting function pointer carries no state at all.
macro_rules! impl_c_fn_from {
  ($($arg:ident: $ty:ident),*) => {
    impl<F, R, $($ty),*> CFnFrom<F> for unsafe extern "C" fn($($ty),*) -> R
    where
      F: UnitType + Fn($($ty),*) -> R,
    {
      #[inline(always)]
      fn mapping() -> Self {
        unsafe extern "C" fn c_fn<F, R, $($ty),*>($($arg: $ty),*) -> R
        where
          F: UnitType + Fn($($ty),*) -> R,
        {
          (F::get())($($arg),*)
        }
        c_fn::<F, R, $($ty),*>
      }
    }
  };
}

// Instantiate for arities 0 through 7.
impl_c_fn_from!();
impl_c_fn_from!(a0: A0);
impl_c_fn_from!(a0: A0, a1: A1);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5);
impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6);
599
/// Converts `self` (a unit-type closure) into the C function pointer type
/// `T`; the counterpart of `MapFnTo` for the `CFnFrom` conversion family.
pub trait ToCFn<T>
where
  Self: UnitType,
  T: Sized,
{
  /// Produces the function pointer.
  fn mapping() -> T;

  /// Method-call form; `self` is only used for its type.
  #[inline(always)]
  fn to_c_fn(self) -> T {
    Self::mapping()
  }
}

// Forward every `ToCFn` to the corresponding `CFnFrom` impl.
impl<F, T> ToCFn<T> for F
where
  Self: UnitType,
  T: CFnFrom<F>,
{
  #[inline(always)]
  fn mapping() -> T {
    T::c_fn_from(F::get())
  }
}
623
#[cfg(test)]
mod tests {
  use super::*;
  use std::ptr::null;

  // Stand-in for a C++-heap object; `Shared` is mocked below with no FFI.
  #[derive(Eq, PartialEq)]
  struct MockSharedObj {
    pub inner: u32,
  }

  impl MockSharedObj {
    const INSTANCE_A: Self = Self { inner: 11111 };
    const INSTANCE_B: Self = Self { inner: 22222 };

    // Sentinel `SharedPtrBase` bit patterns; `get`/`use_count` below match
    // on these to select the corresponding mock instance.
    const SHARED_PTR_BASE_A: SharedPtrBase<Self> =
      SharedPtrBase([1, 1], PhantomData);
    const SHARED_PTR_BASE_B: SharedPtrBase<Self> =
      SharedPtrBase([2, 2], PhantomData);
  }

  impl Shared for MockSharedObj {
    fn clone(_: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn from_unique_ptr(_: UniquePtr<Self>) -> SharedPtrBase<Self> {
      unimplemented!()
    }

    fn get(p: &SharedPtrBase<Self>) -> *const Self {
      match p {
        &Self::SHARED_PTR_BASE_A => &Self::INSTANCE_A,
        &Self::SHARED_PTR_BASE_B => &Self::INSTANCE_B,
        p if p == &Default::default() => null(),
        _ => unreachable!(),
      }
    }

    // Swap in the empty value and forget the old one, so the taken value's
    // own `Drop` (which would call `reset` again) never runs.
    fn reset(p: &mut SharedPtrBase<Self>) {
      forget(take(p));
    }

    fn use_count(p: &SharedPtrBase<Self>) -> long {
      match p {
        &Self::SHARED_PTR_BASE_A => 1,
        &Self::SHARED_PTR_BASE_B => 2,
        p if p == &Default::default() => 0,
        _ => unreachable!(),
      }
    }
  }

  // Round-trips SharedPtr <-> SharedRef and checks null state and use
  // counts along the way, for both mock instances.
  #[test]
  fn shared_ptr_and_shared_ref() {
    let mut shared_ptr_a1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_A);
    assert!(!shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(1);

    let shared_ref_a: SharedRef<_> = shared_ptr_a1.take().unwrap();
    assert_eq!(shared_ref_a.inner, 11111);
    shared_ref_a.assert_use_count_eq(1);

    // `take` must leave the original pointer empty.
    assert!(shared_ptr_a1.is_null());
    shared_ptr_a1.assert_use_count_eq(0);

    let shared_ptr_a2: SharedPtr<_> = shared_ref_a.into();
    assert!(!shared_ptr_a2.is_null());
    shared_ptr_a2.assert_use_count_eq(1);
    assert_eq!(shared_ptr_a2.unwrap().inner, 11111);

    let mut shared_ptr_b1 = SharedPtr(MockSharedObj::SHARED_PTR_BASE_B);
    assert!(!shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(2);

    let shared_ref_b: SharedRef<_> = shared_ptr_b1.take().unwrap();
    assert_eq!(shared_ref_b.inner, 22222);
    shared_ref_b.assert_use_count_eq(2);

    assert!(shared_ptr_b1.is_null());
    shared_ptr_b1.assert_use_count_eq(0);

    let shared_ptr_b2: SharedPtr<_> = shared_ref_b.into();
    assert!(!shared_ptr_b2.is_null());
    shared_ptr_b2.assert_use_count_eq(2);
    assert_eq!(shared_ptr_b2.unwrap().inner, 22222);
  }

  // A wrong expected count must panic (after the one-second retry window).
  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedPtr<v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ptr_use_count_assertion_failed() {
    let shared_ptr: SharedPtr<MockSharedObj> = Default::default();
    shared_ptr.assert_use_count_eq(3);
  }

  #[test]
  #[should_panic(expected = "assertion failed: \
      `SharedRef<v8::support::tests::MockSharedObj>` reference count \
      does not match expectation")]
  fn shared_ref_use_count_assertion_failed() {
    let shared_ref = SharedRef(MockSharedObj::SHARED_PTR_BASE_B);
    shared_ref.assert_use_count_eq(7);
  }
}