// v8/handle.rs
1use std::borrow::Borrow;
2use std::cell::Cell;
3use std::ffi::c_void;
4use std::hash::Hash;
5use std::hash::Hasher;
6use std::marker::PhantomData;
7use std::mem::forget;
8use std::mem::transmute;
9use std::ops::Deref;
10use std::ptr::NonNull;
11
12use crate::Data;
13use crate::Isolate;
14use crate::IsolateHandle;
15use crate::isolate::RealIsolate;
16use crate::scope::GetIsolate;
17use crate::scope::PinScope;
18use crate::support::Opaque;
19
// Hand-written FFI bindings into the C++ glue layer. The signatures here
// must stay in exact agreement with their C++ counterparts.
unsafe extern "C" {
  // `Local` support: copies an existing handle into the current scope.
  fn v8__Local__New(
    isolate: *mut RealIsolate,
    other: *const Data,
  ) -> *const Data;
  // `Global` support: creates / destroys persistent storage cells.
  fn v8__Global__New(
    isolate: *mut RealIsolate,
    data: *const Data,
  ) -> *const Data;
  // Creates a weak storage cell; `callback` is the first-pass callback,
  // invoked with `parameter` reachable via the `WeakCallbackInfo`.
  fn v8__Global__NewWeak(
    isolate: *mut RealIsolate,
    data: *const Data,
    parameter: *const c_void,
    callback: unsafe extern "C" fn(*const WeakCallbackInfo),
  ) -> *const Data;
  fn v8__Global__Reset(data: *const Data);
  // Accessors for the info object passed to weak-handle callbacks.
  fn v8__WeakCallbackInfo__GetIsolate(
    this: *const WeakCallbackInfo,
  ) -> *mut RealIsolate;
  fn v8__WeakCallbackInfo__GetParameter(
    this: *const WeakCallbackInfo,
  ) -> *mut c_void;
  fn v8__WeakCallbackInfo__SetSecondPassCallback(
    this: *const WeakCallbackInfo,
    callback: unsafe extern "C" fn(*const WeakCallbackInfo),
  );

  // `TracedReference` support.
  fn v8__TracedReference__CONSTRUCT(this: *mut TracedReference<Data>);
  fn v8__TracedReference__DESTRUCT(this: *mut TracedReference<Data>);
  fn v8__TracedReference__Reset(
    this: *mut TracedReference<Data>,
    isolate: *mut RealIsolate,
    data: *mut Data,
  );
  fn v8__TracedReference__Get(
    this: *const TracedReference<Data>,
    isolate: *mut RealIsolate,
  ) -> *const Data;

  // `Eternal` support.
  fn v8__Eternal__CONSTRUCT(this: *mut Eternal<Data>);
  fn v8__Eternal__DESTRUCT(this: *mut Eternal<Data>);
  fn v8__Eternal__Clear(this: *mut Eternal<Data>);
  fn v8__Eternal__Get(
    this: *const Eternal<Data>,
    isolate: *mut RealIsolate,
  ) -> *const Data;
  fn v8__Eternal__Set(
    this: *mut Eternal<Data>,
    isolate: *mut RealIsolate,
    data: *mut Data,
  );
  fn v8__Eternal__IsEmpty(this: *const Eternal<Data>) -> bool;
}
73
/// An object reference managed by the v8 garbage collector.
///
/// All objects returned from v8 have to be tracked by the garbage
/// collector so that it knows that the objects are still alive.  Also,
/// because the garbage collector may move objects, it is unsafe to
/// point directly to an object.  Instead, all objects are stored in
/// handles which are known by the garbage collector and updated
/// whenever an object moves.  Handles should always be passed by value
/// (except in cases like out-parameters) and they should never be
/// allocated on the heap.
///
/// There are two types of handles: local and persistent handles.
///
/// Local handles are light-weight and transient and typically used in
/// local operations.  They are managed by HandleScopes. That means that a
/// HandleScope must exist on the stack when they are created and that they are
/// only valid inside of the `HandleScope` active during their creation.
/// For passing a local handle to an outer `HandleScope`, an
/// `EscapableHandleScope` and its `Escape()` method must be used.
///
/// Persistent handles can be used when storing objects across several
/// independent operations and have to be explicitly deallocated when they're no
/// longer used.
///
/// It is safe to extract the object stored in the handle by
/// dereferencing the handle (for instance, to extract the `*Object` from
/// a `Local<Object>`); the value will still be governed by a handle
/// behind the scenes and the same rules apply to these values as to
/// their handles.
///
/// Note: Local handles in Rusty V8 differ from the V8 C++ API in that they are
/// never empty. In situations where empty handles are needed, use
/// `Option<Local>`.
#[repr(C)]
#[derive(Debug)]
// Layout note: a single non-null pointer plus a zero-sized lifetime marker,
// so a `Local<T>` has the same layout as `*const T` (relied upon by
// `slice_into_raw` and `cast_unchecked`).
pub struct Local<'s, T>(NonNull<T>, PhantomData<&'s ()>);
110
mod sealed {
  /// Marker trait that prevents downstream code from implementing
  /// `ExtendLifetime` for its own types.
  pub trait Sealed {}
}

// This trait exists to allow you to specify the output lifetime for
// `Local::extend_lifetime_unchecked`, so you can do something like
// `unsafe { Local::extend_lifetime_unchecked::<Local<'o, T>>(local) }`.
// If it were just a lifetime parameter, it would be "late bound" and you
// could not explicitly specify the output lifetime.
pub trait ExtendLifetime<'s, T>: sealed::Sealed {
  // The shorter-lived value this type is produced from.
  type Input;
  // Reinterprets `value` as `Self`. Unsafe: the caller must guarantee the
  // underlying handle really is valid for the extended lifetime.
  unsafe fn extend_lifetime_unchecked_from(value: Self::Input) -> Self;
}

impl<T> sealed::Sealed for Local<'_, T> {}

impl<'s, T> ExtendLifetime<'s, T> for Local<'_, T> {
  type Input = Local<'s, T>;
  unsafe fn extend_lifetime_unchecked_from(value: Self::Input) -> Self {
    // SAFETY: upheld by the caller per the trait's contract; the pointer is
    // unchanged, only the lifetime parameter differs.
    unsafe { Local::from_non_null(value.as_non_null()) }
  }
}
131
impl<'s, T> Local<'s, T> {
  /// Construct a new Local from an existing Handle.
  ///
  /// Panics if the handle is hosted by a different `Isolate` than the
  /// scope's, or if its host `Isolate` has been disposed.
  #[inline(always)]
  pub fn new<'i>(
    scope: &PinScope<'s, 'i, ()>,
    handle: impl Handle<Data = T>,
  ) -> Local<'s, T> {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(scope);
    // SAFETY: the host check above ensures the handle belongs to this
    // scope's isolate, so copying it into the scope is valid.
    unsafe {
      scope.cast_local(|sd| {
        v8__Local__New(sd.get_isolate_ptr(), data.cast().as_ptr()) as *const T
      })
    }
    // `v8__Local__New` copies an existing, non-empty handle, so the result
    // is expected to be non-null and the unwrap not to fail.
    .unwrap()
  }

  /// Create a local handle by downcasting from one of its super types.
  /// This function is unsafe because the cast is unchecked.
  #[inline(always)]
  pub unsafe fn cast_unchecked<A>(other: Local<'s, A>) -> Self
  where
    Local<'s, A>: TryFrom<Self>,
  {
    // SAFETY: `Local` is a pointer-sized `#[repr(C)]` wrapper, so this only
    // reinterprets the pointee type; the caller guarantees the value really
    // is a `T`.
    unsafe { transmute(other) }
  }
  /// Extend the lifetime of a `Local` handle to a longer lifetime.
  ///
  /// # Safety
  ///
  /// The caller is responsible for ensuring that the `Local` handle is valid
  /// for the longer lifetime. Incorrect usage can lead to the usage of invalid
  /// handles
  ///
  /// # Example
  ///
  /// ```ignore
  /// let isolate = unsafe { Isolate::from_raw_isolate_ptr(isolate_ptr) };
  /// callback_scope!(unsafe scope, &mut isolate);
  /// // the lifetime of the local handle will be tied to the lifetime of `&mut isolate`,
  /// // which, because we've created it from a raw pointer, is only as long as the current function.
  /// // the real lifetime at runtime is
  /// // actually the lifetime of the parent scope. if we can guarantee that the parent scope lives at least as long as
  /// // `'o`, it is valid to extend the lifetime of the local handle to `'o` by using `extend_lifetime_unchecked`.
  /// let context = Local::new(scope, context_global_handle);
  ///
  /// let local_longer_lifetime = unsafe { local.extend_lifetime_unchecked::<Local<'o, T>>() };
  /// ```
  #[inline(always)]
  pub unsafe fn extend_lifetime_unchecked<'o, O>(self) -> O
  where
    O: ExtendLifetime<'s, T, Input = Self>,
  {
    // SAFETY: upheld by the caller, per the documented contract above.
    unsafe { O::extend_lifetime_unchecked_from(self) }
  }

  // Wraps a raw pointer in a `Local`, returning `None` for null.
  #[inline(always)]
  pub(crate) unsafe fn from_raw(ptr: *const T) -> Option<Self> {
    NonNull::new(ptr as *mut _).map(|nn| unsafe { Self::from_non_null(nn) })
  }

  // Wraps a raw pointer in a `Local` without a null check.
  #[inline(always)]
  pub(crate) unsafe fn from_raw_unchecked(ptr: *const T) -> Self {
    Self(
      // SAFETY: the caller guarantees `ptr` is non-null.
      unsafe { NonNull::new_unchecked(ptr as *mut _) },
      PhantomData,
    )
  }

  // Wraps an already-checked non-null pointer in a `Local`.
  #[inline(always)]
  pub(crate) unsafe fn from_non_null(nn: NonNull<T>) -> Self {
    Self(nn, PhantomData)
  }

  // Returns the underlying non-null pointer of this handle.
  #[inline(always)]
  pub(crate) fn as_non_null(self) -> NonNull<T> {
    self.0
  }

  // Reinterprets a slice of `Local`s as a slice of raw pointers.
  #[inline(always)]
  pub(crate) fn slice_into_raw(slice: &[Self]) -> &[*const T] {
    // SAFETY: `Local` is `#[repr(C)]` with a single `NonNull<T>` field plus
    // a zero-sized marker, so its layout matches `*const T`.
    unsafe { &*(slice as *const [Self] as *const [*const T]) }
  }
}
216
// A `Local` is just a pointer plus a zero-sized marker, so it is freely
// copyable.
impl<T> Copy for Local<'_, T> {}
218
219impl<T> Clone for Local<'_, T> {
220  fn clone(&self) -> Self {
221    *self
222  }
223}
224
impl<T> Deref for Local<'_, T> {
  type Target = T;
  fn deref(&self) -> &T {
    // SAFETY: a `Local` is never empty (see the type docs); within its
    // scope lifetime the pointer refers to a live V8 heap value.
    unsafe { self.0.as_ref() }
  }
}
231
impl<'s, T> Local<'s, T> {
  /// Attempts to cast the contained type to another,
  /// returning an error if the conversion fails.
  ///
  /// # Examples
  ///
  /// ```ignore
  /// let value: Local<'_, Value> = get_v8_value();
  ///
  /// if let Ok(func) = value.try_cast::<Function>() {
  ///   //
  /// }
  /// ```
  #[inline(always)]
  pub fn try_cast<A>(
    self,
  ) -> Result<Local<'s, A>, <Self as TryInto<Local<'s, A>>>::Error>
  where
    Self: TryInto<Local<'s, A>>,
  {
    self.try_into()
  }

  /// Attempts to cast the contained type to another,
  /// panicking if the conversion fails.
  ///
  /// # Example
  ///
  /// ```ignore
  /// let value: Local<'_, Value> = get_v8_value();
  ///
  /// let func = value.cast::<Function>();
  /// ```
  #[inline(always)]
  pub fn cast<A>(self) -> Local<'s, A>
  where
    Self: TryInto<Local<'s, A>, Error: std::fmt::Debug>,
  {
    self.try_into().unwrap()
  }
}
273
/// An object reference that is independent of any handle scope. Where
/// a Local handle only lives as long as the HandleScope in which it was
/// allocated, a global handle remains valid until it is dropped.
///
/// A global handle contains a reference to a storage cell within
/// the V8 engine which holds an object value and which is updated by
/// the garbage collector whenever the object is moved.
///
/// You can create a `v8::Local` out of `v8::Global` using
/// `v8::Local::new(scope, global_handle)`.
#[derive(Debug)]
pub struct Global<T> {
  // Pointer to the storage cell returned by `v8__Global__New`.
  data: NonNull<T>,
  // Lets `Drop` (and others) check whether the host isolate is still alive.
  isolate_handle: IsolateHandle,
}
289
impl<T> Global<T> {
  /// Construct a new Global from an existing Handle.
  ///
  /// Panics if the handle is not hosted by `isolate` or if its host isolate
  /// has been disposed.
  #[inline(always)]
  pub fn new(isolate: &Isolate, handle: impl Handle<Data = T>) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    // SAFETY: the handle was just verified to belong to `isolate`.
    unsafe { Self::new_raw(isolate as *const Isolate as *mut Isolate, data) }
  }

  /// Implementation helper function that contains the code that can be shared
  /// between `Global::new()` and `Global::clone()`.
  ///
  /// # Safety
  ///
  /// `isolate` must point to a live isolate, and `data` must point to a V8
  /// value hosted by that isolate.
  #[inline(always)]
  unsafe fn new_raw(isolate: *mut Isolate, data: NonNull<T>) -> Self {
    let data = data.cast().as_ptr();
    unsafe {
      // Create a fresh storage cell holding the value.
      let data = v8__Global__New((*isolate).as_real_ptr(), data) as *const T;
      // `v8__Global__New` is expected to return a valid, non-null cell.
      let data = NonNull::new_unchecked(data as *mut _);
      let isolate_handle = (*isolate).thread_safe_handle();
      Self {
        data,
        isolate_handle,
      }
    }
  }

  /// Consume this `Global` and return the underlying raw pointer.
  ///
  /// The returned raw pointer must be converted back into a `Global` by using
  /// [`Global::from_raw`], otherwise the V8 value referenced by this global
  /// handle will be pinned on the V8 heap permanently and never get garbage
  /// collected.
  #[inline(always)]
  pub fn into_raw(self) -> NonNull<T> {
    let data = self.data;
    // `forget` skips `Drop`, so the storage cell intentionally stays alive
    // until `from_raw` reconstitutes the handle.
    forget(self);
    data
  }

  /// Converts a raw pointer created with [`Global::into_raw()`] back to its
  /// original `Global`.
  ///
  /// # Safety
  ///
  /// `data` must have been returned by [`Global::into_raw()`], and `isolate`
  /// must be the isolate that hosts the referenced value.
  #[inline(always)]
  pub unsafe fn from_raw(isolate: &mut Isolate, data: NonNull<T>) -> Self {
    let isolate_handle = isolate.thread_safe_handle();
    Self {
      data,
      isolate_handle,
    }
  }

  /// Returns a reference to the referenced V8 heap object; see
  /// [`Handle::open`] for the panic conditions.
  #[inline(always)]
  pub fn open<'a>(&'a self, scope: &mut Isolate) -> &'a T {
    Handle::open(self, scope)
  }
}
344
impl<T> Clone for Global<T> {
  fn clone(&self) -> Self {
    let HandleInfo { data, host } = self.get_handle_info();
    // `host.get_isolate()` panics if the host isolate has been disposed.
    let mut isolate = unsafe { Isolate::from_non_null(host.get_isolate()) };
    // Cloning creates a brand-new storage cell for the same value.
    unsafe { Self::new_raw(isolate.as_mut(), data) }
  }
}
352
353impl<T> Drop for Global<T> {
354  fn drop(&mut self) {
355    unsafe {
356      if self.isolate_handle.get_isolate_ptr().is_null() {
357        // This `Global` handle is associated with an `Isolate` that has already
358        // been disposed.
359      } else {
360        // Destroy the storage cell that contains the contents of this Global.
361        v8__Global__Reset(self.data.cast().as_ptr());
362      }
363    }
364  }
365}
366
/// An implementation of [`Handle`] that can be constructed unsafely from a
/// reference.
pub(crate) struct UnsafeRefHandle<'a, T> {
  // Borrowed view of the V8 heap value; validity is the constructor caller's
  // promise.
  reference: &'a T,
  isolate_handle: IsolateHandle,
}
impl<'a, T> UnsafeRefHandle<'a, T> {
  /// Constructs an `UnsafeRefHandle`.
  ///
  /// # Safety
  ///
  /// `reference` must be derived from a [`Local`] or [`Global`] handle, and its
  /// lifetime must not outlive that handle. Furthermore, `isolate` must be the
  /// isolate associated with the handle (for [`Local`], the current isolate;
  /// for [`Global`], the isolate you would pass to the [`Global::open()`]
  /// method).
  #[inline(always)]
  pub unsafe fn new(reference: &'a T, isolate: &mut Isolate) -> Self {
    UnsafeRefHandle {
      reference,
      isolate_handle: isolate.thread_safe_handle(),
    }
  }
}
391
pub trait Handle: Sized {
  /// The V8 heap type this handle refers to.
  type Data;

  #[doc(hidden)]
  fn get_handle_info(&self) -> HandleInfo<Self::Data>;

  /// Returns a reference to the V8 heap object that this handle represents.
  /// The handle does not get cloned, nor is it converted to a `Local` handle.
  ///
  /// # Panics
  ///
  /// This function panics in the following situations:
  /// - The handle is not hosted by the specified Isolate.
  /// - The Isolate that hosts this handle has been disposed.
  fn open<'a>(&'a self, isolate: &mut Isolate) -> &'a Self::Data {
    let HandleInfo { data, host } = self.get_handle_info();
    host.assert_match_isolate(isolate);
    // SAFETY: the check above ensures the handle belongs to a live isolate,
    // so the data pointer is dereferenceable.
    unsafe { &*data.as_ptr() }
  }

  /// Reads the inner value contained in this handle, _without_ verifying that
  /// this handle is hosted by the currently active `Isolate`.
  ///
  /// # Safety
  ///
  /// Using a V8 heap object with another `Isolate` than the `Isolate` that
  /// hosts it is not permitted under any circumstance. Doing so leads to
  /// undefined behavior, likely a crash.
  ///
  /// # Panics
  ///
  /// This function panics if the `Isolate` that hosts the handle has been
  /// disposed.
  unsafe fn get_unchecked(&self) -> &Self::Data {
    let HandleInfo { data, host } = self.get_handle_info();
    if let HandleHost::DisposedIsolate = host {
      panic!("attempt to access Handle hosted by disposed Isolate");
    }
    // SAFETY: same-isolate requirement upheld by the caller; liveness of the
    // host isolate was just checked above.
    unsafe { &*data.as_ptr() }
  }
}
433
// `Handle` is implemented for owned and borrowed `Local`s, `Global`s and
// `UnsafeRefHandle`s. A `Local` cannot name its host isolate, so it reports
// `HandleHost::Scope`; the other handle types derive the host from their
// stored `IsolateHandle`.
impl<T> Handle for Local<'_, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    HandleInfo::new(self.as_non_null(), HandleHost::Scope)
  }
}

impl<'a, 's: 'a, T> Handle for &'a Local<'s, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    HandleInfo::new(self.as_non_null(), HandleHost::Scope)
  }
}

impl<T> Handle for Global<T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    HandleInfo::new(self.data, (&self.isolate_handle).into())
  }
}

impl<T> Handle for &Global<T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    HandleInfo::new(self.data, (&self.isolate_handle).into())
  }
}

impl<T> Handle for UnsafeRefHandle<'_, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    HandleInfo::new(
      NonNull::from(self.reference),
      (&self.isolate_handle).into(),
    )
  }
}

impl<T> Handle for &UnsafeRefHandle<'_, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    HandleInfo::new(
      NonNull::from(self.reference),
      (&self.isolate_handle).into(),
    )
  }
}

// A `Local` is always usable within its lifetime, so borrowing is direct.
impl<T> Borrow<T> for Local<'_, T> {
  fn borrow(&self) -> &T {
    self
  }
}

impl<T> Borrow<T> for Global<T> {
  fn borrow(&self) -> &T {
    let HandleInfo { data, host } = self.get_handle_info();
    // Unlike `Handle::open`, no isolate is available here, so only the host
    // isolate's liveness can be verified.
    if let HandleHost::DisposedIsolate = host {
      panic!("attempt to access Handle hosted by disposed Isolate");
    }
    unsafe { &*data.as_ptr() }
  }
}
497
// Equality and hashing delegate to the referenced V8 values.
impl<T> Eq for Local<'_, T> where T: Eq {}
impl<T> Eq for Global<T> where T: Eq {}

impl<T: Hash> Hash for Local<'_, T> {
  fn hash<H: Hasher>(&self, state: &mut H) {
    (**self).hash(state);
  }
}

impl<T: Hash> Hash for Global<T> {
  fn hash<H: Hasher>(&self, state: &mut H) {
    unsafe {
      // Hashing dereferences the stored pointer, which is only valid while
      // the host isolate is alive.
      if self.isolate_handle.get_isolate_ptr().is_null() {
        panic!("can't hash Global after its host Isolate has been disposed");
      }
      self.data.as_ref().hash(state);
    }
  }
}
517
// Two handles compare equal when their hosts match (as far as that can be
// determined — see `HandleHost::match_host`) and the referenced values
// compare equal.
impl<T, Rhs: Handle> PartialEq<Rhs> for Local<'_, T>
where
  T: PartialEq<Rhs::Data>,
{
  fn eq(&self, other: &Rhs) -> bool {
    let i1 = self.get_handle_info();
    let i2 = other.get_handle_info();
    // No scope isolate is available here, so `match_host` may conservatively
    // report a match (see its doc comment).
    i1.host.match_host(i2.host, None)
      && unsafe { i1.data.as_ref() == i2.data.as_ref() }
  }
}

impl<T, Rhs: Handle> PartialEq<Rhs> for Global<T>
where
  T: PartialEq<Rhs::Data>,
{
  fn eq(&self, other: &Rhs) -> bool {
    let i1 = self.get_handle_info();
    let i2 = other.get_handle_info();
    i1.host.match_host(i2.host, None)
      && unsafe { i1.data.as_ref() == i2.data.as_ref() }
  }
}
541
/// The (data pointer, host) pair that fully describes a handle; produced by
/// [`Handle::get_handle_info`].
#[derive(Copy, Debug, Clone)]
pub struct HandleInfo<T> {
  data: NonNull<T>,
  host: HandleHost,
}

impl<T> HandleInfo<T> {
  fn new(data: NonNull<T>, host: HandleHost) -> Self {
    Self { data, host }
  }
}
553
/// Identifies which `Isolate` (if determinable) hosts a handle.
#[derive(Copy, Debug, Clone)]
enum HandleHost {
  // Note: the `HandleHost::Scope` variant does not indicate that the handle
  // it applies to is not associated with an `Isolate`. It only means that
  // the handle is a `Local` handle that was unable to provide a pointer to
  // the `Isolate` that hosts it (the handle) and the currently entered
  // scope.
  Scope,
  Isolate(NonNull<RealIsolate>),
  DisposedIsolate,
}
565
impl From<&'_ Isolate> for HandleHost {
  fn from(isolate: &'_ Isolate) -> Self {
    // SAFETY: a live `&Isolate` wraps a non-null raw isolate pointer.
    Self::Isolate(unsafe { NonNull::new_unchecked(isolate.as_real_ptr()) })
  }
}
571
572impl From<&'_ IsolateHandle> for HandleHost {
573  fn from(isolate_handle: &IsolateHandle) -> Self {
574    NonNull::new(unsafe { isolate_handle.get_isolate_ptr() })
575      .map_or(Self::DisposedIsolate, Self::Isolate)
576  }
577}
578
impl HandleHost {
  /// Compares two `HandleHost` values, returning `true` if they refer to the
  /// same `Isolate`, or `false` if they refer to different isolates.
  ///
  /// If the caller knows which `Isolate` the currently entered scope (if any)
  /// belongs to, it should pass on this information via the second argument
  /// (`scope_isolate_opt`).
  ///
  /// # Panics
  ///
  /// This function panics if one of the `HandleHost` values refers to an
  /// `Isolate` that has been disposed.
  ///
  /// # Safety / Bugs
  ///
  /// The current implementation is a bit too forgiving. If it cannot decide
  /// whether two hosts refer to the same `Isolate`, it just returns `true`.
  /// Note that this can only happen when the caller does _not_ provide a value
  /// for the `scope_isolate_opt` argument.
  fn match_host(
    self,
    other: Self,
    scope_isolate_opt: Option<&Isolate>,
  ) -> bool {
    // SAFETY: a live `&Isolate` wraps a non-null raw isolate pointer.
    let scope_isolate_opt_nn = scope_isolate_opt
      .map(|isolate| unsafe { NonNull::new_unchecked(isolate.as_real_ptr()) });
    match (self, other, scope_isolate_opt_nn) {
      (Self::Scope, Self::Scope, _) => true,
      (Self::Isolate(ile1), Self::Isolate(ile2), _) => ile1 == ile2,
      // A `Scope` host can be resolved when the caller tells us which
      // isolate the current scope belongs to.
      (Self::Scope, Self::Isolate(ile1), Some(ile2)) => ile1 == ile2,
      (Self::Isolate(ile1), Self::Scope, Some(ile2)) => ile1 == ile2,
      // TODO(piscisaureus): If the caller didn't provide a `scope_isolate_opt`
      // value that works, we can't do a meaningful check. So all we do for now
      // is pretend the Isolates match and hope for the best. This eventually
      // needs to be tightened up.
      (Self::Scope, Self::Isolate(_), _) => true,
      (Self::Isolate(_), Self::Scope, _) => true,
      // Handles hosted in an Isolate that has been disposed aren't good for
      // anything, even if a pair of handles used to be hosted in the same
      // now-disposed Isolate.
      (Self::DisposedIsolate, ..) | (_, Self::DisposedIsolate, _) => {
        panic!("attempt to access Handle hosted by disposed Isolate")
      }
    }
  }

  /// Like [`Self::match_host`], but panics on a mismatch.
  fn assert_match_host(self, other: Self, scope_opt: Option<&Isolate>) {
    assert!(
      self.match_host(other, scope_opt),
      "attempt to use Handle in an Isolate that is not its host"
    );
  }

  /// Returns whether this host refers to the given `Isolate`.
  #[allow(dead_code)]
  fn match_isolate(self, isolate: &Isolate) -> bool {
    self.match_host(isolate.into(), Some(isolate))
  }

  /// Panics unless this host refers to the given `Isolate`.
  fn assert_match_isolate(self, isolate: &Isolate) {
    self.assert_match_host(isolate.into(), Some(isolate));
  }

  /// Returns the host isolate's pointer; panics for `Scope` (host unknown)
  /// and for `DisposedIsolate`.
  fn get_isolate(self) -> NonNull<RealIsolate> {
    match self {
      Self::Scope => panic!("host Isolate for Handle not available"),
      Self::Isolate(ile) => ile,
      Self::DisposedIsolate => panic!("attempt to access disposed Isolate"),
    }
  }

  /// Returns a thread-safe handle to the host isolate; panics in the same
  /// cases as [`Self::get_isolate`].
  #[allow(dead_code)]
  fn get_isolate_handle(self) -> IsolateHandle {
    let isolate = unsafe { Isolate::from_non_null(self.get_isolate()) };
    isolate.thread_safe_handle()
  }
}
655
/// An object reference that does not prevent garbage collection for the object,
/// and which allows installing finalization callbacks which will be called
/// after the object has been GC'd.
///
/// Note that finalization callbacks are tied to the lifetime of a `Weak<T>`,
/// and will not be called after the `Weak<T>` is dropped.
///
/// # `Clone`
///
/// Since finalization callbacks are specific to a `Weak<T>` instance, cloning
/// will create a new object reference without a finalizer, as if created by
/// [`Self::new`]. You can use [`Self::clone_with_finalizer`] to attach a
/// finalization callback to the clone.
#[derive(Debug)]
pub struct Weak<T> {
  // `None` for empty handles (see `Self::empty` and `Self::from_raw`);
  // otherwise the boxed bookkeeping record shared with the GC callbacks.
  data: Option<Box<WeakData<T>>>,
  isolate_handle: IsolateHandle,
}
674
675impl<T> Weak<T> {
  /// Creates a weak handle to the given value, without a finalizer.
  pub fn new(isolate: &mut Isolate, handle: impl Handle<Data = T>) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    Self::new_raw(isolate, data, None)
  }

  /// Create a weak handle with a finalization callback installed.
  ///
  /// There is no guarantee as to *when* or even *if* the finalization callback
  /// will be invoked. The invocation is performed solely on a best effort
  /// basis. GC-based finalization should *not* be relied upon for any critical
  /// form of resource management! Consider using
  /// [`Self::with_guaranteed_finalizer`] instead.
  ///
  /// The callback does not have access to the inner value, because it has
  /// already been collected by the time it runs.
  pub fn with_finalizer(
    isolate: &mut Isolate,
    handle: impl Handle<Data = T>,
    finalizer: Box<dyn FnOnce(&mut Isolate)>,
  ) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    // The finalizer itself lives in the isolate's finalizer map; only its id
    // travels with the weak handle.
    let finalizer_id = isolate
      .get_finalizer_map_mut()
      .add(FinalizerCallback::Regular(finalizer));
    Self::new_raw(isolate, data, Some(finalizer_id))
  }

  /// Create a weak handle with a finalization callback installed, which is
  /// guaranteed to run at some point.
  ///
  /// Unlike [`Self::with_finalizer`], whose finalization callbacks are not
  /// guaranteed to run, this method is guaranteed to be called before the
  /// isolate is destroyed. It can therefore be used for critical resource
  /// management. Note that other than that, there is still no guarantee as to
  /// *when* the callback will be called.
  ///
  /// Unlike regular finalizers, guaranteed finalizers aren't passed a mutable
  /// [`Isolate`] reference, since they might be called when the isolate is
  /// being destroyed, at which point it might be no longer valid to use.
  /// Accessing the isolate (with unsafe code) from the finalizer callback is
  /// therefore unsound, unless you prove the isolate is not being destroyed.
  pub fn with_guaranteed_finalizer(
    isolate: &mut Isolate,
    handle: impl Handle<Data = T>,
    finalizer: Box<dyn FnOnce()>,
  ) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    let finalizer_id = isolate
      .get_finalizer_map_mut()
      .add(FinalizerCallback::Guaranteed(finalizer));
    Self::new_raw(isolate, data, Some(finalizer_id))
  }
731
  /// Shared implementation for the `Weak` constructors: registers a weak
  /// storage cell with V8 and boxes the bookkeeping data whose address is
  /// handed to the first-pass callback.
  fn new_raw(
    isolate: *mut Isolate,
    data: NonNull<T>,
    finalizer_id: Option<FinalizerId>,
  ) -> Self {
    let weak_data = Box::new(WeakData {
      pointer: Default::default(),
      finalizer_id,
      weak_dropped: Cell::new(false),
    });
    let data = data.cast().as_ptr();
    // SAFETY: the caller passes a valid isolate pointer; `weak_data` is
    // boxed, so the address passed as the callback parameter stays stable.
    let data = unsafe {
      v8__Global__NewWeak(
        (*isolate).as_real_ptr(),
        data,
        weak_data.deref() as *const _ as *const c_void,
        Self::first_pass_callback,
      )
    };
    weak_data
      .pointer
      // SAFETY: `v8__Global__NewWeak` is expected to return a non-null cell.
      .set(Some(unsafe { NonNull::new_unchecked(data as *mut _) }));
    Self {
      data: Some(weak_data),
      // SAFETY: caller guarantees `isolate` points to a live isolate.
      isolate_handle: unsafe { (*isolate).thread_safe_handle() },
    }
  }

  /// Creates a new empty handle, identical to one for an object that has
  /// already been GC'd.
  pub fn empty(isolate: &mut Isolate) -> Self {
    Weak {
      data: None,
      isolate_handle: isolate.thread_safe_handle(),
    }
  }
768
  /// Clones this handle and installs a finalizer callback on the clone, as if
  /// by calling [`Self::with_finalizer`].
  ///
  /// Note that if this handle is empty (its value has already been GC'd), the
  /// finalization callback will never run.
  pub fn clone_with_finalizer(
    &self,
    finalizer: Box<dyn FnOnce(&mut Isolate)>,
  ) -> Self {
    self.clone_raw(Some(FinalizerCallback::Regular(finalizer)))
  }

  /// Clones this handle and installs a guaranteed finalizer callback on the
  /// clone, as if by calling [`Self::with_guaranteed_finalizer`].
  ///
  /// Note that if this handle is empty (its value has already been GC'd), the
  /// finalization callback will never run.
  pub fn clone_with_guaranteed_finalizer(
    &self,
    finalizer: Box<dyn FnOnce()>,
  ) -> Self {
    self.clone_raw(Some(FinalizerCallback::Guaranteed(finalizer)))
  }

  /// Shared implementation of `Clone` and the `clone_with_*` methods.
  fn clone_raw(&self, finalizer: Option<FinalizerCallback>) -> Self {
    if let Some(data) = self.get_pointer() {
      // SAFETY: We're in the isolate's thread, because Weak<T> isn't Send or
      // Sync.
      let isolate_ptr = unsafe { self.isolate_handle.get_isolate_ptr() };
      if isolate_ptr.is_null() {
        // `get_pointer()` returns `None` once the isolate pointer is null,
        // so a live pointer with a dead isolate cannot occur here.
        unreachable!("Isolate was dropped but weak handle wasn't reset.");
      }
      let mut isolate = unsafe { Isolate::from_raw_ptr(isolate_ptr) };
      let finalizer_id = finalizer
        .map(|finalizer| isolate.get_finalizer_map_mut().add(finalizer));
      Self::new_raw(&mut isolate, data, finalizer_id)
    } else {
      // Cloning an empty handle yields another empty handle.
      Weak {
        data: None,
        isolate_handle: self.isolate_handle.clone(),
      }
    }
  }
812
  /// Converts an optional raw pointer created with [`Weak::into_raw()`] back
  /// to its original `Weak`.
  ///
  /// # Safety
  ///
  /// If this method is called with `Some`, the pointer is invalidated and it
  /// cannot be used with this method again. Additionally, it is unsound to
  /// call this method with an isolate other than that in which the original
  /// `Weak` was created.
  pub unsafe fn from_raw(
    isolate: &mut Isolate,
    data: Option<NonNull<WeakData<T>>>,
  ) -> Self {
    Weak {
      // SAFETY: per the caller contract, `raw` came from `Box::into_raw` in
      // `into_raw()`, so reconstituting the box is valid.
      data: data.map(|raw| unsafe { Box::from_raw(raw.cast().as_ptr()) }),
      isolate_handle: isolate.thread_safe_handle(),
    }
  }

  /// Consume this `Weak` handle and return the underlying raw pointer, or
  /// `None` if the value has been GC'd.
  ///
  /// The return value can be converted back into a `Weak` by using
  /// [`Weak::from_raw`]. Note that `Weak` allocates some memory, and if this
  /// method returns `Some`, the pointer must be converted back into a `Weak`
  /// for it to be freed.
  ///
  /// Note that this method might return `Some` even after the V8 value has been
  /// GC'd.
  pub fn into_raw(mut self) -> Option<NonNull<WeakData<T>>> {
    if let Some(data) = self.data.take() {
      // Determine whether a finalizer is still registered; if so the second
      // pass callback may still fire and the box must stay alive.
      let has_finalizer = if let Some(finalizer_id) = data.finalizer_id {
        // SAFETY: We're in the isolate's thread because Weak isn't Send or Sync
        let isolate_ptr = unsafe { self.isolate_handle.get_isolate_ptr() };
        if isolate_ptr.is_null() {
          // Disposed isolates have no finalizers.
          false
        } else {
          let isolate = unsafe { Isolate::from_raw_ptr(isolate_ptr) };
          isolate.get_finalizer_map().map.contains_key(&finalizer_id)
        }
      } else {
        false
      };

      if data.pointer.get().is_none() && !has_finalizer {
        // If the pointer is None and we're not waiting for the second pass,
        // drop the box and return None.
        None
      } else {
        assert!(!data.weak_dropped.get());
        Some(unsafe { NonNull::new_unchecked(Box::into_raw(data)) })
      }
    } else {
      None
    }
  }
868
  /// Returns the storage-cell pointer, or `None` if the handle is empty, the
  /// value has been GC'd, or the host isolate is gone.
  fn get_pointer(&self) -> Option<NonNull<T>> {
    if let Some(data) = &self.data {
      // It seems like when the isolate is dropped, even the first pass callback
      // might not be called.
      if unsafe { self.isolate_handle.get_isolate_ptr() }.is_null() {
        None
      } else {
        data.pointer.get()
      }
    } else {
      None
    }
  }

  /// Returns `true` if the referenced value has been GC'd (or the handle was
  /// created empty).
  pub fn is_empty(&self) -> bool {
    self.get_pointer().is_none()
  }

  /// Upgrades this weak handle to a strong `Global`, or returns `None` if
  /// the value has already been GC'd. Panics if `isolate` is not the host.
  pub fn to_global(&self, isolate: &mut Isolate) -> Option<Global<T>> {
    if let Some(data) = self.get_pointer() {
      let handle_host: HandleHost = (&self.isolate_handle).into();
      handle_host.assert_match_isolate(isolate);
      Some(unsafe { Global::new_raw(isolate, data) })
    } else {
      None
    }
  }

  /// Creates a `Local` for the referenced value in the given scope, or
  /// returns `None` if the value has already been GC'd.
  pub fn to_local<'s>(
    &self,
    scope: &PinScope<'s, '_, ()>,
  ) -> Option<Local<'s, T>> {
    if let Some(data) = self.get_pointer() {
      let handle_host: HandleHost = (&self.isolate_handle).into();
      handle_host.assert_match_isolate(scope);
      let local = unsafe {
        scope.cast_local(|sd| {
          v8__Local__New(sd.get_isolate_ptr(), data.cast().as_ptr()) as *const T
        })
      };
      Some(local.unwrap())
    } else {
      None
    }
  }
914
  // Finalization callbacks.

  /// First-pass weak callback, invoked by V8 when the referenced object is
  /// garbage collected. Clears the stored pointer (marking the handle empty),
  /// resets the underlying global, and registers the second-pass callback if
  /// a finalizer was attached.
  unsafe extern "C" fn first_pass_callback(wci: *const WeakCallbackInfo) {
    // SAFETY: If this callback is called, then the weak handle hasn't been
    // reset, which means the `Weak` instance which owns the pinned box that the
    // parameter points to hasn't been dropped.
    let weak_data = unsafe {
      let ptr = v8__WeakCallbackInfo__GetParameter(wci);
      &*(ptr as *mut WeakData<T>)
    };

    // Take the pointer out of the cell, leaving `None` behind so
    // `get_pointer` reports the handle as empty from now on.
    let data = weak_data.pointer.take().unwrap();
    unsafe {
      v8__Global__Reset(data.cast().as_ptr());
    }

    // Only set the second pass callback if there could be a finalizer.
    if weak_data.finalizer_id.is_some() {
      unsafe {
        v8__WeakCallbackInfo__SetSecondPassCallback(
          wci,
          Self::second_pass_callback,
        );
      };
    }
  }
941
  /// Second-pass weak callback. Only registered by `first_pass_callback` when
  /// `finalizer_id` is `Some`, so the `unwrap` below cannot fail. Runs the
  /// registered finalizer and, if the owning `Weak` was already dropped,
  /// reclaims the leaked `WeakData` box.
  unsafe extern "C" fn second_pass_callback(wci: *const WeakCallbackInfo) {
    // SAFETY: This callback is guaranteed by V8 to be called in the isolate's
    // thread before the isolate is disposed.
    let isolate = unsafe { v8__WeakCallbackInfo__GetIsolate(wci) };

    // SAFETY: This callback might be called well after the first pass callback,
    // which means the corresponding Weak might have been dropped. In Weak's
    // Drop impl we make sure that if the second pass callback hasn't yet run, the
    // Box<WeakData<T>> is leaked, so it will still be alive by the time this
    // callback is called.
    let weak_data = unsafe {
      let ptr = v8__WeakCallbackInfo__GetParameter(wci);
      &*(ptr as *mut WeakData<T>)
    };

    let mut isolate = unsafe { Isolate::from_raw_ptr(isolate) };
    // Remove the finalizer from the isolate's map before running it; its
    // absence also signals to `Weak::drop` that the second pass already ran.
    let finalizer: Option<FinalizerCallback> = {
      let finalizer_id = weak_data.finalizer_id.unwrap();
      isolate.get_finalizer_map_mut().map.remove(&finalizer_id)
    };

    if weak_data.weak_dropped.get() {
      // SAFETY: If weak_dropped is true, the corresponding Weak has been dropped,
      // so it's safe to take ownership of the Box<WeakData<T>> and drop it.
      let _ = unsafe {
        Box::from_raw(weak_data as *const WeakData<T> as *mut WeakData<T>)
      };
    }

    // Run the finalizer last, after all bookkeeping is complete. `weak_data`
    // must not be touched past this point: it may have just been freed above.
    match finalizer {
      Some(FinalizerCallback::Regular(finalizer)) => finalizer(&mut isolate),
      Some(FinalizerCallback::Guaranteed(finalizer)) => finalizer(),
      None => {}
    }
  }
977}
978
impl<T> Clone for Weak<T> {
  fn clone(&self) -> Self {
    // NOTE(review): the `None` argument presumably means the clone gets no
    // finalizer of its own — confirm against `clone_raw` (not in this chunk).
    self.clone_raw(None)
  }
}
984
/// Dropping a `Weak` must coordinate with the two GC callbacks: if the first
/// pass hasn't run yet, the global is reset here; if the first pass ran but
/// the second hasn't, the `WeakData` box is deliberately leaked so the second
/// pass callback can still read it (and free it — see `second_pass_callback`).
impl<T> Drop for Weak<T> {
  fn drop(&mut self) {
    // Returns whether the finalizer existed.
    let remove_finalizer = |finalizer_id: Option<FinalizerId>| -> bool {
      if let Some(finalizer_id) = finalizer_id {
        // SAFETY: We're in the isolate's thread because `Weak` isn't Send or Sync.
        let isolate_ptr = unsafe { self.isolate_handle.get_isolate_ptr() };
        if !isolate_ptr.is_null() {
          let mut isolate = unsafe { Isolate::from_raw_ptr(isolate_ptr) };
          let finalizer =
            isolate.get_finalizer_map_mut().map.remove(&finalizer_id);
          return finalizer.is_some();
        }
      }
      false
    };

    if let Some(data) = self.get_pointer() {
      // If the pointer is not None, the first pass callback hasn't been
      // called yet, and resetting will prevent it from being called.
      unsafe { v8__Global__Reset(data.cast().as_ptr()) };
      remove_finalizer(self.data.as_ref().unwrap().finalizer_id);
    } else if let Some(weak_data) = self.data.take() {
      // The second pass callback removes the finalizer, so if there is one,
      // the second pass hasn't yet run, and WeakData will have to be alive.
      // In that case we leak the WeakData but remove the finalizer.
      if remove_finalizer(weak_data.finalizer_id) {
        weak_data.weak_dropped.set(true);
        Box::leak(weak_data);
      }
    }
  }
}
1018
// Marker impl: when `T: Eq`, `Weak<T>` equality is a full equivalence relation.
impl<T> Eq for Weak<T> where T: Eq {}
1020
1021impl<T, Rhs: Handle> PartialEq<Rhs> for Weak<T>
1022where
1023  T: PartialEq<Rhs::Data>,
1024{
1025  fn eq(&self, other: &Rhs) -> bool {
1026    let HandleInfo {
1027      data: other_data,
1028      host: other_host,
1029    } = other.get_handle_info();
1030    let self_host: HandleHost = (&self.isolate_handle).into();
1031    if !self_host.match_host(other_host, None) {
1032      false
1033    } else if let Some(self_data) = self.get_pointer() {
1034      unsafe { self_data.as_ref() == other_data.as_ref() }
1035    } else {
1036      false
1037    }
1038  }
1039}
1040
1041impl<T, T2> PartialEq<Weak<T2>> for Weak<T>
1042where
1043  T: PartialEq<T2>,
1044{
1045  fn eq(&self, other: &Weak<T2>) -> bool {
1046    let self_host: HandleHost = (&self.isolate_handle).into();
1047    let other_host: HandleHost = (&other.isolate_handle).into();
1048    if !self_host.match_host(other_host, None) {
1049      return false;
1050    }
1051    match (self.get_pointer(), other.get_pointer()) {
1052      (Some(self_data), Some(other_data)) => unsafe {
1053        self_data.as_ref() == other_data.as_ref()
1054      },
1055      (None, None) => true,
1056      _ => false,
1057    }
1058  }
1059}
1060
/// The inner mechanism behind [`Weak`] and finalizations.
///
/// This struct is heap-allocated and will not move until it's dropped, so it
/// can be accessed by the finalization callbacks by creating a shared reference
/// from a pointer. The fields are wrapped in [`Cell`] so they are modifiable by
/// both the [`Weak`] and the finalization callbacks.
pub struct WeakData<T> {
  // Pointer to the underlying global handle's data; `None` once the first
  // pass weak callback has taken it (i.e. the object was collected) or if the
  // handle was created empty.
  pointer: Cell<Option<NonNull<T>>>,
  // Key into the isolate's `FinalizerMap`, if a finalizer was registered.
  finalizer_id: Option<FinalizerId>,
  // Set by `Weak::drop` when the `Weak` is dropped while the second pass
  // callback is still pending, telling that callback to free this box.
  weak_dropped: Cell<bool>,
}
1072
1073impl<T> std::fmt::Debug for WeakData<T> {
1074  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1075    f.debug_struct("WeakData")
1076      .field("pointer", &self.pointer)
1077      .finish_non_exhaustive()
1078  }
1079}
1080
// Opaque stand-in for the C++ `v8::WeakCallbackInfo`; only ever used behind a
// raw pointer in the FFI declarations at the top of this file.
#[repr(C)]
struct WeakCallbackInfo(Opaque);

// Key used to look up a registered finalizer in a `FinalizerMap`.
type FinalizerId = usize;

// A finalizer attached to a `Weak` handle, run by `second_pass_callback`.
pub(crate) enum FinalizerCallback {
  // Invoked with mutable access to the isolate during the second pass.
  Regular(Box<dyn FnOnce(&mut Isolate)>),
  // Invoked without isolate access. NOTE(review): presumably "guaranteed" to
  // run even without GC, via `FinalizerMap::drain` at teardown — confirm.
  Guaranteed(Box<dyn FnOnce()>),
}
1090
/// Per-isolate registry of pending finalizer callbacks, keyed by
/// [`FinalizerId`].
#[derive(Default)]
pub(crate) struct FinalizerMap {
  map: std::collections::HashMap<FinalizerId, FinalizerCallback>,
  // The next id `add` will hand out; ids are never reused.
  next_id: FinalizerId,
}
1096
1097impl FinalizerMap {
1098  fn add(&mut self, finalizer: FinalizerCallback) -> FinalizerId {
1099    let id = self.next_id;
1100    // TODO: Overflow.
1101    self.next_id += 1;
1102    self.map.insert(id, finalizer);
1103    id
1104  }
1105
1106  pub(crate) fn drain(
1107    &mut self,
1108  ) -> impl Iterator<Item = FinalizerCallback> + '_ {
1109    self.map.drain().map(|(_, finalizer)| finalizer)
1110  }
1111}
1112
/// A traced handle without destructor that clears the handle. The embedder needs
/// to ensure that the handle is not accessed once the V8 object has been
/// reclaimed. For more details see BasicTracedReference.
#[repr(C)]
pub struct TracedReference<T> {
  // Raw storage sized to match the C++ `v8::TracedReference`; only ever
  // manipulated through the FFI functions declared at the top of this file.
  data: [u8; crate::binding::v8__TracedReference_SIZE],
  // Carries the handle's element type without storing a `T`.
  _phantom: PhantomData<T>,
}
1121
impl<T> TracedReference<T> {
  /// An empty TracedReference without storage cell.
  pub fn empty() -> Self {
    // SAFETY: the C++ constructor initializes the raw `data` bytes in place,
    // after which the value is fully initialized and `assume_init` is sound.
    let mut this = std::mem::MaybeUninit::uninit();
    unsafe {
      v8__TracedReference__CONSTRUCT(this.as_mut_ptr() as _);
      this.assume_init()
    }
  }

  /// Construct a TracedReference from a Local.
  ///
  /// A new storage cell is created pointing to the same object.
  pub fn new<'s>(scope: &PinScope<'s, '_, ()>, data: Local<'s, T>) -> Self {
    let mut this = Self::empty();
    this.reset(scope, Some(data));
    this
  }

  /// Returns a [`Local`] handle to the referenced object, or `None` when the
  /// FFI call yields a null pointer (i.e. the reference is unset).
  pub fn get<'s>(&self, scope: &PinScope<'s, '_, ()>) -> Option<Local<'s, T>> {
    unsafe {
      scope.cast_local(|sd| {
        v8__TracedReference__Get(
          self as *const Self as *const TracedReference<Data>,
          sd.get_isolate_ptr(),
        ) as *const T
      })
    }
  }

  /// Always resets the reference. Creates a new reference from `other` if it is
  /// non-empty.
  pub fn reset<'s>(
    &mut self,
    scope: &PinScope<'s, '_, ()>,
    data: Option<Local<'s, T>>,
  ) {
    unsafe {
      v8__TracedReference__Reset(
        self as *mut Self as *mut TracedReference<Data>,
        scope.get_isolate_ptr(),
        // `None` maps to a null pointer, which clears the reference.
        data
          .map_or(std::ptr::null_mut(), |h| h.as_non_null().as_ptr())
          .cast(),
      );
    }
  }
}
1170
impl<T> Drop for TracedReference<T> {
  fn drop(&mut self) {
    // Run the C++ `v8::TracedReference` destructor on the inline storage.
    unsafe {
      v8__TracedReference__DESTRUCT(
        self as *mut Self as *mut TracedReference<Data>,
      );
    }
  }
}
1180
/// Eternal handles are set-once handles that live for the lifetime of the isolate.
#[repr(C)]
pub struct Eternal<T> {
  // Raw storage sized to match the C++ `v8::Eternal`; only ever accessed
  // through the FFI functions declared at the top of this file.
  data: [u8; crate::binding::v8__Eternal_SIZE],
  // Carries the handle's element type without storing a `T`.
  _phantom: PhantomData<T>,
}
1187
impl<T> Eternal<T> {
  /// Creates an unset Eternal handle.
  pub fn empty() -> Self {
    // SAFETY: the C++ constructor initializes the raw `data` bytes in place,
    // after which the value is fully initialized and `assume_init` is sound.
    let mut this = std::mem::MaybeUninit::uninit();
    unsafe {
      v8__Eternal__CONSTRUCT(this.as_mut_ptr() as _);
      this.assume_init()
    }
  }

  /// Clears the handle. Takes `&self`: the mutation happens behind the FFI
  /// boundary, where the pointer is cast to `*mut` for the C++ call.
  pub fn clear(&self) {
    unsafe {
      v8__Eternal__Clear(self as *const Self as *mut Eternal<Data>);
    }
  }

  /// Points this handle at `data`. As with `clear`, mutation happens on the
  /// C++ side via an interior pointer cast, hence `&self`.
  pub fn set<'s>(&self, scope: &PinScope<'s, '_, ()>, data: Local<'s, T>) {
    unsafe {
      v8__Eternal__Set(
        self as *const Self as *mut Eternal<Data>,
        scope.get_isolate_ptr(),
        data.as_non_null().as_ptr().cast(),
      );
    }
  }

  /// Returns a [`Local`] for the stored value, or `None` when the FFI call
  /// yields a null pointer (i.e. the handle is unset).
  pub fn get<'s>(&self, scope: &PinScope<'s, '_, ()>) -> Option<Local<'s, T>> {
    unsafe {
      scope.cast_local(|sd| {
        v8__Eternal__Get(
          self as *const Self as *const Eternal<Data>,
          sd.get_isolate_ptr(),
        ) as *const T
      })
    }
  }

  /// Returns `true` if no value has been set, or if it has been cleared.
  pub fn is_empty(&self) -> bool {
    unsafe { v8__Eternal__IsEmpty(self as *const Self as *const Eternal<Data>) }
  }
}
1228
impl<T> Drop for Eternal<T> {
  fn drop(&mut self) {
    // Run the C++ `v8::Eternal` destructor on the inline storage.
    unsafe {
      v8__Eternal__DESTRUCT(self as *mut Self as *mut Eternal<Data>);
    }
  }
}
1236
/// A Local<T> passed from V8 without an inherent scope.
/// The value must be "unsealed" with Scope::unseal to bind
/// it to a lifetime.
#[derive(Debug)]
// repr(transparent): layout-identical to the raw non-null pointer V8 hands us.
#[repr(transparent)]
pub struct SealedLocal<T>(pub(crate) NonNull<T>);