// v8_goose/isolate.rs
1// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapSpaceStatistics;
22use crate::binding::v8__HeapStatistics;
23use crate::binding::v8__Isolate__UseCounterFeature;
24pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
25use crate::cppgc::Heap;
26use crate::external_references::ExternalReference;
27use crate::function::FunctionCallbackInfo;
28use crate::gc::GCCallbackFlags;
29use crate::gc::GCType;
30use crate::handle::FinalizerCallback;
31use crate::handle::FinalizerMap;
32use crate::isolate_create_params::CreateParams;
33use crate::isolate_create_params::raw;
34use crate::promise::PromiseRejectMessage;
35use crate::snapshot::SnapshotCreator;
36use crate::support::MapFnFrom;
37use crate::support::MapFnTo;
38use crate::support::Opaque;
39use crate::support::ToCFn;
40use crate::support::UnitType;
41use crate::support::char;
42use crate::support::int;
43use crate::support::size_t;
44use crate::wasm::WasmStreaming;
45use crate::wasm::trampoline;
46use std::ffi::CStr;
47
48use std::any::Any;
49use std::any::TypeId;
50use std::borrow::Cow;
51use std::collections::HashMap;
52use std::ffi::c_void;
53use std::fmt::{self, Debug, Formatter};
54use std::hash::BuildHasher;
55use std::hash::Hasher;
56use std::mem::MaybeUninit;
57use std::mem::align_of;
58use std::mem::forget;
59use std::mem::needs_drop;
60use std::mem::size_of;
61use std::ops::Deref;
62use std::ops::DerefMut;
63use std::pin::pin;
64use std::ptr;
65use std::ptr::NonNull;
66use std::ptr::addr_of_mut;
67use std::ptr::drop_in_place;
68use std::ptr::null_mut;
69use std::sync::Arc;
70use std::sync::Mutex;
71
/// Policy for running microtasks:
///   - explicit: microtasks are invoked with the
///     Isolate::PerformMicrotaskCheckpoint() method;
///   - auto: microtasks are invoked when the script call depth decrements
///     to zero.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  /// Run microtasks only on an explicit checkpoint request.
  Explicit = 0,
  // Scoped = 1 (RAII) is omitted for now, doesn't quite map to idiomatic Rust.
  /// Run microtasks automatically when the script call depth reaches zero.
  Auto = 2,
}
84
/// Memory pressure level for the MemoryPressureNotification.
/// None hints V8 that there is no memory pressure.
/// Moderate hints V8 to speed up incremental garbage collection at the cost
/// of higher latency due to garbage collection pauses.
/// Critical hints V8 to free memory as soon as possible. Garbage collection
/// pauses at this level will be large.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  /// No memory pressure.
  None = 0,
  /// Speed up incremental GC at the cost of longer pauses.
  Moderate = 1,
  /// Free memory as soon as possible; expect large GC pauses.
  Critical = 2,
}
98
/// Time zone redetection indicator for
/// DateTimeConfigurationChangeNotification.
///
/// kSkip indicates V8 that the notification should not trigger redetecting
/// host time zone. kRedetect indicates V8 that host time zone should be
/// redetected, and used to set the default time zone.
///
/// The host time zone detection may require file system access or similar
/// operations unlikely to be available inside a sandbox. If v8 is run inside a
/// sandbox, the host time zone has to be detected outside the sandbox before
/// calling DateTimeConfigurationChangeNotification function.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  /// Do not redetect the host time zone.
  Skip = 0,
  /// Redetect the host time zone and use it as the new default.
  Redetect = 1,
}
116
/// PromiseHook with type Init is called when a new promise is
/// created. When a new promise is created as part of the chain in the
/// case of Promise.then or in the intermediate promises created by
/// Promise.{race, all}/AsyncFunctionAwait, we pass the parent promise
/// otherwise we pass undefined.
///
/// PromiseHook with type Resolve is called at the beginning of
/// resolve or reject function defined by CreateResolvingFunctions.
///
/// PromiseHook with type Before is called at the beginning of the
/// PromiseReactionJob.
///
/// PromiseHook with type After is called right at the end of the
/// PromiseReactionJob.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  /// A new promise was created.
  Init,
  /// A resolve/reject function created by CreateResolvingFunctions ran.
  Resolve,
  /// A PromiseReactionJob is about to run.
  Before,
  /// A PromiseReactionJob just finished.
  After,
}
139
/// Types of garbage collections that can be requested via
/// [`Isolate::request_garbage_collection_for_testing`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  /// Request a full (major) garbage collection.
  Full,
  /// Request a minor garbage collection.
  Minor,
}
148
/// Listener registered via the `v8__Isolate__AddMessageListener*` bindings;
/// receives the message and the associated error value.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);
150
bitflags! {
  /// Message severity levels, used to filter which messages a listener
  /// registered through `v8__Isolate__AddMessageListenerWithErrorLevel`
  /// receives.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    /// Union of all individual levels above.
    const ALL = (1 << 5) - 1;
  }
}
163
/// Hook invoked at the promise lifecycle points described by
/// [`PromiseHookType`].
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback invoked with details about a promise rejection.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome reported to a [`WasmAsyncResolvePromiseCallback`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback used to settle the promise for an async Wasm operation; the
/// [`WasmAsyncSuccess`] argument indicates whether to resolve or reject.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);
182
/// Callback consulted before Wasm code generation in a context; returns
/// whether generation is allowed (per the V8 C++
/// `AllowWasmCodeGenerationCallback` API).
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// HostInitializeImportMetaObjectCallback is called the first time import.meta
/// is accessed for a module. Subsequent access will reuse the same value.
///
/// The method combines two implementation-defined abstract operations into one:
/// HostGetImportMetaProperties and HostFinalizeImportMeta.
///
/// The embedder should use v8::Object::CreateDataProperty to add properties on
/// the meta object.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
196
197/// HostImportModuleDynamicallyCallback is called when we require the embedder
198/// to load a module. This is used as part of the dynamic import syntax.
199///
200/// The host_defined_options are metadata provided by the host environment, which may be used
201/// to customize or further specify how the module should be imported.
202///
203/// The resource_name is the identifier or path for the module or script making the import request.
204///
205/// The specifier is the name of the module that should be imported.
206///
207/// The import_attributes are import assertions for this request in the form:
208/// [key1, value1, key2, value2, ...] where the keys and values are of type
209/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
210/// returned from ModuleRequest::GetImportAssertions(), this array does not
211/// contain the source Locations of the assertions.
212///
213/// The embedder must compile, instantiate, evaluate the Module, and obtain its
214/// namespace object.
215///
216/// The Promise returned from this function is forwarded to userland JavaScript.
217/// The embedder must resolve this promise with the module namespace object. In
218/// case of an exception, the embedder must reject this promise with the
219/// exception. If the promise creation itself fails (e.g. due to stack
220/// overflow), the embedder must propagate that exception by returning an empty
221/// MaybeLocal.
222///
223/// # Example
224///
225/// ```
226/// fn host_import_module_dynamically_callback_example<'s>(
227///   scope: &mut v8::HandleScope<'s>,
228///   host_defined_options: v8::Local<'s, v8::Data>,
229///   resource_name: v8::Local<'s, v8::Value>,
230///   specifier: v8::Local<'s, v8::String>,
231///   import_attributes: v8::Local<'s, v8::FixedArray>,
232/// ) -> Option<v8::Local<'s, v8::Promise>> {
233///   todo!()
234/// }
235/// ```
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this callback (a unit type, per the `UnitType` bound) into the
  /// raw platform-specific `extern "C"` function pointer handed to V8.
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
248
// System V / Unix ABI: the MaybeLocal<Promise> result fits in a register, so
// the callback returns the raw pointer directly (null means "empty", i.e.
// the Rust-side `None`; see the abi_adapter below).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;
258
// Windows x64/aarch64 ABI: the MaybeLocal<Promise> is returned on the stack
// through a hidden out-pointer passed as the first argument, which is also
// the return value.
// NOTE(review): this alias is `pub` while the unix variant above is
// `pub(crate)` — confirm the visibility difference is intentional.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
272
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Inner layer: rebuild the unit-typed callback with `F::get()`, enter a
    // CallbackScope for the context V8 handed us, and invoke the user
    // callback with that scope.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      // SAFETY(review): relies on V8 invoking this callback while `context`
      // is current — `CallbackScope::new` is unsafe for that reason.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Outer layer, System V / Unix ABI: `None` becomes a null pointer (an
    // empty MaybeLocal); `Some` becomes the raw Promise pointer.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer, Windows x64/aarch64 ABI: the result is written through
    // the caller-provided `return_value` out-pointer, which is also returned.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
364
365/// HostImportModuleWithPhaseDynamicallyCallback is called when we
366/// require the embedder to load a module with a specific phase. This is used
367/// as part of the dynamic import syntax.
368///
369/// The referrer contains metadata about the script/module that calls
370/// import.
371///
372/// The specifier is the name of the module that should be imported.
373///
374/// The phase is the phase of the import requested.
375///
376/// The import_attributes are import attributes for this request in the form:
377/// [key1, value1, key2, value2, ...] where the keys and values are of type
378/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
379/// returned from ModuleRequest::GetImportAttributes(), this array does not
380/// contain the source Locations of the attributes.
381///
382/// The Promise returned from this function is forwarded to userland
383/// JavaScript. The embedder must resolve this promise according to the phase
384/// requested:
385/// - For ModuleImportPhase::kSource, the promise must be resolved with a
386///   compiled ModuleSource object, or rejected with a SyntaxError if the
387///   module does not support source representation.
388/// - For ModuleImportPhase::kEvaluation, the promise must be resolved with a
389///   ModuleNamespace object of a module that has been compiled, instantiated,
390///   and evaluated.
391///
392/// In case of an exception, the embedder must reject this promise with the
393/// exception. If the promise creation itself fails (e.g. due to stack
394/// overflow), the embedder must propagate that exception by returning an empty
395/// MaybeLocal.
396///
397/// This callback is still experimental and is only invoked for source phase
398/// imports.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this callback (a unit type, per the `UnitType` bound) into the
  /// raw platform-specific `extern "C"` function pointer handed to V8.
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
412
// System V / Unix ABI: the MaybeLocal<Promise> result is returned in a
// register as a raw pointer (null means "empty", i.e. `None`).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;
423
// Windows x64/aarch64 ABI: the MaybeLocal<Promise> is returned via a hidden
// out-pointer passed as the first argument, which is also the return value.
// NOTE(review): `pub` here vs `pub(crate)` on the unix variant — confirm the
// visibility difference is intentional.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
438
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Inner layer: rebuild the unit-typed callback with `F::get()` and call
    // it inside a CallbackScope derived from the current context.
    // NOTE(review): unlike the non-phase variant, no explicit 'i lifetime is
    // declared here — presumably relying on elision; confirm equivalence.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      // SAFETY(review): relies on V8 invoking this callback while `context`
      // is current — `CallbackScope::new` is unsafe for that reason.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // System V / Unix ABI adapter: `None` -> null (empty MaybeLocal).
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI adapter: result written through the hidden
    // `return_value` out-pointer, which is also returned.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
537
538/// `HostCreateShadowRealmContextCallback` is called each time a `ShadowRealm`
539/// is being constructed. You can use [`HandleScope::get_current_context`] to
540/// get the [`Context`] in which the constructor is being run.
541///
542/// The method combines [`Context`] creation and the implementation-defined
543/// abstract operation `HostInitializeShadowRealm` into one.
544///
545/// The embedder should use [`Context::new`] to create a new context. If the
546/// creation fails, the embedder must propagate that exception by returning
547/// [`None`].
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback carrying a caller-supplied `data` pointer;
/// registered through the `v8__Isolate__AddGC{Prologue,Epilogue}Callback`
/// bindings below.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback run by V8 in response to `v8__Isolate__RequestInterrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Near-heap-limit callback; per the V8 C++ API the returned value becomes
/// the heap limit V8 uses from then on.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;
566
/// Details handed to an [`OomErrorCallback`] when V8 reports an
/// out-of-memory condition. Mirrors the C++ `v8::OOMDetails` layout.
#[repr(C)]
pub struct OomDetails {
  // Whether the OOM originated in the JS heap (as opposed to a process
  // allocation failure), per the field name.
  pub is_heap_oom: bool,
  // Detail message pointer. NOTE(review): presumably a nul-terminated C
  // string that may be null — confirm against the C++ binding.
  pub detail: *const char,
}

/// Handler installed with `v8__Isolate__SetOOMErrorHandler`.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);
575
// Windows x64 ABI: MaybeLocal<Value> returned on the stack, via the hidden
// out-pointer passed (and returned) as the first parameter.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// System V ABI: MaybeLocal<Value> returned in a register.
// System V i386 ABI: Local<Value> returned in hidden pointer (struct).
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Re-export of the raw use-counter feature enum from the bindings.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback invoked when V8 counts use of a [`UseCounterFeature`];
/// installed with `v8__Isolate__SetUseCounterCallback`.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
603
// Raw FFI declarations for the C++ glue layer. Each `v8__Isolate__*`
// function mirrors the like-named `v8::Isolate` member; pointer validity
// and threading requirements follow the V8 C++ API.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The ShadowRealm callback has two ABI-dependent shapes, mirroring the
  // MaybeLocal return-value conventions used elsewhere in this file.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  // Heap profiler entry point; NOTE(review): the callback presumably
  // receives snapshot data in (arg, chunk, len) form — confirm against the
  // C++ glue before relying on the exact contract.
  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
779
/// Isolate represents an isolated instance of the V8 engine.  V8 isolates have
/// completely separate states.  Objects from one isolate must not be used in
/// other isolates.  The embedder can create multiple isolates and use them in
/// parallel in multiple threads.  An isolate can be entered by at most one
/// thread at any given time.  The Locker/Unlocker API must be used to
/// synchronize.
///
/// rusty_v8 note: Unlike in the C++ API, the Isolate is entered when it is
/// constructed and exited when dropped. Because of that v8::OwnedIsolate
/// instances must be dropped in the reverse order of creation
// repr(transparent) over NonNull: an `Isolate` is layout-compatible with a
// bare non-null isolate pointer, which the `from_raw_ref` and
// `ref_from_raw_isolate_ptr*` casts below rely on.
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);
793
/// A raw, possibly-null pointer to a V8 isolate, for passing across FFI
/// boundaries where no lifetime or non-null guarantee can be made. Convert
/// back with the `Isolate::from_raw_isolate_ptr*` family.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
797
798impl UnsafeRawIsolatePtr {
799  pub fn null() -> Self {
800    Self(std::ptr::null_mut())
801  }
802
803  pub fn is_null(&self) -> bool {
804    self.0.is_null()
805  }
806}
807
/// Opaque stand-in for the C++ `v8::Isolate` object itself. Never
/// constructed from Rust; only ever used behind raw pointers.
#[repr(C)]
pub struct RealIsolate(Opaque);
810
811impl Isolate {
  /// Returns the raw pointer to the underlying C++ `v8::Isolate`.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Wraps the underlying pointer in an [`UnsafeRawIsolatePtr`].
  ///
  /// # Safety
  /// The returned raw pointer carries no lifetime; callers must not use it
  /// after the isolate has been disposed.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }
819
820  #[inline]
821  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
822    Self(NonNull::new(ptr.0).unwrap())
823  }
824
  /// Reconstructs an [`Isolate`] from a raw pointer without checking for
  /// null.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live V8 isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    // SAFETY: caller guarantees `ptr.0` is non-null.
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }
831
832  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
833    Self(unsafe { NonNull::new_unchecked(ptr) })
834  }
835
836  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
837    Self(NonNull::new(ptr).unwrap())
838  }
839
  /// Reinterprets a raw isolate pointer reference as an [`Isolate`]
  /// reference.
  ///
  /// # Panics
  /// Panics if the wrapped pointer is null.
  ///
  /// # Safety
  /// The pointer must refer to a live, not-yet-disposed V8 isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    // SAFETY: both types are repr(transparent) wrappers around an isolate
    // pointer, and non-null was just checked, upholding Isolate's NonNull
    // invariant.
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Like [`Isolate::ref_from_raw_isolate_ptr`], but without the null check.
  ///
  /// # Safety
  /// The pointer must be non-null and refer to a live V8 isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    // SAFETY: caller guarantees non-null; layouts are repr(transparent)
    // compatible.
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Mutable variant of [`Isolate::ref_from_raw_isolate_ptr`].
  ///
  /// # Panics
  /// Panics if the wrapped pointer is null.
  ///
  /// # Safety
  /// The pointer must refer to a live V8 isolate, and the resulting
  /// reference must not alias another live `&mut Isolate`.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    // SAFETY: see `ref_from_raw_isolate_ptr`; non-null was checked above.
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Mutable variant of [`Isolate::ref_from_raw_isolate_ptr_unchecked`].
  ///
  /// # Safety
  /// The pointer must be non-null and refer to a live V8 isolate; the
  /// resulting reference must not alias another live `&mut Isolate`.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    // SAFETY: caller guarantees non-null; layouts are repr(transparent)
    // compatible.
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }
871
  /// Wraps a `NonNull` isolate pointer.
  ///
  /// # Safety
  /// `ptr` must point to a live, not-yet-disposed V8 isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets a `&NonNull<RealIsolate>` as `&Isolate`.
  ///
  /// # Safety
  /// The pointee must be a live V8 isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable variant of [`Isolate::from_raw_ref`].
  ///
  /// # Safety
  /// The pointee must be a live V8 isolate, and the resulting reference
  /// must not alias another live `&mut Isolate`.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }
890
  // Isolate data slots used internally by rusty_v8.
  // Slot 0 (ANNEX_SLOT) holds the annex pointer; the second reserved slot
  // is presumably used elsewhere in the crate — not visible in this file.
  const ANNEX_SLOT: u32 = 0;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  /// Asserts that V8 exposes at least as many embedder data slots as
  /// rusty_v8 reserves for internal use.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
902
  /// Creates a new C++ isolate, runs rusty_v8-side initialization, and
  /// returns the raw pointer for the caller (e.g. `OwnedIsolate`) to own.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    // `finalize` splits params into the raw C struct plus the Rust
    // allocations that must outlive the isolate (stored in the annex).
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    // Temporary thin wrapper, used only to call `initialize`; dropping it
    // does not dispose the C++ isolate — ownership stays with the returned
    // raw pointer.
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Performs rusty_v8-side setup: verifies the embedder data slot count
  /// and creates the annex that stores `create_param_allocations`.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }
916
  /// Creates a new isolate.  Does not change the currently entered
  /// isolate.
  ///
  /// When an isolate is no longer used its resources should be freed
  /// by calling V8::dispose().  Using the delete operator is not allowed.
  ///
  /// V8::initialize() must have run prior to this.
  // Returns the `OwnedIsolate` wrapper rather than `Self`, hence the
  // clippy allowance.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
928
  /// Creates an isolate configured as a snapshot creator, used to build a
  /// startup snapshot blob from scratch.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }
936
  /// Creates a snapshot-creator isolate seeded from an existing snapshot
  /// blob, so a new snapshot can be layered on top of it.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }
949
950  /// Initial configuration parameters for a new Isolate.
951  #[inline(always)]
952  pub fn create_params() -> CreateParams {
953    CreateParams::default()
954  }
955
  /// Returns an [`IsolateHandle`] that can be used from other threads to
  /// signal this isolate (e.g. to terminate execution).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
960
961  /// See [`IsolateHandle::terminate_execution`]
962  #[inline(always)]
963  pub fn terminate_execution(&self) -> bool {
964    self.thread_safe_handle().terminate_execution()
965  }
966
967  /// See [`IsolateHandle::cancel_terminate_execution`]
968  #[inline(always)]
969  pub fn cancel_terminate_execution(&self) -> bool {
970    self.thread_safe_handle().cancel_terminate_execution()
971  }
972
973  /// See [`IsolateHandle::is_execution_terminating`]
974  #[inline(always)]
975  pub fn is_execution_terminating(&self) -> bool {
976    self.thread_safe_handle().is_execution_terminating()
977  }
978
  /// Allocates the `IsolateAnnex` and stashes a leaked `Arc` pointer to it
  /// in internal data slot `ANNEX_SLOT`.
  ///
  /// The refcount owned by the raw slot pointer is released in
  /// `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must be empty: an annex is created at most once per isolate.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
988
989  unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
990    // Set the `isolate` pointer inside the annex struct to null, so any
991    // IsolateHandle that outlives the isolate will know that it can't call
992    // methods on the isolate.
993    let annex = self.get_annex_mut();
994    {
995      let _lock = annex.isolate_mutex.lock().unwrap();
996      annex.isolate = null_mut();
997    }
998
999    // Clear slots and drop owned objects that were taken out of `CreateParams`.
1000    let create_param_allocations =
1001      std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1002    annex.slots.clear();
1003
1004    // Run through any remaining guaranteed finalizers.
1005    for finalizer in annex.finalizer_map.drain() {
1006      if let FinalizerCallback::Guaranteed(callback) = finalizer {
1007        callback();
1008      }
1009    }
1010
1011    // Subtract one from the Arc<IsolateAnnex> reference count.
1012    unsafe { Arc::from_raw(annex) };
1013    self.set_data(0, null_mut());
1014
1015    create_param_allocations
1016  }
1017
  /// Returns a shared reference to the `IsolateAnnex` stored in `ANNEX_SLOT`.
  ///
  /// Panics if the annex was never created or has been disposed.
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    // SAFETY: a non-null slot value was written by `create_annex` from a
    // leaked Arc, which keeps the annex alive until `dispose_annex` runs.
    unsafe { &*annex_ptr }
  }
1025
  /// Returns a mutable reference to the `IsolateAnnex` stored in `ANNEX_SLOT`.
  ///
  /// Panics if the annex was never created or has been disposed.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    // SAFETY: see `get_annex`; `&mut self` gives exclusive isolate access.
    unsafe { &mut *annex_ptr }
  }
1033
1034  pub(crate) fn set_snapshot_creator(
1035    &mut self,
1036    snapshot_creator: SnapshotCreator,
1037  ) {
1038    let prev = self
1039      .get_annex_mut()
1040      .maybe_snapshot_creator
1041      .replace(snapshot_creator);
1042    assert!(prev.is_none());
1043  }
1044
  /// Shared access to the annex's finalizer map (weak-handle callbacks).
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }
1048
  /// Mutable access to the annex's finalizer map (weak-handle callbacks).
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }
1052
  /// Clones the `Arc<IsolateAnnex>` that the isolate's slot holds.
  ///
  /// Temporarily reconstitutes the Arc from the raw slot pointer, clones it,
  /// and immediately re-leaks the clone's count so the slot's refcount stays
  /// balanced.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    // SAFETY: the slot pointer came from `Arc::into_raw` in `create_annex`
    // and that reference count is still outstanding.
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1059
1060  /// Retrieve embedder-specific data from the isolate.
1061  /// Returns NULL if SetData has never been called for the given `slot`.
1062  pub fn get_data(&self, slot: u32) -> *mut c_void {
1063    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
1064  }
1065
1066  /// Associate embedder-specific data with the isolate. `slot` has to be
1067  /// between 0 and `Isolate::get_number_of_data_slots()`.
1068  #[inline(always)]
1069  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
1070    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
1071  }
1072
1073  /// Returns the maximum number of available embedder data slots. Valid slots
1074  /// are in the range of `0 <= n < Isolate::get_number_of_data_slots()`.
1075  pub fn get_number_of_data_slots(&self) -> u32 {
1076    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
1077    n - Self::INTERNAL_DATA_SLOT_COUNT
1078  }
1079
  /// Reads a raw data slot without applying the internal-slot offset.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    // SAFETY: plain FFI accessor on a live isolate.
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }
1084
  /// Writes a raw data slot without applying the internal-slot offset.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    // SAFETY: plain FFI accessor on a live isolate.
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1089
1090  // pub(crate) fn init_scope_root(&mut self) {
1091  //   ScopeData::new_root(self);
1092  // }
1093
1094  // pub(crate) fn dispose_scope_root(&mut self) {
1095  //   ScopeData::drop_root(self);
1096  // }
1097
1098  // /// Returns a pointer to the `ScopeData` struct for the current scope.
1099  // #[inline(always)]
1100  // pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
1101  //   let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
1102  //   NonNull::new(scope_data_ptr).map(NonNull::cast)
1103  // }
1104
1105  // /// Updates the slot that stores a `ScopeData` pointer for the current scope.
1106  // #[inline(always)]
1107  // pub(crate) fn set_current_scope_data(
1108  //   &mut self,
1109  //   scope_data: Option<NonNull<ScopeData>>,
1110  // ) {
1111  //   let scope_data_ptr = scope_data
1112  //     .map(NonNull::cast)
1113  //     .map_or_else(null_mut, NonNull::as_ptr);
1114  //   self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
1115  // }
1116
  /// Get a reference to embedder data added with `set_slot()`.
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      // SAFETY: entries are keyed by `TypeId::of::<T>()` and written only by
      // `set_slot::<T>`, so the slot holds a `T`.
      .map(|slot| unsafe { slot.borrow::<T>() })
  }
1126
  /// Get a mutable reference to embedder data added with `set_slot()`.
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      // SAFETY: entries are keyed by `TypeId::of::<T>()` and written only by
      // `set_slot::<T>`, so the slot holds a `T`.
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }
1136
1137  /// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
1138  /// with an Isolate.
1139  ///
1140  /// This method gives ownership of value to the Isolate. Exactly one object of
1141  /// each type can be associated with an Isolate. If called more than once with
1142  /// an object of the same type, the earlier version will be dropped and
1143  /// replaced.
1144  ///
1145  /// Returns true if value was set without replacing an existing value.
1146  ///
1147  /// The value will be dropped when the isolate is dropped.
1148  #[inline(always)]
1149  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
1150    self
1151      .get_annex_mut()
1152      .slots
1153      .insert(TypeId::of::<T>(), RawSlot::new(value))
1154      .is_none()
1155  }
1156
1157  /// Removes the embedder data added with `set_slot()` and returns it if it exists.
1158  #[inline(always)]
1159  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
1160    self
1161      .get_annex_mut()
1162      .slots
1163      .remove(&TypeId::of::<T>())
1164      .map(|slot| unsafe { slot.into_inner::<T>() })
1165  }
1166
  /// Sets this isolate as the entered one for the current thread.
  /// Saves the previously entered one (if any), so that it can be
  /// restored when exiting.  Re-entering an isolate is allowed.
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Each `enter` must eventually be balanced by a matching `exit` on the
  /// same thread; rusty_v8's owning wrappers normally manage this.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }
1179
  /// Exits this isolate by restoring the previously entered one in the
  /// current thread.  The isolate may still stay the same, if it was
  /// entered more than once.
  ///
  /// Requires: self == Isolate::GetCurrent().
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Must pair with a prior `enter` on this thread, and this isolate must be
  /// the currently entered one.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }
1194
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to guide heuristics.
  /// It is allowed to call this function from another thread while
  /// the isolate is executing long running JavaScript code.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    // SAFETY: FFI call on a live isolate; the enum is passed by discriminant.
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }
1205
  /// Clears the set of objects held strongly by the heap. This set of
  /// objects are originally built when a WeakRef is created or
  /// successfully dereferenced.
  ///
  /// This is invoked automatically after microtasks are run. See
  /// MicrotasksPolicy for when microtasks are run.
  ///
  /// This needs to be manually invoked only if the embedder is manually
  /// running microtasks via a custom MicrotaskQueue class's PerformCheckpoint.
  /// In that case, it is the embedder's responsibility to make this call at a
  /// time which does not interrupt synchronous ECMAScript code execution.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }
1221
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to attempt to free memory.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }
1228
1229  /// Get statistics about the heap memory usage.
1230  #[inline(always)]
1231  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
1232    let inner = unsafe {
1233      let mut s = MaybeUninit::zeroed();
1234      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
1235      s.assume_init()
1236    };
1237    HeapStatistics(inner)
1238  }
1239
  /// Returns the number of spaces in the heap.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    // SAFETY: plain FFI query on a live isolate.
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }
1245
1246  /// Get the memory usage of a space in the heap.
1247  ///
1248  /// \param space_statistics The HeapSpaceStatistics object to fill in
1249  ///   statistics.
1250  /// \param index The index of the space to get statistics from, which ranges
1251  ///   from 0 to NumberOfHeapSpaces() - 1.
1252  /// \returns true on success.
1253  #[inline(always)]
1254  pub fn get_heap_space_statistics(
1255    &mut self,
1256    index: usize,
1257  ) -> Option<HeapSpaceStatistics> {
1258    let inner = unsafe {
1259      let mut s = MaybeUninit::zeroed();
1260      if !v8__Isolate__GetHeapSpaceStatistics(
1261        self.as_real_ptr(),
1262        s.as_mut_ptr(),
1263        index,
1264      ) {
1265        return None;
1266      }
1267      s.assume_init()
1268    };
1269    Some(HeapSpaceStatistics(inner))
1270  }
1271
  /// Tells V8 to capture current stack trace when uncaught exception occurs
  /// and report it to the message listeners. The option is off by default.
  ///
  /// `frame_limit` caps the number of captured stack frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }
1288
  /// Adds a message listener (errors only).
  ///
  /// The same message listener can be added more than once and in that
  /// case it will be called more than once for each message.
  ///
  /// The exception object will be passed to the callback.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }
1299
  /// Adds a message listener for the specified message levels.
  ///
  /// `message_levels` is a bitmask selecting which error levels invoke the
  /// listener.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1315
  /// This specifies the callback called when the stack property of Error
  /// is accessed.
  ///
  /// PrepareStackTraceCallback is called when the stack property of an error is
  /// first accessed. The return value will be used as the stack value. If this
  /// callback is registered, the |Error.prepareStackTrace| API will be disabled.
  /// |sites| is an array of call sites, specified in
  /// https://v8.dev/docs/stack-trace-api
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    // Note: the C++ API returns a MaybeLocal but V8 asserts at runtime when
    // it's empty. That is, you can't return None and that's why the Rust API
    // expects Local<Value> instead of Option<Local<Value>>.
    // SAFETY: `map_fn_to` produces a C-ABI fn pointer matching V8's signature.
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1339
  /// Set the PromiseHook callback for various promise lifecycle
  /// events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    // SAFETY: `hook` is a plain fn pointer with the ABI V8 expects.
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }
1346
  /// Set callback to notify about promise reject with no handler, or
  /// revocation of such a previous notification once the handler is added.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }
1358
  /// Installs the callback V8 invokes to resolve/reject the promise returned
  /// by asynchronous WebAssembly compilation/instantiation.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }
1371
  /// Installs the callback V8 consults to decide whether WebAssembly code
  /// generation is allowed in a given context.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1384
  #[inline(always)]
  /// This specifies the callback called by the upcoming import.meta
  /// language feature to retrieve host-defined meta data for a module.
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1399
  /// This specifies the callback called by the upcoming dynamic
  /// import() language feature to load modules.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    // SAFETY: `to_c_fn` produces a C-ABI fn pointer matching V8's signature.
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1414
  /// This specifies the callback called by the upcoming dynamic
  /// import() and import.source() language feature to load modules.
  ///
  /// This API is experimental and is expected to be changed or removed in the
  /// future. The callback is currently only called when for source-phase
  /// imports. Evaluation-phase imports use the existing
  /// HostImportModuleDynamicallyCallback callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    // SAFETY: `to_c_fn` produces a C-ABI fn pointer matching V8's signature.
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1434
  /// This specifies the callback called by the upcoming `ShadowRealm`
  /// construction language feature to retrieve host created globals.
  ///
  /// The Rust callback is stored in an isolate slot keyed by its type, and a
  /// fixed C trampoline that looks it up is registered with V8 (only once —
  /// on subsequent calls replacing the slot value is sufficient, since the
  /// trampoline re-reads the slot on every invocation).
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // Trampoline: recovers the Rust callback from the isolate slot and
    // converts its Option<Local<Context>> result to a raw pointer for V8
    // (null signals failure).
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows x64 ABI: MaybeLocal<Context> must be returned on the stack.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      // Register the trampoline with V8 only on first installation.
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1484
  /// Sets a callback for counting the number of times a feature of V8 is used.
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
1492
  /// Enables the host application to receive a notification before a
  /// garbage collection. Allocations are allowed in the callback function,
  /// but the callback is not re-entrant: if the allocation inside it will
  /// trigger the garbage collection, the callback won't be called again.
  /// It is possible to specify the GCType filter for your callback. But it is
  /// not possible to register the same callback function two times with
  /// different GCType filters.
  ///
  /// `data` is passed back to `callback` verbatim; the caller is responsible
  /// for its validity until the callback is removed.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    // SAFETY: V8 stores the (callback, data) pair and does not dereference
    // `data` itself.
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1517
  /// This function removes callback which was installed by
  /// AddGCPrologueCallback function.
  ///
  /// Both `callback` and `data` must match the values used at registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    // SAFETY: the pair is only used as a lookup key; `data` is not dereferenced.
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }
1531
  /// Enables the host application to receive a notification after a
  /// garbage collection.
  ///
  /// `data` is passed back to `callback` verbatim; the caller is responsible
  /// for its validity until the callback is removed.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    // SAFETY: V8 stores the (callback, data) pair and does not dereference
    // `data` itself.
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1551
  /// This function removes a callback which was added by
  /// `AddGCEpilogueCallback`.
  ///
  /// Both `callback` and `data` must match the values used at registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    // SAFETY: the pair is only used as a lookup key; `data` is not dereferenced.
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1565
  /// Add a callback to invoke in case the heap size is close to the heap limit.
  /// If multiple callbacks are added, only the most recently added callback is
  /// invoked.
  ///
  /// `data` is passed back to `callback` verbatim; the caller is responsible
  /// for its validity until the callback is removed.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    // SAFETY: V8 stores the pair and does not dereference `data` itself.
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }
1580
  /// Remove the given callback and restore the heap limit to the given limit.
  /// If the given limit is zero, then it is ignored. If the current heap size
  /// is greater than the given limit, then the heap limit is restored to the
  /// minimal limit that is possible for the current heap size.
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    // SAFETY: plain FFI call; `callback` is only used as a lookup key.
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1599
  /// Adjusts the amount of registered external memory. Used to give V8 an
  /// indication of the amount of externally allocated memory that is kept
  /// alive by JavaScript objects. V8 uses this to decide when to perform
  /// global garbage collections. Registering externally allocated memory
  /// will trigger global garbage collections more often than it would
  /// otherwise in an attempt to garbage collect the JavaScript objects
  /// that keep the externally allocated memory alive.
  ///
  /// Returns the adjusted total as reported by V8.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    // SAFETY: plain FFI call on a live isolate.
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }
1619
  /// Returns the cppgc heap attached to this isolate, if any.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    // SAFETY: V8 returns either null or a pointer valid while the isolate lives.
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }
1624
  /// Installs the handler V8 invokes on out-of-memory errors.
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    // SAFETY: `callback` is a plain fn pointer with the ABI V8 expects.
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1629
  /// Returns the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    // SAFETY: plain FFI query on a live isolate.
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }
1635
  /// Sets the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1641
  /// Runs the default MicrotaskQueue until it gets empty and perform other
  /// microtask checkpoint steps, such as calling ClearKeptObjects. Asserts that
  /// the MicrotasksPolicy is not kScoped. Any exceptions thrown by microtask
  /// callbacks are swallowed.
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }
1650
  /// Enqueues the callback to the default MicrotaskQueue
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    // SAFETY: `microtask` is a live handle; V8 takes its own reference.
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }
1656
  /// Set whether calling Atomics.wait (a function that may block) is allowed in
  /// this isolate. This can also be configured via
  /// CreateParams::allow_atomics_wait.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1664
  /// Embedder injection point for `WebAssembly.compileStreaming(source)`.
  /// The expectation is that the embedder sets it at most once.
  ///
  /// The callback receives the source argument (string, Promise, etc.)
  /// and an instance of [WasmStreaming]. The [WasmStreaming] instance
  /// can outlive the callback and is used to feed data chunks to V8
  /// asynchronously.
  ///
  /// The closure value itself is never stored: `F` must be a zero-sized
  /// `UnitType`, and `trampoline::<F>()` materializes a C-ABI shim that
  /// conjures the callback from its type alone.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming,
      ),
  {
    // SAFETY: the trampoline has the C ABI signature V8 expects.
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1689
  /// Notification that the embedder has changed the time zone, daylight savings
  /// time or other date / time configuration parameters. V8 keeps a cache of
  /// various values used for date / time computation. This notification will
  /// reset those cached values for the current context so that date / time
  /// configuration changes would be reflected.
  ///
  /// This API should not be called more than needed as it will negatively impact
  /// the performance of date operations.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    // SAFETY: plain FFI call on a live isolate.
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }
1710
  /// Returns true if there is ongoing background work within V8 that will
  /// eventually post a foreground task, like asynchronous WebAssembly
  /// compilation.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    // SAFETY: plain FFI query on a live isolate.
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1718
1719  /// Request garbage collection with a specific embedderstack state in this
1720  /// Isolate. It is only valid to call this function if --expose_gc was
1721  /// specified.
1722  ///
1723  /// This should only be used for testing purposes and not to enforce a garbage
1724  /// collection schedule. It has strong negative impact on the garbage
1725  /// collection performance. Use IdleNotificationDeadline() or
1726  /// LowMemoryNotification() instead to influence the garbage collection
1727  /// schedule.
1728  #[inline(always)]
1729  pub fn request_garbage_collection_for_testing(
1730    &mut self,
1731    r#type: GarbageCollectionType,
1732  ) {
1733    unsafe {
1734      v8__Isolate__RequestGarbageCollectionForTesting(
1735        self.as_real_ptr(),
1736        match r#type {
1737          GarbageCollectionType::Full => 0,
1738          GarbageCollectionType::Minor => 1,
1739        },
1740      );
1741    }
1742  }
1743
  /// Disposes the isolate.  The isolate must not be entered by any
  /// thread to be disposable.
  ///
  /// # Safety
  /// The isolate must not be used afterwards; the annex should already have
  /// been disposed (see `dispose_annex`).
  unsafe fn dispose(&mut self) {
    // No test case in rusty_v8 show this, but there have been situations in
    // deno where dropping Annex before the states causes a segfault.
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1753
  /// Take a heap snapshot. The callback is invoked one or more times
  /// with byte slices containing the snapshot serialized as JSON.
  /// It's the callback's responsibility to reassemble them into
  /// a single document, e.g., by writing them to a file.
  /// Note that Chrome DevTools refuses to load snapshots without
  /// a .heapsnapshot suffix.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C trampoline: recovers the `F` closure from the opaque `arg` pointer
    // and forwards each serialized chunk to it.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      // SAFETY: `arg` is the address of the caller's `callback`, which stays
      // on the stack for the whole synchronous FFI call.
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          // `data` may not be a valid slice pointer when empty; pass a
          // static empty slice instead of calling `from_raw_parts`.
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    // SAFETY: `trampoline::<F>` matches the C signature and `arg` outlives
    // the call, which completes before this function returns.
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1791
1792  /// Set the default context to be included in the snapshot blob.
1793  /// The snapshot will not contain the global proxy, and we expect one or a
1794  /// global object template to create one, to be provided upon deserialization.
1795  ///
1796  /// # Panics
1797  ///
1798  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1799  #[inline(always)]
1800  pub fn set_default_context(&mut self, context: Local<Context>) {
1801    let snapshot_creator = self
1802      .get_annex_mut()
1803      .maybe_snapshot_creator
1804      .as_mut()
1805      .unwrap();
1806    snapshot_creator.set_default_context(context);
1807  }
1808
  /// Add additional context to be included in the snapshot blob.
  /// The snapshot will include the global proxy.
  ///
  /// Returns the index of the context in the snapshot blob.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn add_context(&mut self, context: Local<Context>) -> usize {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context(context)
  }
1826
1827  /// Attach arbitrary `v8::Data` to the isolate snapshot, which can be
1828  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1829  /// deserialization. This data does not survive when a new snapshot is created
1830  /// from an existing snapshot.
1831  ///
1832  /// # Panics
1833  ///
1834  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1835  #[inline(always)]
1836  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1837  where
1838    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1839  {
1840    let snapshot_creator = self
1841      .get_annex_mut()
1842      .maybe_snapshot_creator
1843      .as_mut()
1844      .unwrap();
1845    snapshot_creator.add_isolate_data(data)
1846  }
1847
1848  /// Attach arbitrary `v8::Data` to the context snapshot, which can be
1849  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1850  /// deserialization. This data does not survive when a new snapshot is
1851  /// created from an existing snapshot.
1852  ///
1853  /// # Panics
1854  ///
1855  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1856  #[inline(always)]
1857  pub fn add_context_data<T>(
1858    &mut self,
1859    context: Local<Context>,
1860    data: Local<T>,
1861  ) -> usize
1862  where
1863    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1864  {
1865    let snapshot_creator = self
1866      .get_annex_mut()
1867      .maybe_snapshot_creator
1868      .as_mut()
1869      .unwrap();
1870    snapshot_creator.add_context_data(context, data)
1871  }
1872}
1873
/// Bookkeeping that rusty_v8 keeps alongside each raw V8 isolate: per-type
/// embedder data slots, finalizer registrations, the optional snapshot
/// creator, and the pointer/mutex pair used by `IsolateHandle` to check
/// whether the isolate is still alive.
pub(crate) struct IsolateAnnex {
  // Allocations passed in via `CreateParams`; held here so they stay alive
  // as long as the isolate needs them (e.g. through `create_blob`).
  create_param_allocations: Box<dyn Any>,
  // Per-`TypeId` embedder data; values are stored type-erased in `RawSlot`.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  // `Some` only for isolates set up for snapshotting; taken by
  // `OwnedIsolate::create_blob` and checked in `Drop`.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // The `isolate` and `isolate_mutex` fields are there so an `IsolateHandle`
  // (which may outlive the isolate itself) can determine whether the isolate
  // is still alive, and if so, get a reference to it. Safety rules:
  // - The 'main thread' must lock the mutex and reset `isolate` to null just
  //   before the isolate is disposed.
  // - Any other thread must lock the mutex while it's reading/using the
  //   `isolate` pointer.
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
1889
// SAFETY: cross-thread access to the raw `isolate` pointer is guarded by
// `isolate_mutex` per the safety rules documented on the struct's fields;
// the remaining fields are only touched from the isolate's own thread
// (assumption based on usage in this file — confirm against callers).
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1892
1893impl IsolateAnnex {
1894  fn new(
1895    isolate: &mut Isolate,
1896    create_param_allocations: Box<dyn Any>,
1897  ) -> Self {
1898    Self {
1899      create_param_allocations,
1900      slots: HashMap::default(),
1901      finalizer_map: FinalizerMap::default(),
1902      maybe_snapshot_creator: None,
1903      isolate: isolate.as_real_ptr(),
1904      isolate_mutex: Mutex::new(()),
1905    }
1906  }
1907}
1908
1909impl Debug for IsolateAnnex {
1910  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1911    f.debug_struct("IsolateAnnex")
1912      .field("isolate", &self.isolate)
1913      .field("isolate_mutex", &self.isolate_mutex)
1914      .finish()
1915  }
1916}
1917
/// IsolateHandle is a thread-safe reference to an Isolate. Its main use is to
/// terminate execution of a running isolate from another thread.
///
/// It is created with Isolate::thread_safe_handle().
///
/// IsolateHandle is Cloneable, Send, and Sync.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1926
1927impl IsolateHandle {
1928  // This function is marked unsafe because it must be called only with either
1929  // IsolateAnnex::mutex locked, or from the main thread associated with the V8
1930  // isolate.
1931  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1932    self.0.isolate
1933  }
1934
1935  #[inline(always)]
1936  fn new(isolate: &Isolate) -> Self {
1937    Self(isolate.get_annex_arc())
1938  }
1939
1940  /// Forcefully terminate the current thread of JavaScript execution
1941  /// in the given isolate.
1942  ///
1943  /// This method can be used by any thread even if that thread has not
1944  /// acquired the V8 lock with a Locker object.
1945  ///
1946  /// Returns false if Isolate was already destroyed.
1947  #[inline(always)]
1948  pub fn terminate_execution(&self) -> bool {
1949    let _lock = self.0.isolate_mutex.lock().unwrap();
1950    if self.0.isolate.is_null() {
1951      false
1952    } else {
1953      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1954      true
1955    }
1956  }
1957
1958  /// Resume execution capability in the given isolate, whose execution
1959  /// was previously forcefully terminated using TerminateExecution().
1960  ///
1961  /// When execution is forcefully terminated using TerminateExecution(),
1962  /// the isolate can not resume execution until all JavaScript frames
1963  /// have propagated the uncatchable exception which is generated.  This
1964  /// method allows the program embedding the engine to handle the
1965  /// termination event and resume execution capability, even if
1966  /// JavaScript frames remain on the stack.
1967  ///
1968  /// This method can be used by any thread even if that thread has not
1969  /// acquired the V8 lock with a Locker object.
1970  ///
1971  /// Returns false if Isolate was already destroyed.
1972  #[inline(always)]
1973  pub fn cancel_terminate_execution(&self) -> bool {
1974    let _lock = self.0.isolate_mutex.lock().unwrap();
1975    if self.0.isolate.is_null() {
1976      false
1977    } else {
1978      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
1979      true
1980    }
1981  }
1982
1983  /// Is V8 terminating JavaScript execution.
1984  ///
1985  /// Returns true if JavaScript execution is currently terminating
1986  /// because of a call to TerminateExecution.  In that case there are
1987  /// still JavaScript frames on the stack and the termination
1988  /// exception is still active.
1989  ///
1990  /// Returns false if Isolate was already destroyed.
1991  #[inline(always)]
1992  pub fn is_execution_terminating(&self) -> bool {
1993    let _lock = self.0.isolate_mutex.lock().unwrap();
1994    if self.0.isolate.is_null() {
1995      false
1996    } else {
1997      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
1998    }
1999  }
2000
2001  /// Request V8 to interrupt long running JavaScript code and invoke
2002  /// the given |callback| passing the given |data| to it. After |callback|
2003  /// returns control will be returned to the JavaScript code.
2004  /// There may be a number of interrupt requests in flight.
2005  /// Can be called from another thread without acquiring a |Locker|.
2006  /// Registered |callback| must not reenter interrupted Isolate.
2007  ///
2008  /// Returns false if Isolate was already destroyed.
2009  // Clippy warns that this method is dereferencing a raw pointer, but it is
2010  // not: https://github.com/rust-lang/rust-clippy/issues/3045
2011  #[allow(clippy::not_unsafe_ptr_arg_deref)]
2012  #[inline(always)]
2013  pub fn request_interrupt(
2014    &self,
2015    callback: InterruptCallback,
2016    data: *mut c_void,
2017  ) -> bool {
2018    let _lock = self.0.isolate_mutex.lock().unwrap();
2019    if self.0.isolate.is_null() {
2020      false
2021    } else {
2022      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2023      true
2024    }
2025  }
2026}
2027
/// Same as Isolate but gets disposed when it goes out of scope.
#[derive(Debug)]
pub struct OwnedIsolate {
  // Owning, never-null pointer to the underlying C++ isolate.
  cxx_isolate: NonNull<RealIsolate>,
}
2033
2034impl OwnedIsolate {
2035  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2036    let isolate = Self::new_already_entered(cxx_isolate);
2037    unsafe {
2038      isolate.enter();
2039    }
2040    isolate
2041  }
2042
2043  pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2044    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2045    let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2046    // owned_isolate.init_scope_root();
2047    owned_isolate
2048  }
2049}
2050
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      // A snapshot-creator isolate must not be dropped directly; its blob
      // must be produced via `create_blob`, which consumes `self` (and
      // `mem::forget`s it, so this `Drop` never runs in that path).
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Safety: We need to check `this == Isolate::GetCurrent()` before calling exit()
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      // self.dispose_scope_root();
      // Teardown sequence: exit the isolate, free the annex, let the
      // platform observe the shutdown, then dispose the isolate itself.
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2072
impl OwnedIsolate {
  /// Creates a snapshot data blob.
  /// This must not be called from within a handle scope.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    // The unwrap enforces the documented snapshot-creator precondition.
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // create_param_allocations is needed during CreateBlob
    // so v8 can read external references
    let _create_param_allocations = unsafe {
      // self.dispose_scope_root();
      self.dispose_annex()
    };

    // The isolate is owned by the snapshot creator; we need to forget it
    // here as the snapshot creator will drop it when running the destructor.
    // Forgetting also skips our `Drop` impl, which would otherwise exit and
    // dispose the isolate again.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2101
2102impl Deref for OwnedIsolate {
2103  type Target = Isolate;
2104  fn deref(&self) -> &Self::Target {
2105    unsafe {
2106      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
2107    }
2108  }
2109}
2110
2111impl DerefMut for OwnedIsolate {
2112  fn deref_mut(&mut self) -> &mut Self::Target {
2113    unsafe {
2114      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
2115        &mut self.cxx_isolate,
2116      )
2117    }
2118  }
2119}
2120
2121impl AsMut<Isolate> for OwnedIsolate {
2122  fn as_mut(&mut self) -> &mut Isolate {
2123    self
2124  }
2125}
2126
2127impl AsMut<Isolate> for Isolate {
2128  fn as_mut(&mut self) -> &mut Isolate {
2129    self
2130  }
2131}
2132
/// Collection of V8 heap information.
///
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
/// get heap statistics from V8.
///
/// Wraps the raw `v8__HeapStatistics` binding struct; values are exposed
/// through the getter methods below.
pub struct HeapStatistics(v8__HeapStatistics);
2138
impl HeapStatistics {
  /// Total heap size reported by V8, in bytes.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Executable portion of the total heap size, in bytes.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Physical memory backing the heap, in bytes.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Available heap size reported by V8, in bytes.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Total size of global handles, in bytes.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Used size of global handles, in bytes.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Currently used heap size, in bytes.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Hard limit on the heap size, in bytes.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Memory obtained through malloc, in bytes.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// External (embedder-allocated) memory reported to V8, in bytes.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Peak of `malloced_memory`, in bytes.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Number of native contexts currently alive.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Number of detached contexts.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Returns whether V8 overwrites heap garbage with a bit pattern
  /// ("zapping"), as reported by the underlying statistics struct.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2212
/// Statistics for a single V8 heap space.
///
/// Wraps the raw `v8__HeapSpaceStatistics` binding struct; values are
/// exposed through the getter methods below.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2214
impl HeapSpaceStatistics {
  /// Name of the heap space this object describes.
  pub fn space_name(&self) -> &'static CStr {
    // SAFETY: assumes V8 set `space_name_` to a valid, NUL-terminated
    // string with static lifetime — TODO confirm against the V8 C++ API.
    unsafe { CStr::from_ptr(self.0.space_name_) }
  }

  /// Size of the space, in bytes.
  pub fn space_size(&self) -> usize {
    self.0.space_size_
  }

  /// Used size of the space, in bytes.
  pub fn space_used_size(&self) -> usize {
    self.0.space_used_size_
  }

  /// Available size of the space, in bytes.
  pub fn space_available_size(&self) -> usize {
    self.0.space_available_size_
  }

  /// Physical memory backing the space, in bytes.
  pub fn physical_space_size(&self) -> usize {
    self.0.physical_space_size_
  }
}
2236
// Adapts a plain Rust closure to the raw `PrepareStackTraceCallback`
// signature. Two `mapping` variants exist because the C++ return value
// (`MaybeLocal<Value>`) is passed back differently per ABI, as noted on
// each `#[cfg]` branch below.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows x64 ABI: MaybeLocal<Value> returned on the stack.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      // Re-enter the isolate through a callback scope derived from `context`.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      // Write the result through the caller-provided return slot and hand
      // the same pointer back, per the convention noted above.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // System V ABI
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      // Re-enter the isolate through a callback scope derived from `context`.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      // Return the pointer wrapped in the ABI-specific newtype.
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2272
/// A special hasher that is optimized for hashing `std::any::TypeId` values.
/// `TypeId` values are actually 64-bit values which themselves come out of some
/// hash function, so it's unnecessary to shuffle their bits any further.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // Set exactly once by `write_u64`; `finish` panics (unwrap) if it was
  // never written.
  state: Option<u64>,
}
2280
2281impl Hasher for TypeIdHasher {
2282  fn write(&mut self, _bytes: &[u8]) {
2283    panic!("TypeIdHasher::write() called unexpectedly");
2284  }
2285
2286  #[inline]
2287  fn write_u64(&mut self, value: u64) {
2288    // The internal hash function of TypeId only takes the bottom 64-bits, even on versions
2289    // of Rust that use a 128-bit TypeId.
2290    let prev_state = self.state.replace(value);
2291    debug_assert_eq!(prev_state, None);
2292  }
2293
2294  #[inline]
2295  fn finish(&self) -> u64 {
2296    self.state.unwrap()
2297  }
2298}
2299
/// Factory for instances of `TypeIdHasher`. This is the type that one would
/// pass to the constructor of some map/set type in order to make it use
/// `TypeIdHasher` instead of the default hasher implementation.
///
/// Used as the hasher of the `IsolateAnnex::slots` map.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;
2305
2306impl BuildHasher for BuildTypeIdHasher {
2307  type Hasher = TypeIdHasher;
2308
2309  #[inline]
2310  fn build_hasher(&self) -> Self::Hasher {
2311    Default::default()
2312  }
2313}
2314
// Compile-time checks that `TypeId`'s size and alignment match the
// `u64`-or-`u128` representation that `TypeIdHasher::write_u64` relies on;
// a future change to `TypeId`'s layout fails the build here instead of
// misbehaving at runtime.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2325
/// Type-erased storage for a single embedder value, as kept in the
/// `IsolateAnnex::slots` map. Values that fit the inline word (by size and
/// alignment, see `RawSlot::needs_box`) are stored in place; larger values
/// are heap-boxed and the `Box` is stored instead.
pub(crate) struct RawSlot {
  // Inline storage; holds either a `T` or a `Box<T>` bit pattern.
  data: RawSlotData,
  // Destructor for the stored value; `None` when it doesn't need dropping.
  dtor: Option<RawSlotDtor>,
}

// One pointer-sized, possibly-uninitialized word of inline storage.
type RawSlotData = MaybeUninit<usize>;
// Drops the value currently stored in a `RawSlotData`.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
2333
impl RawSlot {
  /// Stores `value` in a new slot, boxing it first when it is too large or
  /// too aligned for the inline storage word.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  // SAFETY: a valid value of type `T` must have been stored in the slot
  // earlier. There is no verification that the type param provided by the
  // caller is correct.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Boxed case: the slot holds a `Box<T>`; the `&Box<T>` produced
        // here deref-coerces to the returned `&T`.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // The value was moved out by `ptr::read`; forget `self` so `Drop`
      // doesn't run the destructor over the now-moved-out bytes.
      forget(self);
      value
    }
  }

  /// Whether `T` must be heap-boxed because it does not fit the inline
  /// `usize`-sized storage by size or alignment.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  /// Writes `value` (a `T` or a `Box<T>`, always inline-sized) into the
  /// storage word and records a destructor only if `B` needs dropping.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // SAFETY: a valid value of type `T` or `Box<T>` must be stored in the slot.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
2413
2414impl Drop for RawSlot {
2415  fn drop(&mut self) {
2416    if let Some(dtor) = self.dtor {
2417      unsafe { dtor(&mut self.data) };
2418    }
2419  }
2420}
2421
2422impl AsRef<Isolate> for OwnedIsolate {
2423  fn as_ref(&self) -> &Isolate {
2424    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
2425  }
2426}
2427impl AsRef<Isolate> for Isolate {
2428  fn as_ref(&self) -> &Isolate {
2429    self
2430  }
2431}