// v8/isolate.rs
1// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapSpaceStatistics;
22use crate::binding::v8__HeapStatistics;
23use crate::binding::v8__Isolate__UseCounterFeature;
24pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
25use crate::cppgc::Heap;
26use crate::external_references::ExternalReference;
27use crate::function::FunctionCallbackInfo;
28use crate::gc::GCCallbackFlags;
29use crate::gc::GCType;
30use crate::handle::FinalizerCallback;
31use crate::handle::FinalizerMap;
32use crate::isolate_create_params::CreateParams;
33use crate::isolate_create_params::raw;
34use crate::promise::PromiseRejectMessage;
35use crate::snapshot::SnapshotCreator;
36use crate::support::MapFnFrom;
37use crate::support::MapFnTo;
38use crate::support::Opaque;
39use crate::support::ToCFn;
40use crate::support::UnitType;
41use crate::support::char;
42use crate::support::int;
43use crate::support::size_t;
44use crate::wasm::WasmStreaming;
45use crate::wasm::trampoline;
46use std::ffi::CStr;
47
48use std::any::Any;
49use std::any::TypeId;
50use std::borrow::Cow;
51use std::collections::HashMap;
52use std::ffi::c_void;
53use std::fmt::{self, Debug, Formatter};
54use std::hash::BuildHasher;
55use std::hash::Hasher;
56use std::mem::MaybeUninit;
57use std::mem::align_of;
58use std::mem::forget;
59use std::mem::needs_drop;
60use std::mem::size_of;
61use std::ops::Deref;
62use std::ops::DerefMut;
63use std::pin::pin;
64use std::ptr;
65use std::ptr::NonNull;
66use std::ptr::addr_of_mut;
67use std::ptr::drop_in_place;
68use std::ptr::null_mut;
69use std::sync::Arc;
70use std::sync::Mutex;
71
/// Policy for running microtasks:
///   - explicit: microtasks are invoked with the
///     Isolate::PerformMicrotaskCheckpoint() method;
///   - auto: microtasks are invoked when the script call depth decrements
///     to zero.
///
/// `repr(C)` with explicit discriminants so the value can be passed across
/// the FFI boundary unchanged (see `v8__Isolate__SetMicrotasksPolicy` /
/// `v8__Isolate__GetMicrotasksPolicy`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // Scoped = 1 (RAII) is omitted for now, doesn't quite map to idiomatic Rust.
  Auto = 2,
}
84
/// Memory pressure level for the MemoryPressureNotification.
/// None hints V8 that there is no memory pressure.
/// Moderate hints V8 to speed up incremental garbage collection at the cost
/// of higher latency due to garbage collection pauses.
/// Critical hints V8 to free memory as soon as possible. Garbage collection
/// pauses at this level will be large.
///
/// Passed across the FFI boundary as a `u8`
/// (see `v8__Isolate__MemoryPressureNotification`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}
98
/// Time zone redetection indicator for
/// DateTimeConfigurationChangeNotification.
///
/// kSkip indicates V8 that the notification should not trigger redetecting
/// host time zone. kRedetect indicates V8 that host time zone should be
/// redetected, and used to set the default time zone.
///
/// The host time zone detection may require file system access or similar
/// operations unlikely to be available inside a sandbox. If v8 is run inside a
/// sandbox, the host time zone has to be detected outside the sandbox before
/// calling DateTimeConfigurationChangeNotification function.
///
/// Passed over FFI to `v8__Isolate__DateTimeConfigurationChangeNotification`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  Skip = 0,
  Redetect = 1,
}
116
/// PromiseHook with type Init is called when a new promise is
/// created. When a new promise is created as part of the chain in the
/// case of Promise.then or in the intermediate promises created by
/// Promise.{race, all}/AsyncFunctionAwait, we pass the parent promise
/// otherwise we pass undefined.
///
/// PromiseHook with type Resolve is called at the beginning of
/// resolve or reject function defined by CreateResolvingFunctions.
///
/// PromiseHook with type Before is called at the beginning of the
/// PromiseReactionJob.
///
/// PromiseHook with type After is called right at the end of the
/// PromiseReactionJob.
///
/// `repr(C)` so the discriminant can cross the FFI boundary as the first
/// argument of a [`PromiseHook`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}
139
/// Types of garbage collections that can be requested via
/// [`Isolate::request_garbage_collection_for_testing`].
///
/// Passed over FFI as a `usize` discriminant
/// (see `v8__Isolate__RequestGarbageCollectionForTesting`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  Full,
  Minor,
}
148
/// Callback registered with `v8__Isolate__AddMessageListener`; receives the
/// message and the associated error value.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  /// Bit mask of message severity levels used when registering a message
  /// listener with a level filter
  /// (see `v8__Isolate__AddMessageListenerWithErrorLevel`).
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    // All of the above bits set.
    const ALL = (1 << 5) - 1;
  }
}
163
/// Hook invoked at promise lifecycle events; see [`PromiseHookType`] for the
/// meaning of each event (installed via `v8__Isolate__SetPromiseHook`).
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback invoked on promise rejection; details are carried in the
/// [`PromiseRejectMessage`] (installed via
/// `v8__Isolate__SetPromiseRejectCallback`).
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome flag passed to a [`WasmAsyncResolvePromiseCallback`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback that resolves or rejects a wasm-related promise; receives the raw
/// isolate, the context, the resolver, the result value, and the outcome
/// (installed via `v8__Isolate__SetWasmAsyncResolvePromiseCallback`).
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);
182
/// Predicate consulted by V8 to decide whether wasm code generation is
/// permitted for the given context and source string (installed via
/// `v8__Isolate__SetAllowWasmCodeGenerationCallback`).
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// HostInitializeImportMetaObjectCallback is called the first time import.meta
/// is accessed for a module. Subsequent access will reuse the same value.
///
/// The method combines two implementation-defined abstract operations into one:
/// HostGetImportMetaProperties and HostFinalizeImportMeta.
///
/// The embedder should use v8::Object::CreateDataProperty to add properties on
/// the meta object.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
196
/// HostImportModuleDynamicallyCallback is called when we require the embedder
/// to load a module. This is used as part of the dynamic import syntax.
///
/// The host_defined_options are metadata provided by the host environment, which may be used
/// to customize or further specify how the module should be imported.
///
/// The resource_name is the identifier or path for the module or script making the import request.
///
/// The specifier is the name of the module that should be imported.
///
/// The import_attributes are import assertions for this request in the form:
/// [key1, value1, key2, value2, ...] where the keys and values are of type
/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
/// returned from ModuleRequest::GetImportAssertions(), this array does not
/// contain the source Locations of the assertions.
///
/// The embedder must compile, instantiate, evaluate the Module, and obtain its
/// namespace object.
///
/// The Promise returned from this function is forwarded to userland JavaScript.
/// The embedder must resolve this promise with the module namespace object. In
/// case of an exception, the embedder must reject this promise with the
/// exception. If the promise creation itself fails (e.g. due to stack
/// overflow), the embedder must propagate that exception by returning an empty
/// MaybeLocal (`None` on the Rust side).
///
/// # Example
///
/// ```
/// fn host_import_module_dynamically_callback_example<'s, 'i>(
///   scope: &mut v8::PinScope<'s, 'i>,
///   host_defined_options: v8::Local<'s, v8::Data>,
///   resource_name: v8::Local<'s, v8::Value>,
///   specifier: v8::Local<'s, v8::String>,
///   import_attributes: v8::Local<'s, v8::FixedArray>,
/// ) -> Option<v8::Local<'s, v8::Promise>> {
///   todo!()
/// }
/// ```
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this closure into the raw `extern "C"` function pointer
  /// expected by `v8__Isolate__SetHostImportModuleDynamicallyCallback`.
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
248
// The raw C signature differs per platform: on System V the
// MaybeLocal<Promise> return value travels in a register, while on Windows
// x64 it goes through a hidden out-pointer that is also returned.  A null
// pointer encodes the empty MaybeLocal.
// NOTE(review): the visibility is asymmetric (`pub(crate)` here, `pub` on
// Windows) — confirm whether that is intentional.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
269
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Inner layer: enter a CallbackScope for the callback's context, then
    // invoke the user closure, which is recovered statically via `F::get()`
    // (see `UnitType`) so no state needs to be smuggled through the C
    // callback.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Outer layer, System V ABI: the MaybeLocal<Promise> is returned in a
    // register; `None` maps to a null pointer (empty MaybeLocal).
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer, Windows x64 ABI: the result is written through the hidden
    // out-pointer `return_value`, which is also returned.
    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
358
/// HostImportModuleWithPhaseDynamicallyCallback is called when we
/// require the embedder to load a module with a specific phase. This is used
/// as part of the dynamic import syntax.
///
/// The referrer contains metadata about the script/module that calls
/// import.
///
/// The specifier is the name of the module that should be imported.
///
/// The phase is the phase of the import requested.
///
/// The import_attributes are import attributes for this request in the form:
/// [key1, value1, key2, value2, ...] where the keys and values are of type
/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
/// returned from ModuleRequest::GetImportAttributes(), this array does not
/// contain the source Locations of the attributes.
///
/// The Promise returned from this function is forwarded to userland
/// JavaScript. The embedder must resolve this promise according to the phase
/// requested:
/// - For ModuleImportPhase::kSource, the promise must be resolved with a
///   compiled ModuleSource object, or rejected with a SyntaxError if the
///   module does not support source representation.
/// - For ModuleImportPhase::kEvaluation, the promise must be resolved with a
///   ModuleNamespace object of a module that has been compiled, instantiated,
///   and evaluated.
///
/// In case of an exception, the embedder must reject this promise with the
/// exception. If the promise creation itself fails (e.g. due to stack
/// overflow), the embedder must propagate that exception by returning an empty
/// MaybeLocal (`None` on the Rust side).
///
/// This callback is still experimental and is only invoked for source phase
/// imports.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this closure into the raw `extern "C"` function pointer expected
  /// by `v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback`.
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
406
// Same per-platform ABI split as `RawHostImportModuleDynamicallyCallback`:
// register return on System V, hidden out-pointer on Windows x64; null
// encodes the empty MaybeLocal.
// NOTE(review): visibility is asymmetric (`pub(crate)` vs `pub`) — confirm
// whether that is intentional.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
429
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Inner layer: enter a CallbackScope for the callback's context, then
    // invoke the user closure recovered statically via `F::get()`.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Outer layer, System V ABI: register return; `None` maps to null.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer, Windows x64 ABI: result goes through the hidden
    // out-pointer `return_value`, which is also returned.
    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
525
/// `HostCreateShadowRealmContextCallback` is called each time a `ShadowRealm`
/// is being constructed. You can use [`HandleScope::get_current_context`] to
/// get the [`Context`] in which the constructor is being run.
///
/// The method combines [`Context`] creation and the implementation-defined
/// abstract operation `HostInitializeShadowRealm` into one.
///
/// The embedder should use [`Context::new`] to create a new context. If the
/// creation fails, the embedder must propagate that exception by returning
/// [`None`].
///
/// Note this is a plain Rust `fn` pointer; the `extern "C"` signature that
/// V8 actually calls (see
/// `v8__Isolate__SetHostCreateShadowRealmContextCallback`) is adapted
/// elsewhere.
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;
538
/// Garbage-collection prologue/epilogue callback carrying an
/// embedder-supplied `data` pointer (registered via
/// `v8__Isolate__AddGCPrologueCallback` / `v8__Isolate__AddGCEpilogueCallback`).
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback scheduled on an isolate via `v8__Isolate__RequestInterrupt`;
/// `data` is the pointer supplied at registration time.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Near-heap-limit callback (registered via
/// `v8__Isolate__AddNearHeapLimitCallback`); returns the new heap limit.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;
554
/// Details passed to an [`OomErrorCallback`].
#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  // `char` here is the C `char` from `crate::support`, i.e. this is a
  // C-string pointer.
  pub detail: *const char,
}

/// Handler for out-of-memory errors (registered via
/// `v8__Isolate__SetOOMErrorHandler`).
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);
563
// Per-platform ABI for the prepare-stack-trace callback's
// MaybeLocal<Value> return value.

// Windows x64 ABI: MaybeLocal<Value> returned on the stack via hidden
// out-pointer.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// System V ABI: MaybeLocal<Value> returned in a register.
// System V i386 ABI: Local<Value> returned in hidden pointer (struct).
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Re-export of the generated use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback invoked when V8 counts usage of a feature (registered via
/// `v8__Isolate__SetUseCounterCallback`).
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
591
// Raw bindings into the C++ glue layer.  Naming convention:
// `v8__Isolate__<Method>` wraps `v8::Isolate::<Method>`.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  // `level` carries a `MemoryPressureLevel` discriminant as a byte.
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The ShadowRealm callback's C ABI differs per platform: on Windows the
  // MaybeLocal<Context> return travels through a hidden out-pointer.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  // `r#type` carries a `GarbageCollectionType` discriminant.
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
767
/// Isolate represents an isolated instance of the V8 engine.  V8 isolates have
/// completely separate states.  Objects from one isolate must not be used in
/// other isolates.  The embedder can create multiple isolates and use them in
/// parallel in multiple threads.  An isolate can be entered by at most one
/// thread at any given time.  The Locker/Unlocker API must be used to
/// synchronize.
///
/// rusty_v8 note: Unlike in the C++ API, the Isolate is entered when it is
/// constructed and exited when dropped. Because of that v8::OwnedIsolate
/// instances must be dropped in the reverse order of creation
///
/// Layout note: this is a `repr(transparent)` wrapper over
/// `NonNull<RealIsolate>`, which is what makes the `&NonNull<RealIsolate>`
/// -> `&Isolate` reinterpret casts in `from_raw_ref`/`from_raw_ref_mut`
/// sound.
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);
781
/// A raw, possibly-null isolate pointer, used in FFI callback signatures.
/// Unlike [`Isolate`] it carries no non-null guarantee; `repr(transparent)`
/// over `*mut RealIsolate` so it can cross the C ABI unchanged.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
785
786impl UnsafeRawIsolatePtr {
787  pub fn null() -> Self {
788    Self(std::ptr::null_mut())
789  }
790
791  pub fn is_null(&self) -> bool {
792    self.0.is_null()
793  }
794}
795
796/// Return the currently entered isolate (if any) as a raw pointer.
797///
798/// # Safety
799/// Returned pointer is only valid for identity comparison; do not dereference
800/// unless you have independently established that the isolate is still alive
801/// and correctly entered/locked.
802pub unsafe fn current_raw_isolate_ptr() -> Option<UnsafeRawIsolatePtr> {
803  let ptr = unsafe { v8__Isolate__GetCurrent() };
804  if ptr.is_null() {
805    None
806  } else {
807    Some(UnsafeRawIsolatePtr(ptr))
808  }
809}
810
/// Opaque stand-in for V8's C++ `v8::Isolate`.  Only ever handled by
/// pointer; never constructed or dereferenced on the Rust side.
#[repr(C)]
pub struct RealIsolate(Opaque);
813
814impl Isolate {
  /// Returns the raw pointer to the underlying C++ isolate.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }
818
  /// Wraps this isolate's pointer in an [`UnsafeRawIsolatePtr`].
  ///
  /// # Safety
  /// The returned pointer carries no lifetime; the caller must not use it
  /// after the isolate has been disposed.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }
822
823  #[inline]
824  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
825    Self(NonNull::new(ptr.0).unwrap())
826  }
827
  /// Like [`Isolate::from_raw_isolate_ptr`] but skips the null check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live V8 isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }
834
  /// Wraps a raw isolate pointer without checking for null.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live V8 isolate.
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }
838
839  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
840    Self(NonNull::new(ptr).unwrap())
841  }
842
843  #[inline]
844  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
845    if ptr.is_null() {
846      panic!("UnsafeRawIsolatePtr is null");
847    }
848    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
849  }
850
851  #[inline]
852  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
853    ptr: &UnsafeRawIsolatePtr,
854  ) -> &Self {
855    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
856  }
857
858  #[inline]
859  pub unsafe fn ref_from_raw_isolate_ptr_mut(
860    ptr: &mut UnsafeRawIsolatePtr,
861  ) -> &mut Self {
862    if ptr.is_null() {
863      panic!("UnsafeRawIsolatePtr is null");
864    }
865    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
866  }
867
  /// Like [`Isolate::ref_from_raw_isolate_ptr_mut`] but without the null
  /// check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live V8 isolate.  The cast is
  /// layout-sound because both types are `repr(transparent)` pointer
  /// wrappers.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }
874
  /// Wraps an already-verified non-null isolate pointer.
  ///
  /// # Safety
  /// `ptr` must point to a live V8 isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }
879
  /// Reinterprets a borrowed `NonNull<RealIsolate>` as a borrowed `Isolate`.
  ///
  /// # Safety
  /// The pointee must be a live V8 isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }
885
  /// Mutable counterpart of [`Isolate::from_raw_ref`].
  ///
  /// # Safety
  /// The pointee must be a live V8 isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }
893
  // Isolate data slots used internally by rusty_v8.  Slot ANNEX_SLOT stores
  // the pointer to the IsolateAnnex (see `create_annex`); all slots below
  // INTERNAL_DATA_SLOT_COUNT are reserved for rusty_v8 itself.
  // NOTE(review): what the second reserved slot holds is not visible in this
  // part of the file — confirm against the rest of the module.
  const ANNEX_SLOT: u32 = 0;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;
897
  // Runtime check that V8 reports at least as many embedder data slots as
  // rusty_v8 reserves for internal use; panics otherwise.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
905
  // Shared construction path: finalizes the CreateParams, creates the raw V8
  // isolate, and runs rusty_v8's per-isolate initialization.  Returns the
  // raw pointer; ownership wrappers are layered on by the callers.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    // `create_param_allocations` is kept alive for the isolate's lifetime by
    // stashing it in the annex during initialization.
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }
914
  /// Performs rusty_v8-specific per-isolate setup: validates the embedder
  /// data-slot layout and installs the annex, which also takes ownership of
  /// `create_param_allocations`.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }
919
  /// Creates a new isolate.  Does not change the currently entered
  /// isolate.
  ///
  /// When an isolate is no longer used its resources should be freed
  /// by calling V8::dispose().  Using the delete operator is not allowed.
  ///
  /// V8::initialize() must have run prior to this.
  // Returns OwnedIsolate rather than Self by design, hence the allow.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
931
  /// Creates an isolate set up for building a startup snapshot blob; see
  /// [`SnapshotCreator`].
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }
939
  /// Like [`Isolate::snapshot_creator`], but seeds the new snapshot from an
  /// existing snapshot blob.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }
952
  /// Initial configuration parameters for a new Isolate.
  /// Shorthand for [`CreateParams::default`].
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }
958
  /// Returns an [`IsolateHandle`] for this isolate, through which a limited
  /// set of operations (e.g. terminating execution) can be requested.
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
963
  /// See [`IsolateHandle::terminate_execution`]
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    // Convenience forwarder through a freshly created thread-safe handle.
    self.thread_safe_handle().terminate_execution()
  }
969
  /// See [`IsolateHandle::cancel_terminate_execution`]
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    // Convenience forwarder through a freshly created thread-safe handle.
    self.thread_safe_handle().cancel_terminate_execution()
  }
975
  /// See [`IsolateHandle::is_execution_terminating`]
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    // Convenience forwarder through a freshly created thread-safe handle.
    self.thread_safe_handle().is_execution_terminating()
  }
981
  /// Allocates the `IsolateAnnex` (rusty_v8's per-isolate side data) and
  /// stores a leaked `Arc` pointer to it in the internal annex slot.
  /// The matching `Arc::from_raw` happens in `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must be empty; installing a second annex would leak the first.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
991
992  unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
993    // Set the `isolate` pointer inside the annex struct to null, so any
994    // IsolateHandle that outlives the isolate will know that it can't call
995    // methods on the isolate.
996    let annex = self.get_annex_mut();
997    {
998      let _lock = annex.isolate_mutex.lock().unwrap();
999      annex.isolate = null_mut();
1000    }
1001
1002    // Clear slots and drop owned objects that were taken out of `CreateParams`.
1003    let create_param_allocations =
1004      std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1005    annex.slots.clear();
1006
1007    // Run through any remaining guaranteed finalizers.
1008    for finalizer in annex.finalizer_map.drain() {
1009      if let FinalizerCallback::Guaranteed(callback) = finalizer {
1010        callback();
1011      }
1012    }
1013
1014    // Subtract one from the Arc<IsolateAnnex> reference count.
1015    unsafe { Arc::from_raw(annex) };
1016    self.set_data(0, null_mut());
1017
1018    create_param_allocations
1019  }
1020
  /// Returns a shared reference to the annex stored in the internal data
  /// slot. Panics if the annex has not been created yet.
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }
1028
  /// Mutable counterpart of `get_annex`. Panics if the annex has not been
  /// created yet.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }
1036
  /// Attaches the `SnapshotCreator` to the annex. Panics if one was already
  /// attached — an isolate can only be a snapshot creator once.
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }
1047
  /// Shared access to the annex's map of handle finalizer callbacks.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }
1051
  /// Mutable access to the annex's map of handle finalizer callbacks.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }
1055
  /// Returns an owned `Arc` to the annex, bumping its reference count.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    // Reconstruct the Arc that was leaked into the data slot...
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    // ...then immediately leak a clone, so the reference owned by the data
    // slot stays alive while the caller gets its own strong reference.
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1062
1063  /// Retrieve embedder-specific data from the isolate.
1064  /// Returns NULL if SetData has never been called for the given `slot`.
1065  pub fn get_data(&self, slot: u32) -> *mut c_void {
1066    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
1067  }
1068
  /// Associate embedder-specific data with the isolate. `slot` has to be
  /// between 0 and `Isolate::get_number_of_data_slots()`.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    // Embedder slot indices are shifted past rusty_v8's internal slots.
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }
1075
  /// Returns the maximum number of available embedder data slots. Valid slots
  /// are in the range of `0 <= n < Isolate::get_number_of_data_slots()`.
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    // Slots reserved for rusty_v8 internals are not available to embedders.
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }
1082
  /// Reads a raw (un-offset) isolate data slot; used for rusty_v8 internals.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }
1087
  /// Writes a raw (un-offset) isolate data slot; used for rusty_v8 internals.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1092
1093  // pub(crate) fn init_scope_root(&mut self) {
1094  //   ScopeData::new_root(self);
1095  // }
1096
1097  // pub(crate) fn dispose_scope_root(&mut self) {
1098  //   ScopeData::drop_root(self);
1099  // }
1100
1101  // /// Returns a pointer to the `ScopeData` struct for the current scope.
1102  // #[inline(always)]
1103  // pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
1104  //   let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
1105  //   NonNull::new(scope_data_ptr).map(NonNull::cast)
1106  // }
1107
1108  // /// Updates the slot that stores a `ScopeData` pointer for the current scope.
1109  // #[inline(always)]
1110  // pub(crate) fn set_current_scope_data(
1111  //   &mut self,
1112  //   scope_data: Option<NonNull<ScopeData>>,
1113  // ) {
1114  //   let scope_data_ptr = scope_data
1115  //     .map(NonNull::cast)
1116  //     .map_or_else(null_mut, NonNull::as_ptr);
1117  //   self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
1118  // }
1119
  /// Get a reference to embedder data added with `set_slot()`.
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      // SAFETY (for the borrow below): the map is keyed by `TypeId`, so a
      // value stored under `TypeId::of::<T>()` is known to hold a `T`.
      .get(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow::<T>() })
  }
1129
  /// Get a mutable reference to embedder data added with `set_slot()`.
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      // SAFETY (for the borrow below): the map is keyed by `TypeId`, so a
      // value stored under `TypeId::of::<T>()` is known to hold a `T`.
      .get_mut(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }
1139
  /// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
  /// with an Isolate.
  ///
  /// This method gives ownership of value to the Isolate. Exactly one object of
  /// each type can be associated with an Isolate. If called more than once with
  /// an object of the same type, the earlier version will be dropped and
  /// replaced.
  ///
  /// Returns true if value was set without replacing an existing value.
  ///
  /// The value will be dropped when the isolate is dropped.
  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      // One slot per concrete type: the value's `TypeId` is the map key.
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }
1159
  /// Removes the embedder data added with `set_slot()` and returns it if it exists.
  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      // SAFETY (for `into_inner` below): the map is keyed by `TypeId`, so a
      // value stored under `TypeId::of::<T>()` is known to hold a `T`.
      .remove(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }
1169
  /// Sets this isolate as the entered one for the current thread.
  /// Saves the previously entered one (if any), so that it can be
  /// restored when exiting.  Re-entering an isolate is allowed.
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  ///
  /// NOTE(review): caller must uphold V8's threading rules for
  /// `v8::Isolate::Enter` (one thread in an isolate at a time) — confirm.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }
1182
  /// Exits this isolate by restoring the previously entered one in the
  /// current thread.  The isolate may still stay the same, if it was
  /// entered more than once.
  ///
  /// Requires: self == Isolate::GetCurrent().
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  ///
  /// NOTE(review): caller must ensure this isolate is the currently entered
  /// one on this thread (see "Requires" above) — confirm.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }
1197
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to guide heuristics.
  /// It is allowed to call this function from another thread while
  /// the isolate is executing long running JavaScript code.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe {
      // The level enum is passed across the FFI boundary as a `u8`.
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }
1208
  /// Clears the set of objects held strongly by the heap. This set of
  /// objects are originally built when a WeakRef is created or
  /// successfully dereferenced.
  ///
  /// This is invoked automatically after microtasks are run. See
  /// MicrotasksPolicy for when microtasks are run.
  ///
  /// This needs to be manually invoked only if the embedder is manually
  /// running microtasks via a custom MicrotaskQueue class's PerformCheckpoint.
  /// In that case, it is the embedder's responsibility to make this call at a
  /// time which does not interrupt synchronous ECMAScript code execution.
  /// Corresponds to `v8::Isolate::ClearKeptObjects`.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }
1224
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to attempt to free memory.
  /// Corresponds to `v8::Isolate::LowMemoryNotification`.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }
1231
  /// Get statistics about the heap memory usage.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      // The C++ API fills the struct via an out-parameter; zero-initialize
      // it here, then let the FFI call populate every field.
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }
1242
  /// Returns the number of spaces in the heap.
  /// Corresponds to `v8::Isolate::NumberOfHeapSpaces`.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }
1248
  /// Get the memory usage of a space in the heap.
  ///
  /// `index` is the index of the space to get statistics from, which ranges
  /// from 0 to `number_of_heap_spaces()` - 1.
  ///
  /// Returns `None` when the underlying V8 call reports failure (e.g. the
  /// index is out of range).
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      // The C++ API fills the struct via an out-parameter and returns a
      // success flag.
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }
1274
  /// Tells V8 to capture current stack trace when uncaught exception occurs
  /// and report it to the message listeners. The option is off by default.
  ///
  /// `frame_limit` caps the number of stack frames captured.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }
1291
  /// Adds a message listener (errors only).
  ///
  /// The same message listener can be added more than once and in that
  /// case it will be called more than once for each message.
  ///
  /// The exception object will be passed to the callback.
  /// Corresponds to `v8::Isolate::AddMessageListener`.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }
1302
  /// Adds a message listener for the specified message levels.
  /// Corresponds to `v8::Isolate::AddMessageListenerWithErrorLevel`.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1318
  /// This specifies the callback called when the stack property of Error
  /// is accessed.
  ///
  /// PrepareStackTraceCallback is called when the stack property of an error is
  /// first accessed. The return value will be used as the stack value. If this
  /// callback is registered, the |Error.prepareStackTrace| API will be disabled.
  /// |sites| is an array of call sites, specified in
  /// https://v8.dev/docs/stack-trace-api
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    // Note: the C++ API returns a MaybeLocal but V8 asserts at runtime when
    // it's empty. That is, you can't return None and that's why the Rust API
    // expects Local<Value> instead of Option<Local<Value>>.
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1342
  /// Set the PromiseHook callback for various promise lifecycle
  /// events.
  /// Corresponds to `v8::Isolate::SetPromiseHook`.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }
1349
  /// Set callback to notify about promise reject with no handler, or
  /// revocation of such a previous notification once the handler is added.
  /// Corresponds to `v8::Isolate::SetPromiseRejectCallback`.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }
1361
  /// Installs the callback V8 invokes to settle promises produced by
  /// asynchronous WebAssembly compilation.
  /// Corresponds to `v8::Isolate::SetWasmAsyncResolvePromiseCallback`.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }
1374
  /// Installs the callback V8 consults to decide whether WebAssembly code
  /// generation is allowed in a context.
  /// Corresponds to `v8::Isolate::SetAllowWasmCodeGenerationCallback`.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1387
  #[inline(always)]
  /// This specifies the callback called by the upcoming import.meta
  /// language feature to retrieve host-defined meta data for a module.
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1402
  /// This specifies the callback called by the upcoming dynamic
  /// import() language feature to load modules.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        // `to_c_fn` lowers the Rust callback into the extern "C" shape the
        // binding expects.
        callback.to_c_fn(),
      );
    }
  }
1417
  /// This specifies the callback called by the upcoming dynamic
  /// import() and import.source() language feature to load modules.
  ///
  /// This API is experimental and is expected to be changed or removed in the
  /// future. The callback is currently only called for source-phase
  /// imports. Evaluation-phase imports use the existing
  /// HostImportModuleDynamicallyCallback callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        // `to_c_fn` lowers the Rust callback into the extern "C" shape the
        // binding expects.
        callback.to_c_fn(),
      );
    }
  }
1437
  /// This specifies the callback called by the upcoming `ShadowRealm`
  /// construction language feature to retrieve host created globals.
  ///
  /// The Rust callback is stored in an isolate slot; a fixed extern "C"
  /// trampoline is registered with V8 and looks the callback up from the
  /// slot on each invocation.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // Trampoline: fetches the user callback from the isolate slot and
    // converts its `Option<Local<Context>>` result into a raw pointer
    // (null signals failure to V8).
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      // The slot is guaranteed to be populated by `set_slot` below before
      // this trampoline can ever be installed.
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows x64 ABI: MaybeLocal<Context> must be returned on the stack.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // Install the trampoline only on first registration; later calls just
    // replace the callback stored in the slot, which the same trampoline
    // will pick up.
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1487
  /// Sets a callback for counting the number of times a feature of V8 is used.
  /// Corresponds to `v8::Isolate::SetUseCounterCallback`.
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
1495
  /// Enables the host application to receive a notification before a
  /// garbage collection. Allocations are allowed in the callback function,
  /// but the callback is not re-entrant: if the allocation inside it will
  /// trigger the garbage collection, the callback won't be called again.
  /// It is possible to specify the GCType filter for your callback. But it is
  /// not possible to register the same callback function two times with
  /// different GCType filters.
  ///
  /// `data` is an opaque pointer handed back to `callback` on each invocation.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1520
  /// This function removes callback which was installed by
  /// AddGCPrologueCallback function.
  ///
  /// The `(callback, data)` pair must match the one passed at registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }
1534
  /// Enables the host application to receive a notification after a
  /// garbage collection.
  ///
  /// `data` is an opaque pointer handed back to `callback` on each invocation.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1554
  /// This function removes a callback which was added by
  /// `AddGCEpilogueCallback`.
  ///
  /// The `(callback, data)` pair must match the one passed at registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1568
  /// Add a callback to invoke in case the heap size is close to the heap limit.
  /// If multiple callbacks are added, only the most recently added callback is
  /// invoked.
  ///
  /// `data` is an opaque pointer handed back to `callback` on each invocation.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }
1583
  /// Remove the given callback and restore the heap limit to the given limit.
  /// If the given limit is zero, then it is ignored. If the current heap size
  /// is greater than the given limit, then the heap limit is restored to the
  /// minimal limit that is possible for the current heap size.
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1602
  /// Adjusts the amount of registered external memory. Used to give V8 an
  /// indication of the amount of externally allocated memory that is kept
  /// alive by JavaScript objects. V8 uses this to decide when to perform
  /// global garbage collections. Registering externally allocated memory
  /// will trigger global garbage collections more often than it would
  /// otherwise in an attempt to garbage collect the JavaScript objects
  /// that keep the externally allocated memory alive.
  ///
  /// Returns the resulting total (per the underlying
  /// `v8::Isolate::AdjustAmountOfExternalAllocatedMemory` API).
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }
1622
  /// Returns a reference to the cppgc [`Heap`] attached to this isolate, or
  /// `None` if the underlying pointer is null.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }
1627
  /// Installs the callback invoked on out-of-memory errors.
  /// Corresponds to `v8::Isolate::SetOOMErrorHandler`.
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1632
  /// Returns the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }
1638
  /// Sets the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1644
  /// Runs the default MicrotaskQueue until it gets empty and perform other
  /// microtask checkpoint steps, such as calling ClearKeptObjects. Asserts that
  /// the MicrotasksPolicy is not kScoped. Any exceptions thrown by microtask
  /// callbacks are swallowed.
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }
1653
  /// Enqueues the callback to the default MicrotaskQueue
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    // `&*microtask` passes the underlying `Function` by reference across FFI.
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }
1659
  /// Set whether calling Atomics.wait (a function that may block) is allowed in
  /// this isolate. This can also be configured via
  /// CreateParams::allow_atomics_wait.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1667
  /// Embedder injection point for `WebAssembly.compileStreaming(source)`.
  /// The expectation is that the embedder sets it at most once.
  ///
  /// The callback receives the source argument (string, Promise, etc.)
  /// and an instance of [WasmStreaming]. The [WasmStreaming] instance
  /// can outlive the callback and is used to feed data chunks to V8
  /// asynchronously.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming,
      ),
  {
    // The callback value itself is never stored: `F: UnitType` means it is
    // zero-sized, so `trampoline::<F>()` can reconstruct it from the type
    // alone. That is why the parameter is ignored (`_: F`).
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1692
  /// Notification that the embedder has changed the time zone, daylight savings
  /// time or other date / time configuration parameters. V8 keeps a cache of
  /// various values used for date / time computation. This notification will
  /// reset those cached values for the current context so that date / time
  /// configuration changes would be reflected.
  ///
  /// This API should not be called more than needed as it will negatively impact
  /// the performance of date operations.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }
1713
  /// Returns true if there is ongoing background work within V8 that will
  /// eventually post a foreground task, like asynchronous WebAssembly
  /// compilation.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1721
  /// Request garbage collection with a specific embedder stack state in this
  /// Isolate. It is only valid to call this function if --expose_gc was
  /// specified.
  ///
  /// This should only be used for testing purposes and not to enforce a garbage
  /// collection schedule. It has strong negative impact on the garbage
  /// collection performance. Use IdleNotificationDeadline() or
  /// LowMemoryNotification() instead to influence the garbage collection
  /// schedule.
  #[inline(always)]
  pub fn request_garbage_collection_for_testing(
    &mut self,
    r#type: GarbageCollectionType,
  ) {
    unsafe {
      v8__Isolate__RequestGarbageCollectionForTesting(
        self.as_real_ptr(),
        // Translate the Rust enum into the integer encoding the binding
        // expects: 0 = full GC, 1 = minor GC.
        match r#type {
          GarbageCollectionType::Full => 0,
          GarbageCollectionType::Minor => 1,
        },
      );
    }
  }
1746
  /// Disposes the isolate.  The isolate must not be entered by any
  /// thread to be disposable.
  unsafe fn dispose(&mut self) {
    // No test case in rusty_v8 shows this, but there have been situations in
    // deno where dropping the annex before the slots' state caused a
    // segfault — so the annex teardown ordering around this call matters.
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1756
  /// Take a heap snapshot. The callback is invoked one or more times
  /// with byte slices containing the snapshot serialized as JSON.
  /// It's the callback's responsibility to reassemble them into
  /// a single document, e.g., by writing them to a file.
  /// Note that Chrome DevTools refuses to load snapshots without
  /// a .heapsnapshot suffix.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C trampoline that recovers the Rust closure from the opaque `arg`
    // pointer and forwards each serialized chunk to it.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        // SAFETY: `arg` is the address of the live `callback` local taken
        // below, valid for the duration of the FFI call.
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          // `from_raw_parts` must not be given a possibly-null `data` with
          // len 0; hand over an empty slice instead.
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1794
  /// Set the default context to be included in the snapshot blob.
  /// The snapshot will not contain the global proxy, and we expect one or a
  /// global object template to create one, to be provided upon deserialization.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn set_default_context(&mut self, context: Local<Context>) {
    // `unwrap` is the documented panic: non-snapshot isolates have no creator.
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.set_default_context(context);
  }
1811
1812  /// Add additional context to be included in the snapshot blob.
1813  /// The snapshot will include the global proxy.
1814  ///
1815  /// Returns the index of the context in the snapshot blob.
1816  ///
1817  /// # Panics
1818  ///
1819  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1820  #[inline(always)]
1821  pub fn add_context(&mut self, context: Local<Context>) -> usize {
1822    let snapshot_creator = self
1823      .get_annex_mut()
1824      .maybe_snapshot_creator
1825      .as_mut()
1826      .unwrap();
1827    snapshot_creator.add_context(context)
1828  }
1829
1830  /// Attach arbitrary `v8::Data` to the isolate snapshot, which can be
1831  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1832  /// deserialization. This data does not survive when a new snapshot is created
1833  /// from an existing snapshot.
1834  ///
1835  /// # Panics
1836  ///
1837  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1838  #[inline(always)]
1839  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1840  where
1841    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1842  {
1843    let snapshot_creator = self
1844      .get_annex_mut()
1845      .maybe_snapshot_creator
1846      .as_mut()
1847      .unwrap();
1848    snapshot_creator.add_isolate_data(data)
1849  }
1850
1851  /// Attach arbitrary `v8::Data` to the context snapshot, which can be
1852  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1853  /// deserialization. This data does not survive when a new snapshot is
1854  /// created from an existing snapshot.
1855  ///
1856  /// # Panics
1857  ///
1858  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1859  #[inline(always)]
1860  pub fn add_context_data<T>(
1861    &mut self,
1862    context: Local<Context>,
1863    data: Local<T>,
1864  ) -> usize
1865  where
1866    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1867  {
1868    let snapshot_creator = self
1869      .get_annex_mut()
1870      .maybe_snapshot_creator
1871      .as_mut()
1872      .unwrap();
1873    snapshot_creator.add_context_data(context, data)
1874  }
1875}
1876
/// Rust-side companion state attached to a V8 isolate. Shared (via `Arc`)
/// with every `IsolateHandle`, so parts of it may outlive the isolate itself.
pub(crate) struct IsolateAnnex {
  // Opaque allocations handed over from `CreateParams` that must stay alive
  // as long as the isolate (e.g. so V8 can read external references during
  // snapshot creation — see `create_blob`).
  create_param_allocations: Box<dyn Any>,
  // Per-isolate user data slots, keyed by the stored value's `TypeId`.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  // Finalizer callbacks registered for handles (see `crate::handle::FinalizerMap`).
  finalizer_map: FinalizerMap,
  // Present only when the isolate was created via `Isolate::snapshot_creator`.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // The `isolate` and `isolate_mutex` fields are there so an `IsolateHandle`
  // (which may outlive the isolate itself) can determine whether the isolate
  // is still alive, and if so, get a reference to it. Safety rules:
  // - The 'main thread' must lock the mutex and reset `isolate` to null just
  //   before the isolate is disposed.
  // - Any other thread must lock the mutex while it's reading/using the
  //   `isolate` pointer.
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
1892
// SAFETY: `IsolateAnnex` is shared across threads via `IsolateHandle`. The
// raw `isolate` pointer is only read while `isolate_mutex` is held (or from
// the isolate's own thread) — see the safety rules documented on the struct.
// NOTE(review): soundness for the remaining fields relies on them being
// touched only from the isolate's thread — confirm against all call sites.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1895
impl IsolateAnnex {
  /// Creates an annex for `isolate`, taking ownership of allocations from
  /// `CreateParams` that must stay alive as long as the isolate does.
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    Self {
      create_param_allocations,
      slots: HashMap::default(),
      finalizer_map: FinalizerMap::default(),
      maybe_snapshot_creator: None,
      // Cached raw pointer; reset to null before disposal (see struct docs).
      isolate: isolate.as_real_ptr(),
      isolate_mutex: Mutex::new(()),
    }
  }
}
1911
1912impl Debug for IsolateAnnex {
1913  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1914    f.debug_struct("IsolateAnnex")
1915      .field("isolate", &self.isolate)
1916      .field("isolate_mutex", &self.isolate_mutex)
1917      .finish()
1918  }
1919}
1920
/// IsolateHandle is a thread-safe reference to an Isolate. Its main use is to
/// terminate execution of a running isolate from another thread.
///
/// It is created with Isolate::thread_safe_handle().
///
/// IsolateHandle is Cloneable, Send, and Sync.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1929
1930impl IsolateHandle {
1931  // This function is marked unsafe because it must be called only with either
1932  // IsolateAnnex::mutex locked, or from the main thread associated with the V8
1933  // isolate.
1934  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1935    self.0.isolate
1936  }
1937
1938  #[inline(always)]
1939  fn new(isolate: &Isolate) -> Self {
1940    Self(isolate.get_annex_arc())
1941  }
1942
1943  /// Forcefully terminate the current thread of JavaScript execution
1944  /// in the given isolate.
1945  ///
1946  /// This method can be used by any thread even if that thread has not
1947  /// acquired the V8 lock with a Locker object.
1948  ///
1949  /// Returns false if Isolate was already destroyed.
1950  #[inline(always)]
1951  pub fn terminate_execution(&self) -> bool {
1952    let _lock = self.0.isolate_mutex.lock().unwrap();
1953    if self.0.isolate.is_null() {
1954      false
1955    } else {
1956      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1957      true
1958    }
1959  }
1960
1961  /// Resume execution capability in the given isolate, whose execution
1962  /// was previously forcefully terminated using TerminateExecution().
1963  ///
1964  /// When execution is forcefully terminated using TerminateExecution(),
1965  /// the isolate can not resume execution until all JavaScript frames
1966  /// have propagated the uncatchable exception which is generated.  This
1967  /// method allows the program embedding the engine to handle the
1968  /// termination event and resume execution capability, even if
1969  /// JavaScript frames remain on the stack.
1970  ///
1971  /// This method can be used by any thread even if that thread has not
1972  /// acquired the V8 lock with a Locker object.
1973  ///
1974  /// Returns false if Isolate was already destroyed.
1975  #[inline(always)]
1976  pub fn cancel_terminate_execution(&self) -> bool {
1977    let _lock = self.0.isolate_mutex.lock().unwrap();
1978    if self.0.isolate.is_null() {
1979      false
1980    } else {
1981      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
1982      true
1983    }
1984  }
1985
1986  /// Is V8 terminating JavaScript execution.
1987  ///
1988  /// Returns true if JavaScript execution is currently terminating
1989  /// because of a call to TerminateExecution.  In that case there are
1990  /// still JavaScript frames on the stack and the termination
1991  /// exception is still active.
1992  ///
1993  /// Returns false if Isolate was already destroyed.
1994  #[inline(always)]
1995  pub fn is_execution_terminating(&self) -> bool {
1996    let _lock = self.0.isolate_mutex.lock().unwrap();
1997    if self.0.isolate.is_null() {
1998      false
1999    } else {
2000      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
2001    }
2002  }
2003
2004  /// Request V8 to interrupt long running JavaScript code and invoke
2005  /// the given |callback| passing the given |data| to it. After |callback|
2006  /// returns control will be returned to the JavaScript code.
2007  /// There may be a number of interrupt requests in flight.
2008  /// Can be called from another thread without acquiring a |Locker|.
2009  /// Registered |callback| must not reenter interrupted Isolate.
2010  ///
2011  /// Returns false if Isolate was already destroyed.
2012  // Clippy warns that this method is dereferencing a raw pointer, but it is
2013  // not: https://github.com/rust-lang/rust-clippy/issues/3045
2014  #[allow(clippy::not_unsafe_ptr_arg_deref)]
2015  #[inline(always)]
2016  pub fn request_interrupt(
2017    &self,
2018    callback: InterruptCallback,
2019    data: *mut c_void,
2020  ) -> bool {
2021    let _lock = self.0.isolate_mutex.lock().unwrap();
2022    if self.0.isolate.is_null() {
2023      false
2024    } else {
2025      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2026      true
2027    }
2028  }
2029}
2030
/// Same as Isolate but gets disposed when it goes out of scope.
#[derive(Debug)]
pub struct OwnedIsolate {
  // Non-null pointer to the C++ isolate; disposed in this type's `Drop`.
  cxx_isolate: NonNull<RealIsolate>,
}
2036
impl OwnedIsolate {
  /// Wraps the raw isolate pointer and enters the isolate on the current
  /// thread.
  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
    let isolate = Self::new_already_entered(cxx_isolate);
    unsafe {
      isolate.enter();
    }
    isolate
  }

  /// Wraps a raw isolate pointer without entering it; the caller is
  /// responsible for having entered it already (or entering it later,
  /// e.g. via a `Locker`).
  ///
  /// # Panics
  ///
  /// Panics if `cxx_isolate` is null.
  pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
    let owned_isolate: OwnedIsolate = Self { cxx_isolate };
    // owned_isolate.init_scope_root();
    owned_isolate
  }

  /// Creates a new isolate without auto-entering it.
  /// This is intended for use with `v8::Locker` for multi-threaded access.
  ///
  /// When using this method, you MUST use `v8::Locker` to enter the isolate
  /// before any V8 operations, and the isolate must be entered when dropped.
  ///
  /// # Safety
  ///
  /// The caller must ensure that:
  /// 1. A `v8::Locker` is held when performing any V8 operations
  /// 2. The isolate is entered (via Locker) when it is dropped
  pub unsafe fn new_for_locker(params: CreateParams) -> Self {
    let cxx_isolate = Isolate::new_impl(params);
    Self::new_already_entered(cxx_isolate)
  }

  /// Returns true if this isolate is the currently entered isolate on the
  /// calling thread.
  pub fn is_current(&self) -> bool {
    unsafe {
      if let Some(current) = current_raw_isolate_ptr() {
        let this = self.as_raw_isolate_ptr();
        // Compare the raw addresses (`.0` is the wrapper's inner pointer).
        !this.is_null() && current.0 == this.0
      } else {
        // No isolate is entered on this thread at all.
        false
      }
    }
  }
}
2082
impl Drop for OwnedIsolate {
  /// Tears the isolate down: checks snapshot state, balances enter/exit
  /// bookkeeping, drops the annex, notifies the platform, then disposes
  /// the C++ isolate. The order of these steps matters.
  fn drop(&mut self) {
    unsafe {
      // A snapshot-creator isolate must be consumed with `create_blob`;
      // dropping it here is a usage error.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      let is_current =
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent());
      if is_current {
        // Entered on this thread: exit so disposal sees a clean state.
        self.exit();
      } else {
        // Not entered here: cycle an enter/exit through a Locker so V8's
        // per-thread bookkeeping is consistent before disposal.
        let locker = crate::Locker::new(self);
        locker.enter();
        locker.exit();
      }
      // Dispose the annex before the isolate itself — the reverse order has
      // caused segfaults in practice (see note in `Isolate::dispose`).
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2106
impl OwnedIsolate {
  /// Creates a snapshot data blob.
  /// This must not be called from within a handle scope.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // create_param_allocations is needed during CreateBlob
    // so v8 can read external references. Binding it keeps the allocations
    // alive until after `create_blob` below returns.
    let _create_param_allocations = unsafe {
      // self.dispose_scope_root();
      self.dispose_annex()
    };

    // The isolate is owned by the snapshot creator; we need to forget it
    // here as the snapshot creator will drop it when running the destructor.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2135
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: reinterprets `&NonNull<RealIsolate>` as `&Isolate`, which
    // presumes `Isolate` wraps exactly a `NonNull<RealIsolate>` with the
    // same layout — NOTE(review): confirm against `Isolate`'s definition.
    unsafe {
      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
    }
  }
}
2144
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: same layout assumption as the `Deref` impl above: `Isolate`
    // is presumed layout-compatible with `NonNull<RealIsolate>`.
    unsafe {
      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
        &mut self.cxx_isolate,
      )
    }
  }
}
2154
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Deref-coerces through `DerefMut` to the wrapped `Isolate`.
    self
  }
}
2160
impl AsMut<Isolate> for Isolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Identity conversion, so generic code can accept `impl AsMut<Isolate>`.
    self
  }
}
2166
/// Collection of V8 heap information.
///
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
/// get heap statistics from V8.
// Thin newtype over the C++ `v8::HeapStatistics` struct filled in by V8.
pub struct HeapStatistics(v8__HeapStatistics);
2172
// Each getter simply exposes the corresponding field of the underlying C++
// `v8::HeapStatistics` struct.
impl HeapStatistics {
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Returns whether V8 overwrites heap garbage with a bit pattern
  /// ("zapping"), as reported by the underlying statistics struct.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2246
/// Collection of V8 heap space information.
// Thin newtype over the C++ `v8::HeapSpaceStatistics` struct filled in by V8.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2248
2249impl HeapSpaceStatistics {
2250  pub fn space_name(&self) -> &'static CStr {
2251    unsafe { CStr::from_ptr(self.0.space_name_) }
2252  }
2253
2254  pub fn space_size(&self) -> usize {
2255    self.0.space_size_
2256  }
2257
2258  pub fn space_used_size(&self) -> usize {
2259    self.0.space_used_size_
2260  }
2261
2262  pub fn space_available_size(&self) -> usize {
2263    self.0.space_available_size_
2264  }
2265
2266  pub fn physical_space_size(&self) -> usize {
2267    self.0.physical_space_size_
2268  }
2269}
2270
// Adapts a plain Rust closure into the C-ABI callback V8 expects for
// `Error.prepareStackTrace`. Two variants exist because the return-value
// passing convention differs between the Windows x64 ABI and System V.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows x64 ABI: MaybeLocal<Value> returned on the stack.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      // Write the result through the caller-provided return slot, then hand
      // the slot pointer back, per the Windows x64 aggregate-return rules.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // System V ABI
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      // Result is returned by value in the wrapper struct.
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2306
/// A special hasher that is optimized for hashing `std::any::TypeId` values.
/// `TypeId` values are actually 64-bit values which themselves come out of some
/// hash function, so it's unnecessary to shuffle their bits any further.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // Holds the single `u64` written by `write_u64`; `None` until then.
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  fn write(&mut self, _bytes: &[u8]) {
    // `TypeId` hashes itself exclusively through `write_u64`; any byte-wise
    // write means this hasher is being used with the wrong key type.
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, type_id_hash: u64) {
    // `TypeId`'s internal hash only feeds the bottom 64 bits through here,
    // even on Rust versions where `TypeId` itself is 128 bits wide. Exactly
    // one write is expected per hashing run; store the value verbatim.
    let previous = self.state.replace(type_id_hash);
    debug_assert_eq!(previous, None);
  }

  #[inline]
  fn finish(&self) -> u64 {
    // Panics if nothing was written, which would indicate misuse.
    self.state.unwrap()
  }
}
2333
2334/// Factory for instances of `TypeIdHasher`. This is the type that one would
2335/// pass to the constructor of some map/set type in order to make it use
2336/// `TypeIdHasher` instead of the default hasher implementation.
2337#[derive(Copy, Clone, Default)]
2338pub(crate) struct BuildTypeIdHasher;
2339
2340impl BuildHasher for BuildTypeIdHasher {
2341  type Hasher = TypeIdHasher;
2342
2343  #[inline]
2344  fn build_hasher(&self) -> Self::Hasher {
2345    Default::default()
2346  }
2347}
2348
// Compile-time guard: `TypeIdHasher::write_u64` assumes `TypeId` is a 64- or
// 128-bit value; fail the build if a future Rust release changes its layout.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2359
/// Type-erased storage for a single isolate slot value.
///
/// Values whose size and alignment fit in a `usize` are stored inline;
/// anything larger is boxed and the `Box` itself is stored inline. An
/// optional destructor function pointer drops the payload on `Drop`.
pub(crate) struct RawSlot {
  data: RawSlotData,
  dtor: Option<RawSlotDtor>,
}

// Inline storage: one machine word, possibly uninitialized/zeroed.
type RawSlotData = MaybeUninit<usize>;
// Destructor invoked over the inline storage when the slot is dropped.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();

impl RawSlot {
  /// Stores `value`, boxing it first when it does not fit inline.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    match Self::needs_box::<T>() {
      true => Self::new_internal(Box::new(value)),
      false => Self::new_internal(value),
    }
  }

  // SAFETY: a valid value of type `T` must have been stored in the slot
  // earlier. There is no verification that the type param provided by the
  // caller is correct.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    let raw = self.data.as_ptr();
    unsafe {
      if Self::needs_box::<T>() {
        // Inline payload is a `Box<T>`; deref-coerces to `&T` on return.
        &*raw.cast::<Box<T>>()
      } else {
        &*raw.cast::<T>()
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    let raw = self.data.as_mut_ptr();
    unsafe {
      if Self::needs_box::<T>() {
        &mut *raw.cast::<Box<T>>()
      } else {
        &mut *raw.cast::<T>()
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        // Read the inline `Box<T>` and move its payload out of the heap.
        *ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        ptr::read(self.data.as_ptr() as *mut T)
      };
      // The payload has been moved out; skip `Drop` so the stored dtor does
      // not run over the now-logically-empty slot.
      forget(self);
      value
    }
  }

  /// True when `T` cannot be stored inline in the `usize`-sized slot.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  /// Writes `value` (which must fit inline) into a fresh slot, recording a
  /// destructor only when `B` actually needs dropping.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut slot = Self {
      data: RawSlotData::zeroed(),
      dtor: if needs_drop::<B>() {
        Some(Self::drop_internal::<B>)
      } else {
        None
      },
    };
    unsafe {
      ptr::write(slot.data.as_mut_ptr() as *mut B, value);
    }
    slot
  }

  // SAFETY: a valid value of type `B` (either `T` or `Box<T>`) must be
  // stored inline in the slot.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      ptr::drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}

impl Drop for RawSlot {
  fn drop(&mut self) {
    // Run the stored destructor, if any, over the inline payload.
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}
2455
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // SAFETY: `self.cxx_isolate` is a live, non-null isolate pointer for the
    // whole lifetime of `self` (it is only disposed in `Drop`).
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  fn as_ref(&self) -> &Isolate {
    // Identity conversion, so generic code can accept `impl AsRef<Isolate>`.
    self
  }
}