// v8/isolate.rs
1// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapCodeStatistics;
22use crate::binding::v8__HeapSpaceStatistics;
23use crate::binding::v8__HeapStatistics;
24use crate::binding::v8__Isolate__UseCounterFeature;
25pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
26use crate::cppgc::Heap;
27use crate::external_references::ExternalReference;
28use crate::function::FunctionCallbackInfo;
29use crate::gc::GCCallbackFlags;
30use crate::gc::GCType;
31use crate::handle::FinalizerCallback;
32use crate::handle::FinalizerMap;
33use crate::isolate_create_params::CreateParams;
34use crate::isolate_create_params::raw;
35use crate::promise::PromiseRejectMessage;
36use crate::snapshot::SnapshotCreator;
37use crate::support::MapFnFrom;
38use crate::support::MapFnTo;
39use crate::support::Opaque;
40use crate::support::ToCFn;
41use crate::support::UnitType;
42use crate::support::char;
43use crate::support::int;
44use crate::support::size_t;
45use crate::wasm::WasmStreaming;
46use crate::wasm::trampoline;
47use std::ffi::CStr;
48
49use std::any::Any;
50use std::any::TypeId;
51use std::borrow::Cow;
52use std::collections::HashMap;
53use std::ffi::c_void;
54use std::fmt::{self, Debug, Formatter};
55use std::hash::BuildHasher;
56use std::hash::Hasher;
57use std::mem::MaybeUninit;
58use std::mem::align_of;
59use std::mem::forget;
60use std::mem::needs_drop;
61use std::mem::size_of;
62use std::ops::Deref;
63use std::ops::DerefMut;
64use std::pin::pin;
65use std::ptr;
66use std::ptr::NonNull;
67use std::ptr::addr_of_mut;
68use std::ptr::drop_in_place;
69use std::ptr::null_mut;
70use std::sync::Arc;
71use std::sync::Mutex;
72
/// Policy for running microtasks:
///   - explicit: microtasks are invoked with the
///     Isolate::PerformMicrotaskCheckpoint() method;
///   - auto: microtasks are invoked when the script call depth decrements
///     to zero.
// repr(C) with explicit discriminants: this value crosses the FFI boundary
// (v8__Isolate__SetMicrotasksPolicy / GetMicrotasksPolicy), so the numeric
// values presumably must stay in sync with the C++ v8::MicrotasksPolicy enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // Scoped = 1 (RAII) is omitted for now, doesn't quite map to idiomatic Rust.
  Auto = 2,
}
85
/// Memory pressure level for the MemoryPressureNotification.
/// None hints V8 that there is no memory pressure.
/// Moderate hints V8 to speed up incremental garbage collection at the cost
/// of higher latency due to garbage collection pauses.
/// Critical hints V8 to free memory as soon as possible. Garbage collection
/// pauses at this level will be large.
// NOTE: v8__Isolate__MemoryPressureNotification takes a `u8`, i.e. this
// enum is passed across FFI as its discriminant value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}
99
/// Time zone redetection indicator for
/// DateTimeConfigurationChangeNotification.
///
/// kSkip indicates V8 that the notification should not trigger redetecting
/// host time zone. kRedetect indicates V8 that host time zone should be
/// redetected, and used to set the default time zone.
///
/// The host time zone detection may require file system access or similar
/// operations unlikely to be available inside a sandbox. If v8 is run inside a
/// sandbox, the host time zone has to be detected outside the sandbox before
/// calling DateTimeConfigurationChangeNotification function.
// Passed by value to v8__Isolate__DateTimeConfigurationChangeNotification.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  Skip = 0,
  Redetect = 1,
}
117
/// PromiseHook with type Init is called when a new promise is
/// created. When a new promise is created as part of the chain in the
/// case of Promise.then or in the intermediate promises created by
/// Promise.{race, all}/AsyncFunctionAwait, we pass the parent promise
/// otherwise we pass undefined.
///
/// PromiseHook with type Resolve is called at the beginning of
/// resolve or reject function defined by CreateResolvingFunctions.
///
/// PromiseHook with type Before is called at the beginning of the
/// PromiseReactionJob.
///
/// PromiseHook with type After is called right at the end of the
/// PromiseReactionJob.
// First argument of the `PromiseHook` callback type below.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}
140
/// Types of garbage collections that can be requested via
/// [`Isolate::request_garbage_collection_for_testing`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  /// A full (major) garbage collection.
  Full,
  /// A minor (young-generation) garbage collection.
  Minor,
}
149
/// Callback invoked for messages (e.g. uncaught exceptions) reported by V8;
/// receives the message and the error value.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  /// Bitmask of console/message severity levels, used to filter which
  /// messages a listener registered with
  /// [`Isolate::add_message_listener_with_error_level`] receives.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    // Union of all five levels above.
    const ALL = (1 << 5) - 1;
  }
}

/// Callback invoked at each promise lifecycle event; see [`PromiseHookType`].
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback invoked when a promise is rejected without a handler (or a
/// handler is added later); details are in the [`PromiseRejectMessage`].
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome passed to [`WasmAsyncResolvePromiseCallback`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback used to resolve/reject the promise of an async Wasm operation.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

/// Returns whether Wasm code generation is allowed for the given context;
/// the string argument is the source being compiled.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;
186
/// HostInitializeImportMetaObjectCallback is called the first time import.meta
/// is accessed for a module. Subsequent access will reuse the same value.
///
/// The method combines two implementation-defined abstract operations into one:
/// HostGetImportMetaProperties and HostFinalizeImportMeta.
///
/// The embedder should use v8::Object::CreateDataProperty to add properties on
/// the meta object.
// Arguments: the active context, the module whose import.meta is being
// materialized, and the (initially empty) meta object to populate.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
197
198/// HostImportModuleDynamicallyCallback is called when we require the embedder
199/// to load a module. This is used as part of the dynamic import syntax.
200///
201/// The host_defined_options are metadata provided by the host environment, which may be used
202/// to customize or further specify how the module should be imported.
203///
204/// The resource_name is the identifier or path for the module or script making the import request.
205///
206/// The specifier is the name of the module that should be imported.
207///
208/// The import_attributes are import assertions for this request in the form:
209/// [key1, value1, key2, value2, ...] where the keys and values are of type
210/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
211/// returned from ModuleRequest::GetImportAssertions(), this array does not
212/// contain the source Locations of the assertions.
213///
214/// The embedder must compile, instantiate, evaluate the Module, and obtain its
215/// namespace object.
216///
217/// The Promise returned from this function is forwarded to userland JavaScript.
218/// The embedder must resolve this promise with the module namespace object. In
219/// case of an exception, the embedder must reject this promise with the
220/// exception. If the promise creation itself fails (e.g. due to stack
221/// overflow), the embedder must propagate that exception by returning an empty
222/// MaybeLocal.
223///
224/// # Example
225///
226/// ```
227/// fn host_import_module_dynamically_callback_example<'s>(
228///   scope: &mut v8::HandleScope<'s>,
229///   host_defined_options: v8::Local<'s, v8::Data>,
230///   resource_name: v8::Local<'s, v8::Value>,
231///   specifier: v8::Local<'s, v8::String>,
232///   import_attributes: v8::Local<'s, v8::FixedArray>,
233/// ) -> Option<v8::Local<'s, v8::Promise>> {
234///   todo!()
235/// }
236/// ```
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this high-level callback into the platform-specific raw
  /// `extern "C"` function pointer that is registered with V8 (see
  /// `RawHostImportModuleDynamicallyCallback` for the per-ABI signatures).
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
249
250#[cfg(target_family = "unix")]
251pub(crate) type RawHostImportModuleDynamicallyCallback =
252  for<'s> unsafe extern "C" fn(
253    Local<'s, Context>,
254    Local<'s, Data>,
255    Local<'s, Value>,
256    Local<'s, String>,
257    Local<'s, FixedArray>,
258  ) -> *mut Promise;
259
// On Windows x64/aarch64 the MaybeLocal<Promise> return value does not fit
// the register-return convention used by the C++ side, so it is passed via
// a caller-provided out-pointer (first parameter) which the callee writes
// and then returns.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
273
// Blanket implementation: any zero-sized closure/function with the right
// signature can be turned into the raw callback V8 expects.
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Inner layer: set up a CallbackScope for the context V8 handed us and
    // invoke the user callback. `F::get()` (from `UnitType`) presumably
    // materializes the zero-sized callback value — confirm in support.rs.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Outer layer (Unix ABI): the MaybeLocal<Promise> result is returned in
    // a register; `None` maps to a null pointer (empty MaybeLocal).
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer (Windows x64/aarch64 ABI): the result is written through
    // the caller-provided out-pointer, which is also returned.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
365
366/// HostImportModuleWithPhaseDynamicallyCallback is called when we
367/// require the embedder to load a module with a specific phase. This is used
368/// as part of the dynamic import syntax.
369///
370/// The referrer contains metadata about the script/module that calls
371/// import.
372///
373/// The specifier is the name of the module that should be imported.
374///
375/// The phase is the phase of the import requested.
376///
377/// The import_attributes are import attributes for this request in the form:
378/// [key1, value1, key2, value2, ...] where the keys and values are of type
379/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
380/// returned from ModuleRequest::GetImportAttributes(), this array does not
381/// contain the source Locations of the attributes.
382///
383/// The Promise returned from this function is forwarded to userland
384/// JavaScript. The embedder must resolve this promise according to the phase
385/// requested:
386/// - For ModuleImportPhase::kSource, the promise must be resolved with a
387///   compiled ModuleSource object, or rejected with a SyntaxError if the
388///   module does not support source representation.
389/// - For ModuleImportPhase::kEvaluation, the promise must be resolved with a
390///   ModuleNamespace object of a module that has been compiled, instantiated,
391///   and evaluated.
392///
393/// In case of an exception, the embedder must reject this promise with the
394/// exception. If the promise creation itself fails (e.g. due to stack
395/// overflow), the embedder must propagate that exception by returning an empty
396/// MaybeLocal.
397///
398/// This callback is still experimental and is only invoked for source phase
399/// imports.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this high-level callback into the platform-specific raw
  /// `extern "C"` function pointer registered with V8 (see
  /// `RawHostImportModuleWithPhaseDynamicallyCallback`).
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
413
414#[cfg(target_family = "unix")]
415pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
416  for<'s> unsafe extern "C" fn(
417    Local<'s, Context>,
418    Local<'s, Data>,
419    Local<'s, Value>,
420    Local<'s, String>,
421    ModuleImportPhase,
422    Local<'s, FixedArray>,
423  ) -> *mut Promise;
424
// Windows x64/aarch64: the MaybeLocal<Promise> result travels through a
// caller-provided out-pointer (first parameter), which is also returned.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
439
// Blanket implementation mirroring the one for
// HostImportModuleDynamicallyCallback, with the extra ModuleImportPhase
// argument threaded through both adapter layers.
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Inner layer: build a CallbackScope from the context and call the user
    // callback with the arguments forwarded unchanged.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Outer layer (Unix ABI): result returned in a register; None -> null.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer (Windows x64/aarch64 ABI): write through the out-pointer,
    // then return it.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
538
/// `HostCreateShadowRealmContextCallback` is called each time a `ShadowRealm`
/// is being constructed. You can use [`HandleScope::get_current_context`] to
/// get the [`Context`] in which the constructor is being run.
///
/// The method combines [`Context`] creation and the implementation-defined
/// abstract operation `HostInitializeShadowRealm` into one.
///
/// The embedder should use [`Context::new`] to create a new context. If the
/// creation fails, the embedder must propagate that exception by returning
/// [`None`].
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback carrying an embedder data pointer; used
/// with `v8__Isolate__Add/RemoveGC{Prologue,Epilogue}Callback`.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback run on the isolate's thread after `RequestInterrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Called when the heap approaches its limit; returns the new heap limit.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

/// Details passed to [`OomErrorCallback`] on out-of-memory.
#[repr(C)]
pub struct OomDetails {
  // True when the OOM originated from the JS heap (as opposed to e.g.
  // process-level allocation failure).
  pub is_heap_oom: bool,
  // C string with additional detail; `char` here is the C `char` alias from
  // crate::support, not Rust's `char`.
  pub detail: *const char,
}

pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);

// Windows x64 ABI: MaybeLocal<Value> returned on the stack.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// System V ABI: MaybeLocal<Value> returned in a register.
// System V i386 ABI: Local<Value> returned in hidden pointer (struct).
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Feature identifier reported through [`UseCounterCallback`].
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback V8 invokes to record usage of a counted feature.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
604
// Raw FFI declarations for the C++ glue layer. Each `v8__Isolate__*` symbol
// wraps the corresponding `v8::Isolate` member function; signatures must stay
// in sync with the C++ bindings.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__GetHeapCodeAndMetadataStatistics(
    isolate: *mut RealIsolate,
    code_statistics: *mut v8__HeapCodeStatistics,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The ShadowRealm callback has two ABI variants, mirroring the
  // register-return vs out-pointer-return split used elsewhere in this file.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  // Heap profiler entry point: streams the snapshot to `callback` in chunks.
  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
784
/// Isolate represents an isolated instance of the V8 engine.  V8 isolates have
/// completely separate states.  Objects from one isolate must not be used in
/// other isolates.  The embedder can create multiple isolates and use them in
/// parallel in multiple threads.  An isolate can be entered by at most one
/// thread at any given time.  The Locker/Unlocker API must be used to
/// synchronize.
///
/// rusty_v8 note: Unlike in the C++ API, the Isolate is entered when it is
/// constructed and exited when dropped. Because of that v8::OwnedIsolate
/// instances must be dropped in the reverse order of creation
// repr(transparent) over NonNull<RealIsolate>: this layout is relied on by
// the reference-casting constructors (from_raw_ref / ref_from_raw_isolate_ptr).
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);
798
799#[repr(transparent)]
800#[derive(Debug, Clone, Copy)]
801pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
802
803impl UnsafeRawIsolatePtr {
804  pub fn null() -> Self {
805    Self(std::ptr::null_mut())
806  }
807
808  pub fn is_null(&self) -> bool {
809    self.0.is_null()
810  }
811}
812
/// Opaque stand-in for the C++ `v8::Isolate` object; only ever handled
/// through raw pointers passed across the FFI boundary.
#[repr(C)]
pub struct RealIsolate(Opaque);
815
816impl Isolate {
  /// Returns the raw pointer to the underlying C++ isolate.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Exposes this isolate as an unchecked raw pointer wrapper.
  ///
  /// # Safety
  /// The returned pointer is only meaningful while the isolate is alive;
  /// the wrapper carries no lifetime or validity guarantee.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }
824
825  #[inline]
826  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
827    Self(NonNull::new(ptr.0).unwrap())
828  }
829
  /// Wraps a raw isolate pointer without a null check.
  ///
  /// # Safety
  /// `ptr` must be non-null (undefined behavior otherwise, via
  /// `NonNull::new_unchecked`) and must point to a live isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }

  /// Wraps a bare `*mut RealIsolate` without a null check.
  ///
  /// # Safety
  /// Same contract as [`Isolate::from_raw_isolate_ptr_unchecked`].
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }

  /// Wraps a bare `*mut RealIsolate`, panicking if it is null.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }
844
845  #[inline]
846  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
847    if ptr.is_null() {
848      panic!("UnsafeRawIsolatePtr is null");
849    }
850    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
851  }
852
  /// Reinterprets a borrowed [`UnsafeRawIsolatePtr`] as a borrowed
  /// [`Isolate`] without a null check.
  ///
  /// # Safety
  /// The wrapped pointer must be non-null and refer to a live isolate
  /// (`Isolate` holds a `NonNull`, so a null here is undefined behavior).
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }
859
860  #[inline]
861  pub unsafe fn ref_from_raw_isolate_ptr_mut(
862    ptr: &mut UnsafeRawIsolatePtr,
863  ) -> &mut Self {
864    if ptr.is_null() {
865      panic!("UnsafeRawIsolatePtr is null");
866    }
867    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
868  }
869
  /// Mutable, unchecked counterpart of
  /// [`Isolate::ref_from_raw_isolate_ptr_unchecked`].
  ///
  /// # Safety
  /// The wrapped pointer must be non-null and refer to a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Wraps an already-non-null isolate pointer.
  ///
  /// # Safety
  /// `ptr` must refer to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets a borrowed `NonNull<RealIsolate>` as a borrowed `Isolate`.
  ///
  /// # Safety
  /// The pointer must refer to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable counterpart of [`Isolate::from_raw_ref`].
  ///
  /// # Safety
  /// The pointer must refer to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }
895
  // Isolate data slots used internally by rusty_v8.
  // Slot 0 stores the pointer to the IsolateAnnex (see create_annex below);
  // the first INTERNAL_DATA_SLOT_COUNT slots are reserved for internal use.
  const ANNEX_SLOT: u32 = 0;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  // Sanity check: V8 must provide at least as many embedder data slots as
  // rusty_v8 reserves for itself.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
907
  // Shared construction path: creates the C++ isolate from the finalized
  // CreateParams, runs rusty_v8-side initialization (slot checks + annex),
  // and hands back the raw pointer for the caller to wrap (e.g. in an
  // OwnedIsolate). The temporary `Isolate` value here is only a view used
  // to call `initialize`; ownership is conveyed via the returned pointer.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    // `finalize` splits params into the raw C struct and the Rust-side
    // allocations that must be kept alive for the isolate's lifetime.
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Performs rusty_v8-side initialization of a freshly created isolate:
  /// verifies the embedder data slot layout and installs the annex that
  /// keeps `create_param_allocations` alive.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }
921
  /// Creates a new isolate.  Does not change the currently entered
  /// isolate.
  ///
  /// When an isolate is no longer used its resources should be freed
  /// by calling V8::dispose().  Using the delete operator is not allowed.
  ///
  /// V8::initialize() must have run prior to this.
  ///
  /// rusty_v8 note: the returned [`OwnedIsolate`] manages the isolate's
  /// lifetime and disposes it on drop.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
933
  /// Creates an isolate set up for building a startup snapshot; see
  /// [`SnapshotCreator`]. `external_references` registers pointers that the
  /// serializer must be able to rewire on deserialization.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }
941
  /// Like [`Isolate::snapshot_creator`], but seeds the new snapshot from an
  /// existing snapshot blob instead of starting from scratch.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }
954
955  /// Initial configuration parameters for a new Isolate.
956  #[inline(always)]
957  pub fn create_params() -> CreateParams {
958    CreateParams::default()
959  }
960
  /// Returns an [`IsolateHandle`] for this isolate, which can be used (e.g.
  /// from another thread) to request termination; see the `IsolateHandle`
  /// docs for the exact guarantees.
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
965
966  /// See [`IsolateHandle::terminate_execution`]
967  #[inline(always)]
968  pub fn terminate_execution(&self) -> bool {
969    self.thread_safe_handle().terminate_execution()
970  }
971
972  /// See [`IsolateHandle::cancel_terminate_execution`]
973  #[inline(always)]
974  pub fn cancel_terminate_execution(&self) -> bool {
975    self.thread_safe_handle().cancel_terminate_execution()
976  }
977
978  /// See [`IsolateHandle::is_execution_terminating`]
979  #[inline(always)]
980  pub fn is_execution_terminating(&self) -> bool {
981    self.thread_safe_handle().is_execution_terminating()
982  }
983
  /// Allocates the `IsolateAnnex` (rusty_v8's per-isolate side table) and
  /// stores a leaked `Arc` pointer to it in internal data slot
  /// [`Self::ANNEX_SLOT`]. The matching `Arc::from_raw` happens in
  /// `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must be empty: creating the annex twice would leak the first
    // one and break the refcount accounting in `dispose_annex`.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
993
994  unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
995    // Set the `isolate` pointer inside the annex struct to null, so any
996    // IsolateHandle that outlives the isolate will know that it can't call
997    // methods on the isolate.
998    let annex = self.get_annex_mut();
999    {
1000      let _lock = annex.isolate_mutex.lock().unwrap();
1001      annex.isolate = null_mut();
1002    }
1003
1004    // Clear slots and drop owned objects that were taken out of `CreateParams`.
1005    let create_param_allocations =
1006      std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1007    annex.slots.clear();
1008
1009    // Run through any remaining guaranteed finalizers.
1010    for finalizer in annex.finalizer_map.drain() {
1011      if let FinalizerCallback::Guaranteed(callback) = finalizer {
1012        callback();
1013      }
1014    }
1015
1016    // Subtract one from the Arc<IsolateAnnex> reference count.
1017    unsafe { Arc::from_raw(annex) };
1018    self.set_data(0, null_mut());
1019
1020    create_param_allocations
1021  }
1022
1023  #[inline(always)]
1024  fn get_annex(&self) -> &IsolateAnnex {
1025    let annex_ptr =
1026      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
1027    assert!(!annex_ptr.is_null());
1028    unsafe { &*annex_ptr }
1029  }
1030
1031  #[inline(always)]
1032  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
1033    let annex_ptr =
1034      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
1035    assert!(!annex_ptr.is_null());
1036    unsafe { &mut *annex_ptr }
1037  }
1038
1039  pub(crate) fn set_snapshot_creator(
1040    &mut self,
1041    snapshot_creator: SnapshotCreator,
1042  ) {
1043    let prev = self
1044      .get_annex_mut()
1045      .maybe_snapshot_creator
1046      .replace(snapshot_creator);
1047    assert!(prev.is_none());
1048  }
1049
  /// Shared access to the annex's finalizer registry.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }
1053
  /// Mutable access to the annex's finalizer registry.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }
1057
  /// Clones the annex `Arc` without disturbing the leaked reference that the
  /// isolate's data slot owns.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    // Temporarily resurrect the Arc from the leaked pointer, clone it, then
    // re-leak one reference so the slot's refcount contribution is unchanged.
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1064
1065  /// Retrieve embedder-specific data from the isolate.
1066  /// Returns NULL if SetData has never been called for the given `slot`.
1067  pub fn get_data(&self, slot: u32) -> *mut c_void {
1068    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
1069  }
1070
  /// Associate embedder-specific data with the isolate. `slot` has to be
  /// between 0 and `Isolate::get_number_of_data_slots()`.
  ///
  /// User slots are offset past the slots rusty_v8 reserves internally.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }
1077
  /// Returns the maximum number of available embedder data slots. Valid slots
  /// are in the range of `0 <= n < Isolate::get_number_of_data_slots()`.
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    // Hide the slots rusty_v8 reserves for itself from the public count.
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }
1084
  /// Raw slot read with no internal/user offset applied.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }
1089
  /// Raw slot write with no internal/user offset applied.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1094
1095  // pub(crate) fn init_scope_root(&mut self) {
1096  //   ScopeData::new_root(self);
1097  // }
1098
1099  // pub(crate) fn dispose_scope_root(&mut self) {
1100  //   ScopeData::drop_root(self);
1101  // }
1102
1103  // /// Returns a pointer to the `ScopeData` struct for the current scope.
1104  // #[inline(always)]
1105  // pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
1106  //   let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
1107  //   NonNull::new(scope_data_ptr).map(NonNull::cast)
1108  // }
1109
1110  // /// Updates the slot that stores a `ScopeData` pointer for the current scope.
1111  // #[inline(always)]
1112  // pub(crate) fn set_current_scope_data(
1113  //   &mut self,
1114  //   scope_data: Option<NonNull<ScopeData>>,
1115  // ) {
1116  //   let scope_data_ptr = scope_data
1117  //     .map(NonNull::cast)
1118  //     .map_or_else(null_mut, NonNull::as_ptr);
1119  //   self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
1120  // }
1121
1122  /// Get a reference to embedder data added with `set_slot()`.
1123  #[inline(always)]
1124  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
1125    self
1126      .get_annex()
1127      .slots
1128      .get(&TypeId::of::<T>())
1129      .map(|slot| unsafe { slot.borrow::<T>() })
1130  }
1131
1132  /// Get a mutable reference to embedder data added with `set_slot()`.
1133  #[inline(always)]
1134  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
1135    self
1136      .get_annex_mut()
1137      .slots
1138      .get_mut(&TypeId::of::<T>())
1139      .map(|slot| unsafe { slot.borrow_mut::<T>() })
1140  }
1141
1142  /// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
1143  /// with an Isolate.
1144  ///
1145  /// This method gives ownership of value to the Isolate. Exactly one object of
1146  /// each type can be associated with an Isolate. If called more than once with
1147  /// an object of the same type, the earlier version will be dropped and
1148  /// replaced.
1149  ///
1150  /// Returns true if value was set without replacing an existing value.
1151  ///
1152  /// The value will be dropped when the isolate is dropped.
1153  #[inline(always)]
1154  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
1155    self
1156      .get_annex_mut()
1157      .slots
1158      .insert(TypeId::of::<T>(), RawSlot::new(value))
1159      .is_none()
1160  }
1161
1162  /// Removes the embedder data added with `set_slot()` and returns it if it exists.
1163  #[inline(always)]
1164  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
1165    self
1166      .get_annex_mut()
1167      .slots
1168      .remove(&TypeId::of::<T>())
1169      .map(|slot| unsafe { slot.into_inner::<T>() })
1170  }
1171
  /// Sets this isolate as the entered one for the current thread.
  /// Saves the previously entered one (if any), so that it can be
  /// restored when exiting.  Re-entering an isolate is allowed.
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Entering must be balanced with a matching [`Isolate::exit`]; normally
  /// the owning wrapper manages this, so manual calls are rarely needed.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }
1184
  /// Exits this isolate by restoring the previously entered one in the
  /// current thread.  The isolate may still stay the same, if it was
  /// entered more than once.
  ///
  /// Requires: self == Isolate::GetCurrent().
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Must balance a prior [`Isolate::enter`] on the current thread.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }
1199
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to guide heuristics.
  /// It is allowed to call this function from another thread while
  /// the isolate is executing long running JavaScript code.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    // The level enum is passed across the FFI boundary as its u8 discriminant.
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }
1210
  /// Clears the set of objects held strongly by the heap. This set of
  /// objects are originally built when a WeakRef is created or
  /// successfully dereferenced.
  ///
  /// This is invoked automatically after microtasks are run. See
  /// MicrotasksPolicy for when microtasks are run.
  ///
  /// This needs to be manually invoked only if the embedder is manually
  /// running microtasks via a custom MicrotaskQueue class's PerformCheckpoint.
  /// In that case, it is the embedder's responsibility to make this call at a
  /// time which does not interrupt synchronous ECMAScript code execution.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }
1226
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to attempt to free memory.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }
1233
1234  /// Get statistics about the heap memory usage.
1235  #[inline(always)]
1236  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
1237    let inner = unsafe {
1238      let mut s = MaybeUninit::zeroed();
1239      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
1240      s.assume_init()
1241    };
1242    HeapStatistics(inner)
1243  }
1244
  /// Returns the number of spaces in the heap.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }
1250
1251  /// Get the memory usage of a space in the heap.
1252  ///
1253  /// \param space_statistics The HeapSpaceStatistics object to fill in
1254  ///   statistics.
1255  /// \param index The index of the space to get statistics from, which ranges
1256  ///   from 0 to NumberOfHeapSpaces() - 1.
1257  /// \returns true on success.
1258  #[inline(always)]
1259  pub fn get_heap_space_statistics(
1260    &mut self,
1261    index: usize,
1262  ) -> Option<HeapSpaceStatistics> {
1263    let inner = unsafe {
1264      let mut s = MaybeUninit::zeroed();
1265      if !v8__Isolate__GetHeapSpaceStatistics(
1266        self.as_real_ptr(),
1267        s.as_mut_ptr(),
1268        index,
1269      ) {
1270        return None;
1271      }
1272      s.assume_init()
1273    };
1274    Some(HeapSpaceStatistics(inner))
1275  }
1276
1277  /// Get code and metadata statistics for the heap.
1278  ///
1279  /// \returns true on success.
1280  #[inline(always)]
1281  pub fn get_heap_code_and_metadata_statistics(
1282    &mut self,
1283  ) -> Option<HeapCodeStatistics> {
1284    let inner = unsafe {
1285      let mut s = MaybeUninit::zeroed();
1286      if !v8__Isolate__GetHeapCodeAndMetadataStatistics(
1287        self.as_real_ptr(),
1288        s.as_mut_ptr(),
1289      ) {
1290        return None;
1291      }
1292      s.assume_init()
1293    };
1294    Some(HeapCodeStatistics(inner))
1295  }
1296
  /// Tells V8 to capture current stack trace when uncaught exception occurs
  /// and report it to the message listeners. The option is off by default.
  ///
  /// `frame_limit` caps how many stack frames are captured.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }
1313
  /// Adds a message listener (errors only).
  ///
  /// The same message listener can be added more than once and in that
  /// case it will be called more than once for each message.
  ///
  /// The exception object will be passed to the callback.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }
1324
  /// Adds a message listener for the specified message levels.
  ///
  /// `message_levels` is a bitmask selecting which error levels the listener
  /// receives.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1340
  /// This specifies the callback called when the stack property of Error
  /// is accessed.
  ///
  /// PrepareStackTraceCallback is called when the stack property of an error is
  /// first accessed. The return value will be used as the stack value. If this
  /// callback is registered, the |Error.prepareStackTrace| API will be disabled.
  /// |sites| is an array of call sites, specified in
  /// https://v8.dev/docs/stack-trace-api
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    // Note: the C++ API returns a MaybeLocal but V8 asserts at runtime when
    // it's empty. That is, you can't return None and that's why the Rust API
    // expects Local<Value> instead of Option<Local<Value>>.
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1364
  /// Set the PromiseHook callback for various promise lifecycle
  /// events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }
1371
  /// Set callback to notify about promise reject with no handler, or
  /// revocation of such a previous notification once the handler is added.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }
1383
  /// Registers the callback V8 invokes to resolve or reject the promise
  /// backing an asynchronous WebAssembly operation (per the callback type's
  /// contract — see `WasmAsyncResolvePromiseCallback`).
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }
1396
  /// Registers the callback V8 consults to decide whether WebAssembly code
  /// generation is allowed (see `AllowWasmCodeGenerationCallback` for the
  /// exact contract).
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1409
  /// This specifies the callback called by the upcoming import.meta
  /// language feature to retrieve host-defined meta data for a module.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1424
  /// This specifies the callback called by the upcoming dynamic
  /// import() language feature to load modules.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    // `to_c_fn` converts the Rust callback into the C ABI function pointer
    // that V8 expects.
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1439
  /// This specifies the callback called by the upcoming dynamic
  /// import() and import.source() language feature to load modules.
  ///
  /// This API is experimental and is expected to be changed or removed in the
  /// future. The callback is currently only called for source-phase
  /// imports. Evaluation-phase imports use the existing
  /// HostImportModuleDynamicallyCallback callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    // `to_c_fn` converts the Rust callback into the C ABI function pointer
    // that V8 expects.
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1459
  /// This specifies the callback called by the upcoming `ShadowRealm`
  /// construction language feature to retrieve host created globals.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // C-ABI trampoline: recovers the user callback from the isolate slot and
    // forwards the call, translating the result to a raw (possibly null)
    // context pointer.
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      // None maps to a null pointer, signaling failure to V8.
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows x64 ABI: MaybeLocal<Context> must be returned on the stack.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // The user callback is stashed in an isolate slot; the C-side callback
    // only needs to be registered the first time (subsequent calls just
    // replace the slot contents).
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1509
  /// Sets a callback for counting the number of times a feature of V8 is used.
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
1517
  /// Enables the host application to receive a notification before a
  /// garbage collection. Allocations are allowed in the callback function,
  /// but the callback is not re-entrant: if the allocation inside it will
  /// trigger the garbage collection, the callback won't be called again.
  /// It is possible to specify the GCType filter for your callback. But it is
  /// not possible to register the same callback function two times with
  /// different GCType filters.
  ///
  /// `data` is an opaque pointer passed back to `callback` unchanged.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1542
  /// This function removes callback which was installed by
  /// AddGCPrologueCallback function.
  ///
  /// Both `callback` and `data` must match the registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }
1556
  /// Enables the host application to receive a notification after a
  /// garbage collection.
  ///
  /// `data` is an opaque pointer passed back to `callback` unchanged.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1576
  /// This function removes a callback which was added by
  /// `AddGCEpilogueCallback`.
  ///
  /// Both `callback` and `data` must match the registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1590
  /// Add a callback to invoke in case the heap size is close to the heap limit.
  /// If multiple callbacks are added, only the most recently added callback is
  /// invoked.
  ///
  /// `data` is an opaque pointer passed back to `callback` unchanged.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }
1605
  /// Remove the given callback and restore the heap limit to the given limit.
  /// If the given limit is zero, then it is ignored. If the current heap size
  /// is greater than the given limit, then the heap limit is restored to the
  /// minimal limit that is possible for the current heap size.
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1624
  /// Adjusts the amount of registered external memory. Used to give V8 an
  /// indication of the amount of externally allocated memory that is kept
  /// alive by JavaScript objects. V8 uses this to decide when to perform
  /// global garbage collections. Registering externally allocated memory
  /// will trigger global garbage collections more often than it would
  /// otherwise in an attempt to garbage collect the JavaScript objects
  /// that keep the externally allocated memory alive.
  ///
  /// `change_in_bytes` may be negative; returns the new total reported to V8.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }
1644
  /// Returns the C++ (cppgc) heap attached to this isolate, if any.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }
1649
  /// Registers the callback V8 invokes on an out-of-memory error.
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1654
  /// Returns the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }
1660
  /// Sets the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1666
  /// Runs the default MicrotaskQueue until it gets empty and perform other
  /// microtask checkpoint steps, such as calling ClearKeptObjects. Asserts that
  /// the MicrotasksPolicy is not kScoped. Any exceptions thrown by microtask
  /// callbacks are swallowed.
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }
1675
  /// Enqueues the callback to the default MicrotaskQueue
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }
1681
  /// Set whether calling Atomics.wait (a function that may block) is allowed in
  /// this isolate. This can also be configured via
  /// CreateParams::allow_atomics_wait.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1689
  /// Embedder injection point for `WebAssembly.compileStreaming(source)`.
  /// The expectation is that the embedder sets it at most once.
  ///
  /// The callback receives the source argument (string, Promise, etc.)
  /// and an instance of [WasmStreaming]. The [WasmStreaming] instance
  /// can outlive the callback and is used to feed data chunks to V8
  /// asynchronously.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming<false>,
      ),
  {
    // The closure value itself is never stored: `F: UnitType` means it is
    // zero-sized, so `trampoline::<F>()` can materialize it from nothing
    // inside the C callback.
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1714
  /// Notification that the embedder has changed the time zone, daylight savings
  /// time or other date / time configuration parameters. V8 keeps a cache of
  /// various values used for date / time computation. This notification will
  /// reset those cached values for the current context so that date / time
  /// configuration changes would be reflected.
  ///
  /// This API should not be called more than needed as it will negatively impact
  /// the performance of date operations.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }
1735
  /// Returns true if there is ongoing background work within V8 that will
  /// eventually post a foreground task, like asynchronous WebAssembly
  /// compilation.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1743
1744  /// Request garbage collection with a specific embedderstack state in this
1745  /// Isolate. It is only valid to call this function if --expose_gc was
1746  /// specified.
1747  ///
1748  /// This should only be used for testing purposes and not to enforce a garbage
1749  /// collection schedule. It has strong negative impact on the garbage
1750  /// collection performance. Use IdleNotificationDeadline() or
1751  /// LowMemoryNotification() instead to influence the garbage collection
1752  /// schedule.
1753  #[inline(always)]
1754  pub fn request_garbage_collection_for_testing(
1755    &mut self,
1756    r#type: GarbageCollectionType,
1757  ) {
1758    unsafe {
1759      v8__Isolate__RequestGarbageCollectionForTesting(
1760        self.as_real_ptr(),
1761        match r#type {
1762          GarbageCollectionType::Full => 0,
1763          GarbageCollectionType::Minor => 1,
1764        },
1765      );
1766    }
1767  }
1768
  /// Disposes the isolate.  The isolate must not be entered by any
  /// thread to be disposable.
  ///
  /// # Safety
  /// After this call the underlying isolate pointer is invalid; `self` must
  /// not be used again.
  unsafe fn dispose(&mut self) {
    // No test case in rusty_v8 show this, but there have been situations in
    // deno where dropping Annex before the states causes a segfault.
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1778
  /// Take a heap snapshot. The callback is invoked one or more times
  /// with byte slices containing the snapshot serialized as JSON.
  /// It's the callback's responsibility to reassemble them into
  /// a single document, e.g., by writing them to a file.
  /// Note that Chrome DevTools refuses to load snapshots without
  /// a .heapsnapshot suffix.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C-ABI trampoline: `arg` is the address of the caller's closure, which
    // outlives the synchronous `v8__HeapProfiler__TakeHeapSnapshot` call.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          // `data` may not be a valid slice pointer when size is 0; hand the
          // callback an empty slice instead.
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1816
1817  /// Set the default context to be included in the snapshot blob.
1818  /// The snapshot will not contain the global proxy, and we expect one or a
1819  /// global object template to create one, to be provided upon deserialization.
1820  ///
1821  /// # Panics
1822  ///
1823  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1824  #[inline(always)]
1825  pub fn set_default_context(&mut self, context: Local<Context>) {
1826    let snapshot_creator = self
1827      .get_annex_mut()
1828      .maybe_snapshot_creator
1829      .as_mut()
1830      .unwrap();
1831    snapshot_creator.set_default_context(context);
1832  }
1833
1834  /// Add additional context to be included in the snapshot blob.
1835  /// The snapshot will include the global proxy.
1836  ///
1837  /// Returns the index of the context in the snapshot blob.
1838  ///
1839  /// # Panics
1840  ///
1841  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1842  #[inline(always)]
1843  pub fn add_context(&mut self, context: Local<Context>) -> usize {
1844    let snapshot_creator = self
1845      .get_annex_mut()
1846      .maybe_snapshot_creator
1847      .as_mut()
1848      .unwrap();
1849    snapshot_creator.add_context(context)
1850  }
1851
1852  /// Attach arbitrary `v8::Data` to the isolate snapshot, which can be
1853  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1854  /// deserialization. This data does not survive when a new snapshot is created
1855  /// from an existing snapshot.
1856  ///
1857  /// # Panics
1858  ///
1859  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1860  #[inline(always)]
1861  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1862  where
1863    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1864  {
1865    let snapshot_creator = self
1866      .get_annex_mut()
1867      .maybe_snapshot_creator
1868      .as_mut()
1869      .unwrap();
1870    snapshot_creator.add_isolate_data(data)
1871  }
1872
1873  /// Attach arbitrary `v8::Data` to the context snapshot, which can be
1874  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1875  /// deserialization. This data does not survive when a new snapshot is
1876  /// created from an existing snapshot.
1877  ///
1878  /// # Panics
1879  ///
1880  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1881  #[inline(always)]
1882  pub fn add_context_data<T>(
1883    &mut self,
1884    context: Local<Context>,
1885    data: Local<T>,
1886  ) -> usize
1887  where
1888    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1889  {
1890    let snapshot_creator = self
1891      .get_annex_mut()
1892      .maybe_snapshot_creator
1893      .as_mut()
1894      .unwrap();
1895    snapshot_creator.add_context_data(context, data)
1896  }
1897}
1898
pub(crate) struct IsolateAnnex {
  // Allocations handed over through `CreateParams`; kept alive here so they
  // outlive every use by the isolate.
  create_param_allocations: Box<dyn Any>,
  // Type-keyed embedder storage: at most one `RawSlot` value per Rust type.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  // Finalizer callbacks registered for handles (see `handle::FinalizerMap`).
  finalizer_map: FinalizerMap,
  // `Some` only when the isolate was created via `Isolate::snapshot_creator`.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // The `isolate` and `isolate_mutex` fields are there so an `IsolateHandle`
  // (which may outlive the isolate itself) can determine whether the isolate
  // is still alive, and if so, get a reference to it. Safety rules:
  // - The 'main thread' must lock the mutex and reset `isolate` to null just
  //   before the isolate is disposed.
  // - Any other thread must lock the mutex while it's reading/using the
  //   `isolate` pointer.
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
1914
// SAFETY: cross-thread access to the raw `isolate` pointer is guarded by
// `isolate_mutex` per the safety rules documented on the struct above. The
// remaining fields are presumably only touched from the isolate's main
// thread — NOTE(review): confirm `slots`/`finalizer_map` are never accessed
// off-thread without synchronization.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1917
impl IsolateAnnex {
  /// Creates a fresh annex for `isolate`, taking ownership of the
  /// `CreateParams` allocations that must stay alive as long as the isolate.
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    Self {
      create_param_allocations,
      slots: HashMap::default(),
      finalizer_map: FinalizerMap::default(),
      maybe_snapshot_creator: None,
      isolate: isolate.as_real_ptr(),
      isolate_mutex: Mutex::new(()),
    }
  }
}
1933
1934impl Debug for IsolateAnnex {
1935  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1936    f.debug_struct("IsolateAnnex")
1937      .field("isolate", &self.isolate)
1938      .field("isolate_mutex", &self.isolate_mutex)
1939      .finish()
1940  }
1941}
1942
/// IsolateHandle is a thread-safe reference to an Isolate. Its main use is to
/// terminate execution of a running isolate from another thread.
///
/// It is created with Isolate::thread_safe_handle().
///
/// IsolateHandle is Cloneable, Send, and Sync.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1951
1952impl IsolateHandle {
1953  // This function is marked unsafe because it must be called only with either
1954  // IsolateAnnex::mutex locked, or from the main thread associated with the V8
1955  // isolate.
1956  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1957    self.0.isolate
1958  }
1959
1960  #[inline(always)]
1961  fn new(isolate: &Isolate) -> Self {
1962    Self(isolate.get_annex_arc())
1963  }
1964
1965  /// Forcefully terminate the current thread of JavaScript execution
1966  /// in the given isolate.
1967  ///
1968  /// This method can be used by any thread even if that thread has not
1969  /// acquired the V8 lock with a Locker object.
1970  ///
1971  /// Returns false if Isolate was already destroyed.
1972  #[inline(always)]
1973  pub fn terminate_execution(&self) -> bool {
1974    let _lock = self.0.isolate_mutex.lock().unwrap();
1975    if self.0.isolate.is_null() {
1976      false
1977    } else {
1978      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1979      true
1980    }
1981  }
1982
1983  /// Resume execution capability in the given isolate, whose execution
1984  /// was previously forcefully terminated using TerminateExecution().
1985  ///
1986  /// When execution is forcefully terminated using TerminateExecution(),
1987  /// the isolate can not resume execution until all JavaScript frames
1988  /// have propagated the uncatchable exception which is generated.  This
1989  /// method allows the program embedding the engine to handle the
1990  /// termination event and resume execution capability, even if
1991  /// JavaScript frames remain on the stack.
1992  ///
1993  /// This method can be used by any thread even if that thread has not
1994  /// acquired the V8 lock with a Locker object.
1995  ///
1996  /// Returns false if Isolate was already destroyed.
1997  #[inline(always)]
1998  pub fn cancel_terminate_execution(&self) -> bool {
1999    let _lock = self.0.isolate_mutex.lock().unwrap();
2000    if self.0.isolate.is_null() {
2001      false
2002    } else {
2003      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
2004      true
2005    }
2006  }
2007
2008  /// Is V8 terminating JavaScript execution.
2009  ///
2010  /// Returns true if JavaScript execution is currently terminating
2011  /// because of a call to TerminateExecution.  In that case there are
2012  /// still JavaScript frames on the stack and the termination
2013  /// exception is still active.
2014  ///
2015  /// Returns false if Isolate was already destroyed.
2016  #[inline(always)]
2017  pub fn is_execution_terminating(&self) -> bool {
2018    let _lock = self.0.isolate_mutex.lock().unwrap();
2019    if self.0.isolate.is_null() {
2020      false
2021    } else {
2022      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
2023    }
2024  }
2025
2026  /// Request V8 to interrupt long running JavaScript code and invoke
2027  /// the given |callback| passing the given |data| to it. After |callback|
2028  /// returns control will be returned to the JavaScript code.
2029  /// There may be a number of interrupt requests in flight.
2030  /// Can be called from another thread without acquiring a |Locker|.
2031  /// Registered |callback| must not reenter interrupted Isolate.
2032  ///
2033  /// Returns false if Isolate was already destroyed.
2034  // Clippy warns that this method is dereferencing a raw pointer, but it is
2035  // not: https://github.com/rust-lang/rust-clippy/issues/3045
2036  #[allow(clippy::not_unsafe_ptr_arg_deref)]
2037  #[inline(always)]
2038  pub fn request_interrupt(
2039    &self,
2040    callback: InterruptCallback,
2041    data: *mut c_void,
2042  ) -> bool {
2043    let _lock = self.0.isolate_mutex.lock().unwrap();
2044    if self.0.isolate.is_null() {
2045      false
2046    } else {
2047      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2048      true
2049    }
2050  }
2051}
2052
/// Same as Isolate but gets disposed when it goes out of scope.
#[derive(Debug)]
pub struct OwnedIsolate {
  // Owning pointer to the underlying C++ isolate; released via
  // `v8__Isolate__Dispose` in this struct's `Drop` impl.
  cxx_isolate: NonNull<RealIsolate>,
}
2058
2059impl OwnedIsolate {
2060  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2061    let isolate = Self::new_already_entered(cxx_isolate);
2062    unsafe {
2063      isolate.enter();
2064    }
2065    isolate
2066  }
2067
2068  pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2069    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2070    let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2071    // owned_isolate.init_scope_root();
2072    owned_isolate
2073  }
2074}
2075
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      // A snapshot-creator isolate must be consumed with `create_blob`
      // (which forgets `self`), so reaching this drop with a creator still
      // attached is an embedder bug.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Safety: We need to check `this == Isolate::GetCurrent()` before calling exit()
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      // self.dispose_scope_root();
      // Teardown order is load-bearing: exit the isolate, drop the Rust-side
      // annex, notify the platform, and only then destroy the C++ isolate.
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2097
impl OwnedIsolate {
  /// Creates a snapshot data blob.
  /// This must not be called from within a handle scope.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // create_param_allocations is needed during CreateBlob
    // so v8 can read external references; binding it to a local keeps it
    // alive until this function returns, i.e. past the `create_blob` call.
    let _create_param_allocations = unsafe {
      // self.dispose_scope_root();
      self.dispose_annex()
    };

    // The isolate is owned by the snapshot creator; we need to forget it
    // here as the snapshot creator will drop it when running the destructor.
    // (Running our own `Drop` too would double-free the C++ isolate.)
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2126
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: relies on `Isolate` being layout-compatible with
    // `NonNull<RealIsolate>` — the same assumption made by `deref_mut` and
    // `Isolate::from_raw_ref`. NOTE(review): confirm against `Isolate`'s
    // definition before changing its representation.
    unsafe {
      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
    }
  }
}
2135
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: same layout-compatibility assumption as the `Deref` impl
    // above; the mutable borrow of `self` guarantees exclusivity.
    unsafe {
      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
        &mut self.cxx_isolate,
      )
    }
  }
}
2145
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Coerces through the `DerefMut` impl above.
    self
  }
}
2151
impl AsMut<Isolate> for Isolate {
  // Identity conversion, so generic code can accept `impl AsMut<Isolate>`.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2157
/// Collection of V8 heap information.
///
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
/// get heap statistics from V8.
// Thin newtype over the raw binding struct; the accessors below expose its
// fields read-only.
pub struct HeapStatistics(v8__HeapStatistics);
2163
impl HeapStatistics {
  /// Total committed heap size, in bytes.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Portion of the heap committed for executable code, in bytes.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Physical memory backing the heap, in bytes.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Heap memory still available to the isolate, in bytes.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Total size of global handles, in bytes.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Used size of global handles, in bytes.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Heap memory currently in use, in bytes.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Configured maximum heap size, in bytes.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Memory obtained via `malloc`, in bytes.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// Externally allocated memory reported to V8, in bytes.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Peak of memory obtained via `malloc`, in bytes.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Number of native contexts currently alive.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Number of contexts that were detached but not yet garbage collected.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Returns the total number of bytes allocated since the Isolate was created.
  /// This includes all heap objects allocated in any space (new, old, code,
  /// etc.).
  #[inline(always)]
  pub fn total_allocated_bytes(&self) -> u64 {
    self.0.total_allocated_bytes_
  }

  /// Returns whether V8 overwrites heap garbage with a bit pattern
  /// ("zapping"), which is a build-time debugging option.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2245
2246pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2247
2248impl HeapSpaceStatistics {
2249  pub fn space_name(&self) -> &'static CStr {
2250    unsafe { CStr::from_ptr(self.0.space_name_) }
2251  }
2252
2253  pub fn space_size(&self) -> usize {
2254    self.0.space_size_
2255  }
2256
2257  pub fn space_used_size(&self) -> usize {
2258    self.0.space_used_size_
2259  }
2260
2261  pub fn space_available_size(&self) -> usize {
2262    self.0.space_available_size_
2263  }
2264
2265  pub fn physical_space_size(&self) -> usize {
2266    self.0.physical_space_size_
2267  }
2268}
2269
2270pub struct HeapCodeStatistics(v8__HeapCodeStatistics);
2271
2272impl HeapCodeStatistics {
2273  pub fn code_and_metadata_size(&self) -> usize {
2274    self.0.code_and_metadata_size_
2275  }
2276
2277  pub fn bytecode_and_metadata_size(&self) -> usize {
2278    self.0.bytecode_and_metadata_size_
2279  }
2280
2281  pub fn external_script_source_size(&self) -> usize {
2282    self.0.external_script_source_size_
2283  }
2284
2285  pub fn cpu_profiler_metadata_size(&self) -> usize {
2286    self.0.cpu_profiler_metadata_size_
2287  }
2288}
2289
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows x64 ABI: MaybeLocal<Value> returned on the stack.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      // Enter a callback scope for the context V8 invoked us with.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      // Write the result into the caller-provided return slot and hand the
      // slot pointer back, matching the Windows x64 convention for by-value
      // struct returns.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // System V ABI
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      // Pointer-sized result is returned in a register via the newtype
      // wrapper on System V targets.
      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2325
/// A special hasher that is optimized for hashing `std::any::TypeId` values.
/// `TypeId` values are actually 64-bit values which themselves come out of some
/// hash function, so it's unnecessary to shuffle their bits any further.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // Holds the single `u64` written via `write_u64`; `None` until then.
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  #[inline]
  fn finish(&self) -> u64 {
    // `write_u64` must have been called exactly once before `finish`.
    self.state.unwrap()
  }

  fn write(&mut self, _bytes: &[u8]) {
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  // The internal hash function of TypeId only takes the bottom 64-bits, even
  // on versions of Rust that use a 128-bit TypeId.
  #[inline]
  fn write_u64(&mut self, value: u64) {
    let previous = self.state.replace(value);
    debug_assert_eq!(previous, None);
  }
}
2352
2353/// Factory for instances of `TypeIdHasher`. This is the type that one would
2354/// pass to the constructor of some map/set type in order to make it use
2355/// `TypeIdHasher` instead of the default hasher implementation.
2356#[derive(Copy, Clone, Default)]
2357pub(crate) struct BuildTypeIdHasher;
2358
2359impl BuildHasher for BuildTypeIdHasher {
2360  type Hasher = TypeIdHasher;
2361
2362  #[inline]
2363  fn build_hasher(&self) -> Self::Hasher {
2364    Default::default()
2365  }
2366}
2367
// Compile-time layout checks: `TypeIdHasher::write_u64` relies on `TypeId`
// hashing through a 64-bit value, so fail the build if `TypeId`'s size or
// alignment ever stops matching `u64` or `u128`.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2378
/// Type-erased storage for a single value, used for the isolate's per-type
/// slots. Values that fit in a `usize` (both size and alignment) are stored
/// inline; anything larger is boxed (see `RawSlot::needs_box`).
pub(crate) struct RawSlot {
  // Raw bytes of the stored value, or of the `Box` pointing to it.
  data: RawSlotData,
  // Destructor to run on drop; `None` when the stored type needs no drop.
  dtor: Option<RawSlotDtor>,
}

// Inline storage: one `usize` worth of possibly-uninitialized bytes.
type RawSlotData = MaybeUninit<usize>;
// Type-erased destructor for the value held in a `RawSlotData`.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
2386
impl RawSlot {
  /// Stores `value`, boxing it first when it does not fit (or is
  /// over-aligned for) the inline buffer.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  // SAFETY: a valid value of type `T` must have been stored in the slot
  // earlier. There is no verification that the type param provided by the
  // caller is correct.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Slot holds a `Box<T>`; `&Box<T>` deref-coerces to `&T` on return.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        // Move the `Box<T>` out of the slot, then move `T` out of the box.
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // `forget` prevents `Drop` from running `dtor` on the now-moved-out
      // bytes, which would be a double drop.
      forget(self);
      value
    }
  }

  // True when `T` cannot live inline in `RawSlotData` and must be boxed.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // `B` is either `T` itself or `Box<T>`; in both cases it must fit inline
  // (a `Box` is pointer-sized, so it always does).
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // SAFETY: a valid value of type `T` or `Box<T>` must be stored in the slot.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
2466
impl Drop for RawSlot {
  fn drop(&mut self) {
    // Run the type-erased destructor, if the stored type required one.
    if let Some(dtor) = self.dtor {
      // SAFETY: `dtor` was recorded in `new_internal` for exactly the type
      // currently stored in `data`, and the value is still live here.
      unsafe { dtor(&mut self.data) };
    }
  }
}
2474
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // Relies on `Isolate::from_raw_ref` upholding the same layout
    // assumption as the `Deref` impl above (an `&Isolate` can be derived
    // from `&NonNull<RealIsolate>`).
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  // Identity conversion, so generic code can accept `impl AsRef<Isolate>`.
  fn as_ref(&self) -> &Isolate {
    self
  }
}