// v8/isolate.rs
// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapCodeStatistics;
22use crate::binding::v8__HeapSpaceStatistics;
23use crate::binding::v8__HeapStatistics;
24use crate::binding::v8__Isolate__UseCounterFeature;
25pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
26use crate::cppgc::Heap;
27use crate::external_references::ExternalReference;
28use crate::function::FunctionCallbackInfo;
29use crate::gc::GCCallbackFlags;
30use crate::gc::GCType;
31use crate::handle::FinalizerCallback;
32use crate::handle::FinalizerMap;
33use crate::isolate_create_params::CreateParams;
34use crate::isolate_create_params::raw;
35use crate::promise::PromiseRejectMessage;
36use crate::snapshot::SnapshotCreator;
37use crate::support::MapFnFrom;
38use crate::support::MapFnTo;
39use crate::support::Opaque;
40use crate::support::ToCFn;
41use crate::support::UnitType;
42use crate::support::char;
43use crate::support::int;
44use crate::support::size_t;
45use crate::wasm::WasmStreaming;
46use crate::wasm::trampoline;
47use std::ffi::CStr;
48
49use std::any::Any;
50use std::any::TypeId;
51use std::borrow::Cow;
52use std::collections::HashMap;
53use std::ffi::c_void;
54use std::fmt::{self, Debug, Formatter};
55use std::hash::BuildHasher;
56use std::hash::Hasher;
57use std::mem::MaybeUninit;
58use std::mem::align_of;
59use std::mem::forget;
60use std::mem::needs_drop;
61use std::mem::size_of;
62use std::ops::Deref;
63use std::ops::DerefMut;
64use std::pin::pin;
65use std::ptr;
66use std::ptr::NonNull;
67use std::ptr::addr_of_mut;
68use std::ptr::drop_in_place;
69use std::ptr::null_mut;
70use std::sync::Arc;
71
72use parking_lot::Mutex;
73
/// Policy for running microtasks:
///   - explicit: microtasks are invoked with the
///     Isolate::PerformMicrotaskCheckpoint() method;
///   - auto: microtasks are invoked when the script call depth decrements
///     to zero.
///
/// `#[repr(C)]` with explicit discriminants: this value crosses the FFI
/// boundary by value (see `v8__Isolate__SetMicrotasksPolicy` below), so the
/// discriminants must stay in sync with V8's C++ `v8::MicrotasksPolicy`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // Scoped = 1 (RAII) is omitted for now, doesn't quite map to idiomatic Rust.
  Auto = 2,
}

/// Memory pressure level for the MemoryPressureNotification.
/// None hints V8 that there is no memory pressure.
/// Moderate hints V8 to speed up incremental garbage collection at the cost
/// of higher latency due to garbage collection pauses.
/// Critical hints V8 to free memory as soon as possible. Garbage collection
/// pauses at this level will be large.
// Passed over FFI as a u8 (see `v8__Isolate__MemoryPressureNotification`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}

/// Time zone redetection indicator for
/// DateTimeConfigurationChangeNotification.
///
/// kSkip indicates V8 that the notification should not trigger redetecting
/// host time zone. kRedetect indicates V8 that host time zone should be
/// redetected, and used to set the default time zone.
///
/// The host time zone detection may require file system access or similar
/// operations unlikely to be available inside a sandbox. If v8 is run inside a
/// sandbox, the host time zone has to be detected outside the sandbox before
/// calling DateTimeConfigurationChangeNotification function.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  Skip = 0,
  Redetect = 1,
}

/// PromiseHook with type Init is called when a new promise is
/// created. When a new promise is created as part of the chain in the
/// case of Promise.then or in the intermediate promises created by
/// Promise.{race, all}/AsyncFunctionAwait, we pass the parent promise
/// otherwise we pass undefined.
///
/// PromiseHook with type Resolve is called at the beginning of
/// resolve or reject function defined by CreateResolvingFunctions.
///
/// PromiseHook with type Before is called at the beginning of the
/// PromiseReactionJob.
///
/// PromiseHook with type After is called right at the end of the
/// PromiseReactionJob.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}

/// Types of garbage collections that can be requested via
/// [`Isolate::request_garbage_collection_for_testing`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  Full,
  Minor,
}
150
/// Listener for V8 messages; receives the message and the associated error
/// value. Registered through the `v8__Isolate__AddMessageListener*` FFI
/// functions below.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  /// Bit set of message severity levels, used to select which messages a
  /// listener registered via
  /// `v8__Isolate__AddMessageListenerWithErrorLevel` receives.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    // Union of all individual levels above.
    const ALL = (1 << 5) - 1;
  }
}

/// Raw promise lifecycle hook; see [`PromiseHookType`] for the meaning of
/// each invocation.
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback invoked for promise rejection events.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome value passed to a [`WasmAsyncResolvePromiseCallback`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback used to resolve/reject the promise of an async WASM operation.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

/// Returns whether WASM code generation is allowed for the given context.
// NOTE(review): the String argument is presumably the source being
// compiled — confirm against v8.h.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// HostInitializeImportMetaObjectCallback is called the first time import.meta
/// is accessed for a module. Subsequent access will reuse the same value.
///
/// The method combines two implementation-defined abstract operations into one:
/// HostGetImportMetaProperties and HostFinalizeImportMeta.
///
/// The embedder should use v8::Object::CreateDataProperty to add properties on
/// the meta object.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
198
199/// HostImportModuleDynamicallyCallback is called when we require the embedder
200/// to load a module. This is used as part of the dynamic import syntax.
201///
202/// The host_defined_options are metadata provided by the host environment, which may be used
203/// to customize or further specify how the module should be imported.
204///
205/// The resource_name is the identifier or path for the module or script making the import request.
206///
207/// The specifier is the name of the module that should be imported.
208///
209/// The import_attributes are import assertions for this request in the form:
210/// [key1, value1, key2, value2, ...] where the keys and values are of type
211/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
212/// returned from ModuleRequest::GetImportAssertions(), this array does not
213/// contain the source Locations of the assertions.
214///
215/// The embedder must compile, instantiate, evaluate the Module, and obtain its
216/// namespace object.
217///
218/// The Promise returned from this function is forwarded to userland JavaScript.
219/// The embedder must resolve this promise with the module namespace object. In
220/// case of an exception, the embedder must reject this promise with the
221/// exception. If the promise creation itself fails (e.g. due to stack
222/// overflow), the embedder must propagate that exception by returning an empty
223/// MaybeLocal.
224///
225/// # Example
226///
227/// ```
228/// fn host_import_module_dynamically_callback_example<'s>(
229///   scope: &mut v8::HandleScope<'s>,
230///   host_defined_options: v8::Local<'s, v8::Data>,
231///   resource_name: v8::Local<'s, v8::Value>,
232///   specifier: v8::Local<'s, v8::String>,
233///   import_attributes: v8::Local<'s, v8::FixedArray>,
234/// ) -> Option<v8::Local<'s, v8::Promise>> {
235///   todo!()
236/// }
237/// ```
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts the high-level Rust closure into the platform-specific raw
  /// C ABI function pointer V8 expects (see the blanket impl below).
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
250
251#[cfg(target_family = "unix")]
252pub(crate) type RawHostImportModuleDynamicallyCallback =
253  for<'s> unsafe extern "C" fn(
254    Local<'s, Context>,
255    Local<'s, Data>,
256    Local<'s, Value>,
257    Local<'s, String>,
258    Local<'s, FixedArray>,
259  ) -> *mut Promise;
260
261#[cfg(all(
262  target_family = "windows",
263  any(target_arch = "x86_64", target_arch = "aarch64")
264))]
265pub type RawHostImportModuleDynamicallyCallback =
266  for<'s> unsafe extern "C" fn(
267    *mut *mut Promise,
268    Local<'s, Context>,
269    Local<'s, Data>,
270    Local<'s, Value>,
271    Local<'s, String>,
272    Local<'s, FixedArray>,
273  ) -> *mut *mut Promise;
274
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Inner adapter shared by both ABI variants: enters a CallbackScope for
    // the callee's context and invokes the user closure obtained via
    // `F::get()`.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // System V ABI: MaybeLocal<Promise> is returned in a register; `None`
    // maps to a null pointer.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI: MaybeLocal<Promise> is returned through a
    // hidden return-value slot passed as the first argument; write the
    // (possibly null) pointer there and return the slot pointer.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
366
367/// HostImportModuleWithPhaseDynamicallyCallback is called when we
368/// require the embedder to load a module with a specific phase. This is used
369/// as part of the dynamic import syntax.
370///
371/// The referrer contains metadata about the script/module that calls
372/// import.
373///
374/// The specifier is the name of the module that should be imported.
375///
376/// The phase is the phase of the import requested.
377///
378/// The import_attributes are import attributes for this request in the form:
379/// [key1, value1, key2, value2, ...] where the keys and values are of type
380/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
381/// returned from ModuleRequest::GetImportAttributes(), this array does not
382/// contain the source Locations of the attributes.
383///
384/// The Promise returned from this function is forwarded to userland
385/// JavaScript. The embedder must resolve this promise according to the phase
386/// requested:
387/// - For ModuleImportPhase::kSource, the promise must be resolved with a
388///   compiled ModuleSource object, or rejected with a SyntaxError if the
389///   module does not support source representation.
390/// - For ModuleImportPhase::kEvaluation, the promise must be resolved with a
391///   ModuleNamespace object of a module that has been compiled, instantiated,
392///   and evaluated.
393///
394/// In case of an exception, the embedder must reject this promise with the
395/// exception. If the promise creation itself fails (e.g. due to stack
396/// overflow), the embedder must propagate that exception by returning an empty
397/// MaybeLocal.
398///
399/// This callback is still experimental and is only invoked for source phase
400/// imports.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts the high-level Rust closure into the platform-specific raw
  /// C ABI function pointer V8 expects (see the blanket impl below).
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
414
415#[cfg(target_family = "unix")]
416pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
417  for<'s> unsafe extern "C" fn(
418    Local<'s, Context>,
419    Local<'s, Data>,
420    Local<'s, Value>,
421    Local<'s, String>,
422    ModuleImportPhase,
423    Local<'s, FixedArray>,
424  ) -> *mut Promise;
425
426#[cfg(all(
427  target_family = "windows",
428  any(target_arch = "x86_64", target_arch = "aarch64")
429))]
430pub type RawHostImportModuleWithPhaseDynamicallyCallback =
431  for<'s> unsafe extern "C" fn(
432    *mut *mut Promise,
433    Local<'s, Context>,
434    Local<'s, Data>,
435    Local<'s, Value>,
436    Local<'s, String>,
437    ModuleImportPhase,
438    Local<'s, FixedArray>,
439  ) -> *mut *mut Promise;
440
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Inner adapter shared by both ABI variants: enters a CallbackScope for
    // the callee's context and invokes the user closure obtained via
    // `F::get()`.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // System V ABI: MaybeLocal<Promise> is returned in a register; `None`
    // maps to a null pointer.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI: MaybeLocal<Promise> is returned through a
    // hidden return-value slot passed as the first argument; write the
    // (possibly null) pointer there and return the slot pointer.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
539
540/// `HostCreateShadowRealmContextCallback` is called each time a `ShadowRealm`
541/// is being constructed. You can use [`HandleScope::get_current_context`] to
542/// get the [`Context`] in which the constructor is being run.
543///
544/// The method combines [`Context`] creation and the implementation-defined
545/// abstract operation `HostInitializeShadowRealm` into one.
546///
547/// The embedder should use [`Context::new`] to create a new context. If the
548/// creation fails, the embedder must propagate that exception by returning
549/// [`None`].
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback carrying an embedder-provided data
/// pointer; registered via the `v8__Isolate__Add/RemoveGC*Callback` FFI
/// functions below.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback run in response to `v8__Isolate__RequestInterrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Callback invoked when the heap is near its limit. The returned value is
/// the heap limit to use going forward (per V8's NearHeapLimitCallback
/// contract).
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

/// Details passed to an [`OomErrorCallback`].
// `char` here is `crate::support::char` (the C character type), so
// `detail` is a raw C string pointer.
#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  pub detail: *const char,
}

/// Handler installed via `v8__Isolate__SetOOMErrorHandler`.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);

// Windows x64 ABI: MaybeLocal<Value> returned on the stack.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// System V ABI: MaybeLocal<Value> returned in a register.
// System V i386 ABI: Local<Value> returned in hidden pointer (struct).
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Re-export of the generated use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback invoked when V8 records a use-counter feature.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
605
// Raw FFI declarations for the C++ glue functions backing the isolate API.
// All of them operate on a raw `RealIsolate` pointer obtained from V8; the
// `*const` vs `*mut` receiver distinction mirrors the C++ side's constness.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  // GC prologue/epilogue callback registration; `data` is the opaque
  // embedder pointer handed back to the callback.
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__GetHeapCodeAndMetadataStatistics(
    isolate: *mut RealIsolate,
    code_statistics: *mut v8__HeapCodeStatistics,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // Two cfg variants: the return-value-slot signature on windows mirrors
  // the `Raw*Callback` ABI split documented above.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  // Heap profiler: streams the snapshot to `callback` in chunks.
  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
785
786/// Isolate represents an isolated instance of the V8 engine.  V8 isolates have
787/// completely separate states.  Objects from one isolate must not be used in
788/// other isolates.  The embedder can create multiple isolates and use them in
789/// parallel in multiple threads.  An isolate can be entered by at most one
790/// thread at any given time.  The Locker/Unlocker API must be used to
791/// synchronize.
792///
793/// rusty_v8 note: Unlike in the C++ API, the Isolate is entered when it is
794/// constructed and exited when dropped. Because of that v8::OwnedIsolate
795/// instances must be dropped in the reverse order of creation
// `repr(transparent)` over `NonNull<RealIsolate>`: an `Isolate` is
// layout-compatible with a non-null raw isolate pointer, which the
// `from_raw_ref*`/`ref_from_raw_isolate_ptr*` helpers below rely on.
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);
799
800#[repr(transparent)]
801#[derive(Debug, Clone, Copy)]
802pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
803
804impl UnsafeRawIsolatePtr {
805  pub fn null() -> Self {
806    Self(std::ptr::null_mut())
807  }
808
809  pub fn is_null(&self) -> bool {
810    self.0.is_null()
811  }
812}
813
/// Opaque stand-in for the C++ `v8::Isolate` object. Never constructed on
/// the Rust side; only pointers to it are passed around.
#[repr(C)]
pub struct RealIsolate(Opaque);
816
impl Isolate {
  /// Returns the underlying raw `RealIsolate` pointer for FFI calls.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Wraps this isolate's pointer in an [`UnsafeRawIsolatePtr`].
  ///
  /// # Safety
  /// The returned pointer carries no lifetime; it must not be used after
  /// the isolate has been disposed.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }

  /// Rebuilds an `Isolate` handle from a raw pointer.
  ///
  /// # Panics
  /// Panics if `ptr` is null.
  ///
  /// # Safety
  /// `ptr` must point at a live V8 isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
    Self(NonNull::new(ptr.0).unwrap())
  }

  /// Like [`Isolate::from_raw_isolate_ptr`], but skips the null check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point at a live V8 isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }

  /// Builds an `Isolate` handle from a bare pointer without a null check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point at a live V8 isolate.
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }

  /// Builds an `Isolate` handle from a bare pointer, panicking on null.
  ///
  /// # Safety
  /// `ptr` must point at a live V8 isolate.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }
845
  /// Reinterprets a borrowed [`UnsafeRawIsolatePtr`] as a borrowed
  /// `Isolate`, panicking on null. The cast relies on both types being
  /// `repr(transparent)` pointer wrappers.
  ///
  /// # Safety
  /// The pointer must refer to a live V8 isolate for the duration of the
  /// borrow.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Like [`Isolate::ref_from_raw_isolate_ptr`], but skips the null check.
  ///
  /// # Safety
  /// `ptr` must be non-null (so the `NonNull` inside `Isolate` is valid)
  /// and refer to a live V8 isolate for the duration of the borrow.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Mutable variant of [`Isolate::ref_from_raw_isolate_ptr`]; panics on
  /// null.
  ///
  /// # Safety
  /// Same as `ref_from_raw_isolate_ptr`, plus the usual exclusivity
  /// requirements of a `&mut` borrow.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Mutable variant of [`Isolate::ref_from_raw_isolate_ptr_unchecked`].
  ///
  /// # Safety
  /// `ptr` must be non-null; same borrow requirements as
  /// `ref_from_raw_isolate_ptr_mut`.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }
877
  /// Wraps an already-validated `NonNull` isolate pointer.
  ///
  /// # Safety
  /// `ptr` must point at a live V8 isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets a borrowed `NonNull<RealIsolate>` as a borrowed
  /// `Isolate`.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable variant of [`Isolate::from_raw_ref`].
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    // SAFETY: Isolate is a repr(transparent) wrapper around NonNull<RealIsolate>
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }

  // Isolate data slots used internally by rusty_v8.
  // Slot 0 holds the annex pointer (see `create_annex`); a second slot is
  // apparently reserved as well — TODO confirm what uses slot 1.
  const ANNEX_SLOT: u32 = 0;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  /// Sanity check: V8 must provide at least the number of embedder data
  /// slots that rusty_v8 reserves for itself.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
908
  /// Shared construction path for [`Isolate::new`] and
  /// [`Isolate::new_unentered`]: finalizes the creation parameters, asks
  /// V8 for a fresh isolate, and runs rusty_v8's per-isolate setup before
  /// handing the raw pointer back to the caller.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    // Temporary pointer-wrapper handle used only to run `initialize`.
    // NOTE(review): `Isolate` has no visible `Drop`, so dropping it here
    // should not dispose the isolate (ownership lives in the returned
    // Owned/UnenteredIsolate) — confirm.
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Per-isolate setup run right after creation: verifies the embedder
  /// data slot layout, then installs the annex (presumably keeping
  /// `create_param_allocations` alive — `create_annex` is defined
  /// elsewhere).
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }

  /// Creates a new isolate.  Does not change the currently entered
  /// isolate.
  ///
  /// When an isolate is no longer used its resources should be freed
  /// by calling V8::dispose().  Using the delete operator is not allowed.
  ///
  /// V8::initialize() must have run prior to this.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }

  /// Creates an isolate for use with `v8::Locker` in multi-threaded scenarios.
  ///
  /// Unlike `Isolate::new()`, this does not automatically enter the isolate.
  #[allow(clippy::new_ret_no_self)]
  pub fn new_unentered(params: CreateParams) -> UnenteredIsolate {
    UnenteredIsolate::new(Self::new_impl(params))
  }
942
943  #[allow(clippy::new_ret_no_self)]
944  pub fn snapshot_creator(
945    external_references: Option<Cow<'static, [ExternalReference]>>,
946    params: Option<CreateParams>,
947  ) -> OwnedIsolate {
948    SnapshotCreator::new(external_references, params)
949  }
950
951  #[allow(clippy::new_ret_no_self)]
952  pub fn snapshot_creator_from_existing_snapshot(
953    existing_snapshot_blob: StartupData,
954    external_references: Option<Cow<'static, [ExternalReference]>>,
955    params: Option<CreateParams>,
956  ) -> OwnedIsolate {
957    SnapshotCreator::from_existing_snapshot(
958      existing_snapshot_blob,
959      external_references,
960      params,
961    )
962  }
963
  /// Initial configuration parameters for a new Isolate.
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }

  /// Returns an [`IsolateHandle`] for this isolate, which can be used to
  /// signal it (e.g. terminate execution) from other threads.
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }

  /// See [`IsolateHandle::terminate_execution`]
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    // Convenience wrapper: delegates through a fresh thread-safe handle.
    self.thread_safe_handle().terminate_execution()
  }

  /// See [`IsolateHandle::cancel_terminate_execution`]
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  /// See [`IsolateHandle::is_execution_terminating`]
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }
992
  // Allocates the `IsolateAnnex` (rusty_v8's per-isolate bookkeeping) and
  // stashes a leaked `Arc` pointer to it in the reserved `ANNEX_SLOT`.
  // The matching `Arc::from_raw` happens in `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must not already hold an annex.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
1002
1003  unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
1004    // Set the `isolate` pointer inside the annex struct to null, so any
1005    // IsolateHandle that outlives the isolate will know that it can't call
1006    // methods on the isolate.
1007    let annex = self.get_annex_mut();
1008    {
1009      let _lock = annex.isolate_mutex.lock();
1010      annex.isolate = null_mut();
1011    }
1012
1013    // Clear slots and drop owned objects that were taken out of `CreateParams`.
1014    let create_param_allocations =
1015      std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1016    annex.slots.clear();
1017
1018    // Run through any remaining guaranteed finalizers.
1019    for finalizer in annex.finalizer_map.drain() {
1020      if let FinalizerCallback::Guaranteed(callback) = finalizer {
1021        callback();
1022      }
1023    }
1024
1025    // Subtract one from the Arc<IsolateAnnex> reference count.
1026    unsafe { Arc::from_raw(annex) };
1027    self.set_data(0, null_mut());
1028
1029    create_param_allocations
1030  }
1031
  // Returns a shared reference to this isolate's `IsolateAnnex`.
  // Panics if the annex has not been created yet (see `create_annex`).
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  // Mutable variant of `get_annex`; same panic behavior.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }

  // Installs the `SnapshotCreator` in the annex. Asserts that no snapshot
  // creator was installed before.
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }

  // Shared access to the annex's finalizer map.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }

  // Mutable access to the annex's finalizer map.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }

  // Clones the `Arc` that owns the annex: temporarily reconstructs the Arc
  // from the raw slot pointer, clones it, then immediately re-leaks one
  // reference so the count held by the data slot stays balanced.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1073
  /// Retrieve embedder-specific data from the isolate.
  /// Returns NULL if SetData has never been called for the given `slot`.
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    // Embedder-visible slots start after the slots reserved by rusty_v8.
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }

  /// Associate embedder-specific data with the isolate. `slot` has to be
  /// between 0 and `Isolate::get_number_of_data_slots()`.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }

  /// Returns the maximum number of available embedder data slots. Valid slots
  /// are in the range of `0 <= n < Isolate::get_number_of_data_slots()`.
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    // Subtract the slots rusty_v8 reserves for itself.
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }

  // Raw (un-offset) slot read, including rusty_v8's internal slots.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }

  // Raw (un-offset) slot write, including rusty_v8's internal slots.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1103
1104  // pub(crate) fn init_scope_root(&mut self) {
1105  //   ScopeData::new_root(self);
1106  // }
1107
1108  // pub(crate) fn dispose_scope_root(&mut self) {
1109  //   ScopeData::drop_root(self);
1110  // }
1111
1112  // /// Returns a pointer to the `ScopeData` struct for the current scope.
1113  // #[inline(always)]
1114  // pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
1115  //   let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
1116  //   NonNull::new(scope_data_ptr).map(NonNull::cast)
1117  // }
1118
1119  // /// Updates the slot that stores a `ScopeData` pointer for the current scope.
1120  // #[inline(always)]
1121  // pub(crate) fn set_current_scope_data(
1122  //   &mut self,
1123  //   scope_data: Option<NonNull<ScopeData>>,
1124  // ) {
1125  //   let scope_data_ptr = scope_data
1126  //     .map(NonNull::cast)
1127  //     .map_or_else(null_mut, NonNull::as_ptr);
1128  //   self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
1129  // }
1130
  /// Get a reference to embedder data added with `set_slot()`.
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    // Slots are keyed by `TypeId`, so at most one value per type is stored.
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow::<T>() })
  }

  /// Get a mutable reference to embedder data added with `set_slot()`.
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }

  /// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
  /// with an Isolate.
  ///
  /// This method gives ownership of value to the Isolate. Exactly one object of
  /// each type can be associated with an Isolate. If called more than once with
  /// an object of the same type, the earlier version will be dropped and
  /// replaced.
  ///
  /// Returns true if value was set without replacing an existing value.
  ///
  /// The value will be dropped when the isolate is dropped.
  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }

  /// Removes the embedder data added with `set_slot()` and returns it if it exists.
  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      .remove(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }
1180
  /// Sets this isolate as the entered one for the current thread.
  /// Saves the previously entered one (if any), so that it can be
  /// restored when exiting.  Re-entering an isolate is allowed.
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Must be balanced with a matching [`Isolate::exit`] on the same thread.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }

  /// Exits this isolate by restoring the previously entered one in the
  /// current thread.  The isolate may still stay the same, if it was
  /// entered more than once.
  ///
  /// Requires: self == Isolate::GetCurrent().
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Must match a prior [`Isolate::enter`] on the same thread.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }
1208
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to guide heuristics.
  /// It is allowed to call this function from another thread while
  /// the isolate is executing long running JavaScript code.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    // The enum discriminant is passed across the FFI boundary as a u8.
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }

  /// Clears the set of objects held strongly by the heap. This set of
  /// objects are originally built when a WeakRef is created or
  /// successfully dereferenced.
  ///
  /// This is invoked automatically after microtasks are run. See
  /// MicrotasksPolicy for when microtasks are run.
  ///
  /// This needs to be manually invoked only if the embedder is manually
  /// running microtasks via a custom MicrotaskQueue class's PerformCheckpoint.
  /// In that case, it is the embedder's responsibility to make this call at a
  /// time which does not interrupt synchronous ECMAScript code execution.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }

  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to attempt to free memory.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }
1242
  /// Get statistics about the heap memory usage.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      // Zero-initialized out-struct is filled in by the C++ side.
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }

  /// Returns the number of spaces in the heap.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }

  /// Get the memory usage of a space in the heap.
  ///
  /// `index` is the index of the space to get statistics from, which ranges
  /// from 0 to `number_of_heap_spaces()` - 1.
  ///
  /// Returns `None` when the underlying V8 call reports failure.
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }

  /// Get code and metadata statistics for the heap.
  ///
  /// Returns `None` when the underlying V8 call reports failure.
  #[inline(always)]
  pub fn get_heap_code_and_metadata_statistics(
    &mut self,
  ) -> Option<HeapCodeStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapCodeAndMetadataStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapCodeStatistics(inner))
  }
1305
  /// Tells V8 to capture current stack trace when uncaught exception occurs
  /// and report it to the message listeners. The option is off by default.
  ///
  /// `frame_limit` bounds the number of captured stack frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }

  /// Adds a message listener (errors only).
  ///
  /// The same message listener can be added more than once and in that
  /// case it will be called more than once for each message.
  ///
  /// The exception object will be passed to the callback.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }

  /// Adds a message listener for the specified message levels.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1349
  /// This specifies the callback called when the stack property of Error
  /// is accessed.
  ///
  /// PrepareStackTraceCallback is called when the stack property of an error is
  /// first accessed. The return value will be used as the stack value. If this
  /// callback is registered, the |Error.prepareStackTrace| API will be disabled.
  /// |sites| is an array of call sites, specified in
  /// https://v8.dev/docs/stack-trace-api
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    // Note: the C++ API returns a MaybeLocal but V8 asserts at runtime when
    // it's empty. That is, you can't return None and that's why the Rust API
    // expects Local<Value> instead of Option<Local<Value>>.
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1373
  /// Set the PromiseHook callback for various promise lifecycle
  /// events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }

  /// Set callback to notify about promise reject with no handler, or
  /// revocation of such a previous notification once the handler is added.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }

  /// Sets the callback V8 invokes to resolve the promise returned by
  /// asynchronous WebAssembly compilation APIs.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }

  /// Sets the callback that decides whether Wasm code generation is allowed.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1418
  /// This specifies the callback called by the upcoming import.meta
  /// language feature to retrieve host-defined meta data for a module.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1433
  /// This specifies the callback called by the upcoming dynamic
  /// import() language feature to load modules.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }

  /// This specifies the callback called by the upcoming dynamic
  /// import() and import.source() language feature to load modules.
  ///
  /// This API is experimental and is expected to be changed or removed in the
  /// future. The callback is currently only called for source-phase
  /// imports. Evaluation-phase imports use the existing
  /// HostImportModuleDynamicallyCallback callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1468
  /// This specifies the callback called by the upcoming `ShadowRealm`
  /// construction language feature to retrieve host created globals.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // C-ABI trampoline: recovers the Rust callback from the isolate's slot
    // storage and invokes it inside a CallbackScope.
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      // `None` becomes a null pointer (empty MaybeLocal on the C++ side).
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows x64 ABI: MaybeLocal<Context> must be returned on the stack.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // The callback itself lives in an isolate slot; the C++ side only needs
    // to learn about the trampoline the first time a callback is installed.
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1518
  /// Sets a callback for counting the number of times a feature of V8 is used.
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }

  /// Enables the host application to receive a notification before a
  /// garbage collection. Allocations are allowed in the callback function,
  /// but the callback is not re-entrant: if the allocation inside it will
  /// trigger the garbage collection, the callback won't be called again.
  /// It is possible to specify the GCType filter for your callback. But it is
  /// not possible to register the same callback function two times with
  /// different GCType filters.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }

  /// This function removes callback which was installed by
  /// AddGCPrologueCallback function.
  ///
  /// Both `callback` and `data` must match the registering call.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }

  /// Enables the host application to receive a notification after a
  /// garbage collection.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }

  /// This function removes a callback which was added by
  /// `AddGCEpilogueCallback`.
  ///
  /// Both `callback` and `data` must match the registering call.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1599
  /// Add a callback to invoke in case the heap size is close to the heap limit.
  /// If multiple callbacks are added, only the most recently added callback is
  /// invoked.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }

  /// Remove the given callback and restore the heap limit to the given limit.
  /// If the given limit is zero, then it is ignored. If the current heap size
  /// is greater than the given limit, then the heap limit is restored to the
  /// minimal limit that is possible for the current heap size.
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1633
  /// Adjusts the amount of registered external memory. Used to give V8 an
  /// indication of the amount of externally allocated memory that is kept
  /// alive by JavaScript objects. V8 uses this to decide when to perform
  /// global garbage collections. Registering externally allocated memory
  /// will trigger global garbage collections more often than it would
  /// otherwise in an attempt to garbage collect the JavaScript objects
  /// that keep the externally allocated memory alive.
  ///
  /// Returns the adjusted total as reported by V8.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }

  /// Returns the cppgc [`Heap`] attached to this isolate, if any.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    // Null pointer from the C++ side maps to `None`.
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }

  /// Sets the handler invoked when V8 runs out of memory.
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1663
  /// Returns the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }

  /// Sets the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1675
  /// Runs the default MicrotaskQueue until it gets empty and perform other
  /// microtask checkpoint steps, such as calling ClearKeptObjects. Asserts that
  /// the MicrotasksPolicy is not kScoped. Any exceptions thrown by microtask
  /// callbacks are swallowed.
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }

  /// Enqueues the callback to the default MicrotaskQueue
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }

  /// Set whether calling Atomics.wait (a function that may block) is allowed in
  /// this isolate. This can also be configured via
  /// CreateParams::allow_atomics_wait.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1698
  /// Embedder injection point for `WebAssembly.compileStreaming(source)`.
  /// The expectation is that the embedder sets it at most once.
  ///
  /// The callback receives the source argument (string, Promise, etc.)
  /// and an instance of [WasmStreaming]. The [WasmStreaming] instance
  /// can outlive the callback and is used to feed data chunks to V8
  /// asynchronously.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming<false>,
      ),
  {
    // The closure value is discarded: `F: UnitType` means it is zero-sized,
    // so the monomorphized `trampoline::<F>` alone suffices to invoke it.
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1723
  /// Notification that the embedder has changed the time zone, daylight savings
  /// time or other date / time configuration parameters. V8 keeps a cache of
  /// various values used for date / time computation. This notification will
  /// reset those cached values for the current context so that date / time
  /// configuration changes would be reflected.
  ///
  /// This API should not be called more than needed as it will negatively impact
  /// the performance of date operations.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }

  /// Returns true if there is ongoing background work within V8 that will
  /// eventually post a foreground task, like asynchronous WebAssembly
  /// compilation.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1752
1753  /// Request garbage collection with a specific embedderstack state in this
1754  /// Isolate. It is only valid to call this function if --expose_gc was
1755  /// specified.
1756  ///
1757  /// This should only be used for testing purposes and not to enforce a garbage
1758  /// collection schedule. It has strong negative impact on the garbage
1759  /// collection performance. Use IdleNotificationDeadline() or
1760  /// LowMemoryNotification() instead to influence the garbage collection
1761  /// schedule.
1762  #[inline(always)]
1763  pub fn request_garbage_collection_for_testing(
1764    &mut self,
1765    r#type: GarbageCollectionType,
1766  ) {
1767    unsafe {
1768      v8__Isolate__RequestGarbageCollectionForTesting(
1769        self.as_real_ptr(),
1770        match r#type {
1771          GarbageCollectionType::Full => 0,
1772          GarbageCollectionType::Minor => 1,
1773        },
1774      );
1775    }
1776  }
1777
  /// Disposes the isolate.  The isolate must not be entered by any
  /// thread to be disposable.
  unsafe fn dispose(&mut self) {
    // No test case in rusty_v8 show this, but there have been situations in
    // deno where dropping Annex before the states causes a segfault.
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1787
  /// Take a heap snapshot. The callback is invoked one or more times
  /// with byte slices containing the snapshot serialized as JSON.
  /// It's the callback's responsibility to reassemble them into
  /// a single document, e.g., by writing them to a file.
  /// Note that Chrome DevTools refuses to load snapshots without
  /// a .heapsnapshot suffix.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C-ABI shim: `arg` is a raw pointer back to the caller's closure.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          // Avoid `from_raw_parts` when size == 0 (`data` may not be valid).
          (callback.as_mut())(&[])
        }
      }
    }

    // NOTE(review): passing a stack address relies on the C++ side calling
    // the trampoline only for the duration of this FFI call — confirm.
    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1825
1826  /// Set the default context to be included in the snapshot blob.
1827  /// The snapshot will not contain the global proxy, and we expect one or a
1828  /// global object template to create one, to be provided upon deserialization.
1829  ///
1830  /// # Panics
1831  ///
1832  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1833  #[inline(always)]
1834  pub fn set_default_context(&mut self, context: Local<Context>) {
1835    let snapshot_creator = self
1836      .get_annex_mut()
1837      .maybe_snapshot_creator
1838      .as_mut()
1839      .unwrap();
1840    snapshot_creator.set_default_context(context);
1841  }
1842
1843  /// Add additional context to be included in the snapshot blob.
1844  /// The snapshot will include the global proxy.
1845  ///
1846  /// Returns the index of the context in the snapshot blob.
1847  ///
1848  /// # Panics
1849  ///
1850  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1851  #[inline(always)]
1852  pub fn add_context(&mut self, context: Local<Context>) -> usize {
1853    let snapshot_creator = self
1854      .get_annex_mut()
1855      .maybe_snapshot_creator
1856      .as_mut()
1857      .unwrap();
1858    snapshot_creator.add_context(context)
1859  }
1860
1861  /// Attach arbitrary `v8::Data` to the isolate snapshot, which can be
1862  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1863  /// deserialization. This data does not survive when a new snapshot is created
1864  /// from an existing snapshot.
1865  ///
1866  /// # Panics
1867  ///
1868  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1869  #[inline(always)]
1870  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1871  where
1872    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1873  {
1874    let snapshot_creator = self
1875      .get_annex_mut()
1876      .maybe_snapshot_creator
1877      .as_mut()
1878      .unwrap();
1879    snapshot_creator.add_isolate_data(data)
1880  }
1881
1882  /// Attach arbitrary `v8::Data` to the context snapshot, which can be
1883  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1884  /// deserialization. This data does not survive when a new snapshot is
1885  /// created from an existing snapshot.
1886  ///
1887  /// # Panics
1888  ///
1889  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1890  #[inline(always)]
1891  pub fn add_context_data<T>(
1892    &mut self,
1893    context: Local<Context>,
1894    data: Local<T>,
1895  ) -> usize
1896  where
1897    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1898  {
1899    let snapshot_creator = self
1900      .get_annex_mut()
1901      .maybe_snapshot_creator
1902      .as_mut()
1903      .unwrap();
1904    snapshot_creator.add_context_data(context, data)
1905  }
1906}
1907
/// Rust-side companion state for a V8 isolate, shared (via `Arc`) with any
/// `IsolateHandle`s that may outlive the isolate itself.
pub(crate) struct IsolateAnnex {
  // Keeps allocations referenced by the isolate's create params alive for
  // the lifetime of the isolate (see `create_blob`, which needs them).
  create_param_allocations: Box<dyn Any>,
  // Type-keyed user slots; see `RawSlot` below for the storage scheme.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  // Present only for isolates created via `Isolate::snapshot_creator`.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // The `isolate` and `isolate_mutex` fields are there so an `IsolateHandle`
  // (which may outlive the isolate itself) can determine whether the isolate
  // is still alive, and if so, get a reference to it. Safety rules:
  // - The 'main thread' must lock the mutex and reset `isolate` to null just
  //   before the isolate is disposed.
  // - Any other thread must lock the mutex while it's reading/using the
  //   `isolate` pointer.
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
1923
// SAFETY: the raw `isolate` pointer is only read or reset while
// `isolate_mutex` is held (see the safety rules on the struct fields).
// NOTE(review): `slots` and `finalizer_map` have no synchronization here;
// presumably only the isolate's owning thread touches them — confirm.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1926
1927impl IsolateAnnex {
1928  fn new(
1929    isolate: &mut Isolate,
1930    create_param_allocations: Box<dyn Any>,
1931  ) -> Self {
1932    Self {
1933      create_param_allocations,
1934      slots: HashMap::default(),
1935      finalizer_map: FinalizerMap::default(),
1936      maybe_snapshot_creator: None,
1937      isolate: isolate.as_real_ptr(),
1938      isolate_mutex: Mutex::new(()),
1939    }
1940  }
1941}
1942
1943impl Debug for IsolateAnnex {
1944  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1945    f.debug_struct("IsolateAnnex")
1946      .field("isolate", &self.isolate)
1947      .field("isolate_mutex", &self.isolate_mutex)
1948      .finish()
1949  }
1950}
1951
/// IsolateHandle is a thread-safe reference to an Isolate. Its main use is to
/// terminate execution of a running isolate from another thread.
///
/// It is created with Isolate::thread_safe_handle().
///
/// IsolateHandle is Cloneable, Send, and Sync.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1960
1961impl IsolateHandle {
1962  // This function is marked unsafe because it must be called only with either
1963  // IsolateAnnex::mutex locked, or from the main thread associated with the V8
1964  // isolate.
1965  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1966    self.0.isolate
1967  }
1968
1969  #[inline(always)]
1970  fn new(isolate: &Isolate) -> Self {
1971    Self(isolate.get_annex_arc())
1972  }
1973
1974  /// Forcefully terminate the current thread of JavaScript execution
1975  /// in the given isolate.
1976  ///
1977  /// This method can be used by any thread even if that thread has not
1978  /// acquired the V8 lock with a Locker object.
1979  ///
1980  /// Returns false if Isolate was already destroyed.
1981  #[inline(always)]
1982  pub fn terminate_execution(&self) -> bool {
1983    let _lock = self.0.isolate_mutex.lock();
1984    if self.0.isolate.is_null() {
1985      false
1986    } else {
1987      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1988      true
1989    }
1990  }
1991
1992  /// Resume execution capability in the given isolate, whose execution
1993  /// was previously forcefully terminated using TerminateExecution().
1994  ///
1995  /// When execution is forcefully terminated using TerminateExecution(),
1996  /// the isolate can not resume execution until all JavaScript frames
1997  /// have propagated the uncatchable exception which is generated.  This
1998  /// method allows the program embedding the engine to handle the
1999  /// termination event and resume execution capability, even if
2000  /// JavaScript frames remain on the stack.
2001  ///
2002  /// This method can be used by any thread even if that thread has not
2003  /// acquired the V8 lock with a Locker object.
2004  ///
2005  /// Returns false if Isolate was already destroyed.
2006  #[inline(always)]
2007  pub fn cancel_terminate_execution(&self) -> bool {
2008    let _lock = self.0.isolate_mutex.lock();
2009    if self.0.isolate.is_null() {
2010      false
2011    } else {
2012      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
2013      true
2014    }
2015  }
2016
2017  /// Is V8 terminating JavaScript execution.
2018  ///
2019  /// Returns true if JavaScript execution is currently terminating
2020  /// because of a call to TerminateExecution.  In that case there are
2021  /// still JavaScript frames on the stack and the termination
2022  /// exception is still active.
2023  ///
2024  /// Returns false if Isolate was already destroyed.
2025  #[inline(always)]
2026  pub fn is_execution_terminating(&self) -> bool {
2027    let _lock = self.0.isolate_mutex.lock();
2028    if self.0.isolate.is_null() {
2029      false
2030    } else {
2031      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
2032    }
2033  }
2034
2035  /// Request V8 to interrupt long running JavaScript code and invoke
2036  /// the given |callback| passing the given |data| to it. After |callback|
2037  /// returns control will be returned to the JavaScript code.
2038  /// There may be a number of interrupt requests in flight.
2039  /// Can be called from another thread without acquiring a |Locker|.
2040  /// Registered |callback| must not reenter interrupted Isolate.
2041  ///
2042  /// Returns false if Isolate was already destroyed.
2043  // Clippy warns that this method is dereferencing a raw pointer, but it is
2044  // not: https://github.com/rust-lang/rust-clippy/issues/3045
2045  #[allow(clippy::not_unsafe_ptr_arg_deref)]
2046  #[inline(always)]
2047  pub fn request_interrupt(
2048    &self,
2049    callback: InterruptCallback,
2050    data: *mut c_void,
2051  ) -> bool {
2052    let _lock = self.0.isolate_mutex.lock();
2053    if self.0.isolate.is_null() {
2054      false
2055    } else {
2056      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2057      true
2058    }
2059  }
2060}
2061
/// Same as Isolate but gets disposed when it goes out of scope.
#[derive(Debug)]
pub struct OwnedIsolate {
  // Owning pointer to the C++ isolate; disposed in `Drop`.
  cxx_isolate: NonNull<RealIsolate>,
}
2067
2068impl OwnedIsolate {
2069  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2070    let isolate = Self::new_already_entered(cxx_isolate);
2071    unsafe {
2072      isolate.enter();
2073    }
2074    isolate
2075  }
2076
2077  pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2078    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2079    let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2080    // owned_isolate.init_scope_root();
2081    owned_isolate
2082  }
2083}
2084
impl Drop for OwnedIsolate {
  // Teardown order matters: check snapshot invariant, exit the isolate,
  // dispose the annex, notify the platform, then dispose the isolate itself.
  fn drop(&mut self) {
    unsafe {
      // A live SnapshotCreator means the caller forgot to call create_blob;
      // disposing underneath it would be unsound, so fail loudly instead.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Safety: We need to check `this == Isolate::GetCurrent()` before calling exit()
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      // self.dispose_scope_root();
      self.exit();
      // Annex must go before dispose(); see the comment in `Isolate::dispose`.
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2106
impl OwnedIsolate {
  /// Creates a snapshot data blob.
  /// This must not be called from within a handle scope.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // create_param_allocations is needed during CreateBlob
    // so v8 can read external references
    let _create_param_allocations = unsafe {
      // self.dispose_scope_root();
      self.dispose_annex()
    };

    // The isolate is owned by the snapshot creator; we need to forget it
    // here as the snapshot creator will drop it when running the destructor.
    // (Running OwnedIsolate's Drop here would dispose the isolate twice.)
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2135
2136impl Deref for OwnedIsolate {
2137  type Target = Isolate;
2138  fn deref(&self) -> &Self::Target {
2139    unsafe {
2140      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
2141    }
2142  }
2143}
2144
2145impl DerefMut for OwnedIsolate {
2146  fn deref_mut(&mut self) -> &mut Self::Target {
2147    unsafe {
2148      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
2149        &mut self.cxx_isolate,
2150      )
2151    }
2152  }
2153}
2154
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Deref coercion: `&mut OwnedIsolate` -> `&mut Isolate` via `DerefMut`.
    self
  }
}
2160
impl AsMut<Isolate> for Isolate {
  // Identity impl so generic code can take `impl AsMut<Isolate>` uniformly.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2166
/// An isolate that must be accessed via [`Locker`].
///
/// Unlike [`OwnedIsolate`], this isolate does not automatically enter itself
/// upon creation. Instead, you must use a [`Locker`] to access it:
///
/// ```ignore
/// let mut isolate = v8::Isolate::new_unentered(Default::default());
///
/// // Access the isolate through a Locker
/// {
///     let mut locker = v8::Locker::new(&mut isolate);
///     let scope = &mut v8::HandleScope::new(&mut *locker);
///     // ... use scope ...
/// }
///
/// // The locker is dropped, isolate can be used from another thread
/// ```
///
/// # Thread Safety
///
/// `UnenteredIsolate` implements `Send`, meaning it can be transferred between
/// threads. However, V8 isolates are not thread-safe by themselves. You must:
///
/// 1. Only access the isolate through a [`Locker`]
/// 2. Never have multiple `Locker`s for the same isolate simultaneously
///    (V8 will block if you try)
///
/// # Dropping
///
/// When dropped, the isolate will be properly disposed. The drop will panic
/// if a [`Locker`] is currently held for this isolate.
#[derive(Debug)]
pub struct UnenteredIsolate {
  // Owning pointer to the C++ isolate; disposed in `Drop`, never entered by
  // this wrapper itself (entering happens in `Locker::new`).
  cxx_isolate: NonNull<RealIsolate>,
}
2202
2203impl UnenteredIsolate {
2204  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2205    Self {
2206      cxx_isolate: NonNull::new(cxx_isolate).unwrap(),
2207    }
2208  }
2209
2210  /// Returns the raw pointer to the underlying V8 isolate.
2211  ///
2212  /// # Safety
2213  ///
2214  /// The returned pointer is only valid while this `UnenteredIsolate` exists
2215  /// and should only be used while a [`Locker`] is held.
2216  #[inline]
2217  pub fn as_raw(&self) -> *mut RealIsolate {
2218    self.cxx_isolate.as_ptr()
2219  }
2220}
2221
impl Drop for UnenteredIsolate {
  // Same teardown sequence as `Drop for OwnedIsolate`, minus the exit()
  // call — this isolate was never entered by the wrapper.
  fn drop(&mut self) {
    // Safety check: ensure no Locker is held
    debug_assert!(
      !crate::scope::raw::Locker::is_locked(self.cxx_isolate),
      "Cannot drop UnenteredIsolate while a Locker is held. \
       Drop the Locker first."
    );

    unsafe {
      let isolate = Isolate::from_raw_ref_mut(&mut self.cxx_isolate);
      // A live SnapshotCreator means create_blob was never called; disposing
      // underneath it would be unsound, so fail loudly.
      let snapshot_creator =
        isolate.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "v8::UnenteredIsolate::create_blob must be called before dropping"
      );
      // Annex must be disposed before the isolate itself.
      isolate.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), isolate);
      isolate.dispose();
    }
  }
}
2245
// SAFETY: UnenteredIsolate can be sent between threads because:
// 1. The underlying V8 isolate is not accessed directly - all access goes through Locker
// 2. Locker ensures proper synchronization when accessing the isolate
// 3. V8's Locker internally uses a mutex to prevent concurrent access
// Note: `Sync` is deliberately NOT implemented - shared references across
// threads would bypass the Locker discipline.
unsafe impl Send for UnenteredIsolate {}
2251
/// Collection of V8 heap information.
///
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
/// get heap statistics from V8.
// Newtype over the raw binding struct; accessors below expose its fields.
pub struct HeapStatistics(v8__HeapStatistics);
2257
impl HeapStatistics {
  /// Total size of the V8 heap, in bytes.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Portion of the heap reserved for executable code, in bytes.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Physical memory actually committed for the heap, in bytes.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Heap memory still available before the heap limit is hit, in bytes.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Total size reserved for global handles, in bytes.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Size of global handles currently in use, in bytes.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Heap memory currently in use by live objects, in bytes.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Hard limit the heap may grow to, in bytes.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Memory obtained through malloc, in bytes.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// Externally-allocated memory reported to V8, in bytes.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Peak of `malloced_memory` over the isolate's lifetime, in bytes.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Number of native contexts currently alive.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Number of contexts that were detached but not yet garbage collected.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Returns the total number of bytes allocated since the Isolate was created.
  /// This includes all heap objects allocated in any space (new, old, code,
  /// etc.).
  #[inline(always)]
  pub fn total_allocated_bytes(&self) -> u64 {
    self.0.total_allocated_bytes_
  }

  /// Returns whether V8 overwrites heap garbage with a bit pattern
  /// ("heap zapping", a debugging aid).
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2339
/// Statistics for a single V8 heap space; filled in by
/// `Isolate::GetHeapSpaceStatistics`.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2341
2342impl HeapSpaceStatistics {
2343  pub fn space_name(&self) -> &'static CStr {
2344    unsafe { CStr::from_ptr(self.0.space_name_) }
2345  }
2346
2347  pub fn space_size(&self) -> usize {
2348    self.0.space_size_
2349  }
2350
2351  pub fn space_used_size(&self) -> usize {
2352    self.0.space_used_size_
2353  }
2354
2355  pub fn space_available_size(&self) -> usize {
2356    self.0.space_available_size_
2357  }
2358
2359  pub fn physical_space_size(&self) -> usize {
2360    self.0.physical_space_size_
2361  }
2362}
2363
/// Code/metadata size statistics; filled in by
/// `Isolate::GetHeapCodeAndMetadataStatistics`.
pub struct HeapCodeStatistics(v8__HeapCodeStatistics);
2365
2366impl HeapCodeStatistics {
2367  pub fn code_and_metadata_size(&self) -> usize {
2368    self.0.code_and_metadata_size_
2369  }
2370
2371  pub fn bytecode_and_metadata_size(&self) -> usize {
2372    self.0.bytecode_and_metadata_size_
2373  }
2374
2375  pub fn external_script_source_size(&self) -> usize {
2376    self.0.external_script_source_size_
2377  }
2378
2379  pub fn cpu_profiler_metadata_size(&self) -> usize {
2380    self.0.cpu_profiler_metadata_size_
2381  }
2382}
2383
// Adapts a user-supplied Rust closure into the C ABI entry point V8 expects
// for the prepare-stack-trace callback. Two variants are needed because the
// return-value ABI for aggregates differs per platform.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows x64 ABI: MaybeLocal<Value> returned on the stack.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      // Scopes are address-sensitive, hence pin!-then-init.
      // Assumes `context` is live when V8 invokes the callback.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      // Write the result through the caller-provided return slot and hand
      // the same pointer back, per the Windows x64 aggregate-return ABI.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // System V ABI
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      // Scopes are address-sensitive, hence pin!-then-init.
      // Assumes `context` is live when V8 invokes the callback.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      // On System V the result is returned by value in registers.
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2419
/// A special hasher that is optimized for hashing `std::any::TypeId` values.
/// `TypeId` values are actually 64-bit values which themselves come out of some
/// hash function, so it's unnecessary to shuffle their bits any further.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // `None` until the single expected `write_u64` call; then the raw value.
  state: Option<u64>,
}
2427
impl Hasher for TypeIdHasher {
  // TypeId's Hash impl is expected to call `write_u64` exactly once; any
  // other entry point indicates misuse of this hasher.
  // NOTE(review): `Hasher`'s default `write_u128` forwards to `write` and
  // would panic here if a future Rust hashed TypeId that way — confirm.
  fn write(&mut self, _bytes: &[u8]) {
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // The internal hash function of TypeId only takes the bottom 64-bits, even on versions
    // of Rust that use a 128-bit TypeId.
    let prev_state = self.state.replace(value);
    // Exactly one write per hash; a second write would silently drop bits.
    debug_assert_eq!(prev_state, None);
  }

  #[inline]
  fn finish(&self) -> u64 {
    // Panics if `write_u64` was never called — a usage bug, not a data error.
    self.state.unwrap()
  }
}
2446
/// Factory for instances of `TypeIdHasher`. This is the type that one would
/// pass to the constructor of some map/set type in order to make it use
/// `TypeIdHasher` instead of the default hasher implementation.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;
2452
2453impl BuildHasher for BuildTypeIdHasher {
2454  type Hasher = TypeIdHasher;
2455
2456  #[inline]
2457  fn build_hasher(&self) -> Self::Hasher {
2458    Default::default()
2459  }
2460}
2461
// Compile-time guard: the `TypeIdHasher`/`RawSlot` machinery assumes `TypeId`
// has the layout of a 64- or 128-bit integer.
const _: () = {
  let size_ok = size_of::<TypeId>() == size_of::<u64>()
    || size_of::<TypeId>() == size_of::<u128>();
  let align_ok = align_of::<TypeId>() == align_of::<u64>()
    || align_of::<TypeId>() == align_of::<u128>();
  assert!(size_ok);
  assert!(align_ok);
};
2472
/// Type-erased storage cell for one isolate slot value. Values that fit in a
/// `usize` (size and alignment) are stored inline; larger values are boxed.
pub(crate) struct RawSlot {
  // Raw bytes of either the value itself or a `Box` pointing to it.
  data: RawSlotData,
  // Destructor for the stored value; `None` when the type needs no drop.
  dtor: Option<RawSlotDtor>,
}
2477
// One machine word of uninitialized storage; the inline/boxed payload cell.
type RawSlotData = MaybeUninit<usize>;
// Monomorphized destructor invoked by `RawSlot::drop` for the stored type.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
2480
impl RawSlot {
  /// Stores `value`, boxing it first when it is too large or over-aligned to
  /// fit inline in the `usize`-sized cell.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  // SAFETY: a valid value of type `T` must have been stored in the slot
  // earlier. There is no verification that the type param provided by the
  // caller is correct.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Boxed storage: the cell holds a `Box<T>`; borrow through it.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        // Inline storage: the cell's bytes are the `T` itself.
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        // Read out the Box, then move the value out of it.
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // Skip `Drop for RawSlot`: the value has been moved out, so running
      // the stored dtor would be a double-drop.
      forget(self);
      value
    }
  }

  /// True when `T` cannot be stored inline in the `usize`-sized cell.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // `B` is either the value type itself or `Box<T>`; by construction it
  // fits inline here (asserted).
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      // Write `value` into the cell's bytes without dropping the old bytes.
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      // Remember the monomorphized destructor so `Drop` can run it later.
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // SAFETY: a valid value of type `B` (the stored `T` or `Box<T>`) must be
  // stored in the slot.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
2560
2561impl Drop for RawSlot {
2562  fn drop(&mut self) {
2563    if let Some(dtor) = self.dtor {
2564      unsafe { dtor(&mut self.data) };
2565    }
2566  }
2567}
2568
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // SAFETY: `cxx_isolate` points at a live isolate for as long as this
    // `OwnedIsolate` exists (it is only disposed in `Drop`).
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  // Identity impl so generic code can take `impl AsRef<Isolate>` uniformly.
  fn as_ref(&self) -> &Isolate {
    self
  }
}
2579
/// Locks an isolate and enters it for the current thread.
///
/// This is a RAII wrapper around V8's `v8::Locker`. It ensures that the isolate
/// is properly locked before any V8 operations and unlocked when dropped.
///
/// # Thread Safety
///
/// `Locker` does not implement `Send` or `Sync`. Once created, it must be used
/// only on the thread where it was created. The underlying `UnenteredIsolate`
/// implements `Send`, allowing it to be transferred between threads, but a new
/// `Locker` must be created on each thread that needs to access the isolate.
///
/// # Panic Safety
///
/// `Locker::new()` is panic-safe. If a panic occurs during construction,
/// the isolate will be properly exited via a drop guard.
// NOTE(review): no drop guard is visible in `Locker::new()` below; confirm
// the panic-safety claim above or reinstate the guard.
pub struct Locker<'a> {
  // ManuallyDrop so `Drop for Locker` can control ordering: exit the isolate
  // first, then release the raw lock.
  raw: std::mem::ManuallyDrop<crate::scope::raw::Locker>,
  // Exclusive borrow guarantees no second Locker for this isolate on this
  // thread while `self` lives.
  isolate: &'a mut UnenteredIsolate,
}
2600
impl<'a> Locker<'a> {
  /// Creates a new `Locker` for the given isolate.
  ///
  /// This will:
  /// 1. Acquire the V8 lock (via `v8::Locker`)
  /// 2. Enter the isolate (via `v8::Isolate::Enter()`)
  ///
  /// When the `Locker` is dropped, the isolate is exited and the lock is released.
  ///
  /// The ordering is critical: we must hold the lock before calling Enter(),
  /// because Enter() modifies V8's entry_stack_ which is not thread-safe.
  pub fn new(isolate: &'a mut UnenteredIsolate) -> Self {
    let isolate_ptr = isolate.cxx_isolate;

    // Acquire the lock first (must hold lock before touching entry_stack_)
    // SAFETY: `uninit` is immediately followed by `init` with a live isolate
    // pointer, before the raw locker is used or dropped.
    let mut raw = unsafe { crate::scope::raw::Locker::uninit() };
    unsafe { raw.init(isolate_ptr) };

    // Now enter the isolate (safe because we hold the lock)
    unsafe {
      v8__Isolate__Enter(isolate_ptr.as_ptr());
    }

    Self {
      raw: std::mem::ManuallyDrop::new(raw),
      isolate,
    }
  }

  /// Returns `true` if the given isolate is currently locked by any `Locker`.
  pub fn is_locked(isolate: &UnenteredIsolate) -> bool {
    crate::scope::raw::Locker::is_locked(isolate.cxx_isolate)
  }
}
2635
impl Drop for Locker<'_> {
  fn drop(&mut self) {
    unsafe {
      // Exit first (while we still hold the lock), then release the lock.
      // Reverse order of new(): Lock -> Enter, so drop: Exit -> Unlock.
      v8__Isolate__Exit(self.isolate.cxx_isolate.as_ptr());
      // SAFETY: `raw` is dropped exactly once, here; `ManuallyDrop` exists
      // solely to enforce this exit-before-unlock ordering.
      std::mem::ManuallyDrop::drop(&mut self.raw);
    }
  }
}
2646
impl Deref for Locker<'_> {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: the Locker holds the V8 lock and has the isolate entered for
    // its entire lifetime, so the pointer is valid and safely accessible.
    unsafe { Isolate::from_raw_ref(&self.isolate.cxx_isolate) }
  }
}
2653
impl DerefMut for Locker<'_> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: see `Deref`; the exclusive borrow of `self` guarantees unique
    // access for the duration of the returned reference.
    unsafe { Isolate::from_raw_ref_mut(&mut self.isolate.cxx_isolate) }
  }
}