1use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapCodeStatistics;
22use crate::binding::v8__HeapSpaceStatistics;
23use crate::binding::v8__HeapStatistics;
24use crate::binding::v8__Isolate__UseCounterFeature;
25pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
26use crate::cppgc::Heap;
27use crate::external_references::ExternalReference;
28use crate::function::FunctionCallbackInfo;
29use crate::gc::GCCallbackFlags;
30use crate::gc::GCType;
31use crate::handle::FinalizerCallback;
32use crate::handle::FinalizerMap;
33use crate::isolate_create_params::CreateParams;
34use crate::isolate_create_params::raw;
35use crate::promise::PromiseRejectMessage;
36use crate::snapshot::SnapshotCreator;
37use crate::support::MapFnFrom;
38use crate::support::MapFnTo;
39use crate::support::Opaque;
40use crate::support::ToCFn;
41use crate::support::UnitType;
42use crate::support::char;
43use crate::support::int;
44use crate::support::size_t;
45use crate::wasm::WasmStreaming;
46use crate::wasm::trampoline;
47use std::ffi::CStr;
48
49use std::any::Any;
50use std::any::TypeId;
51use std::borrow::Cow;
52use std::collections::HashMap;
53use std::ffi::c_void;
54use std::fmt::{self, Debug, Formatter};
55use std::hash::BuildHasher;
56use std::hash::Hasher;
57use std::mem::MaybeUninit;
58use std::mem::align_of;
59use std::mem::forget;
60use std::mem::needs_drop;
61use std::mem::size_of;
62use std::ops::Deref;
63use std::ops::DerefMut;
64use std::pin::pin;
65use std::ptr;
66use std::ptr::NonNull;
67use std::ptr::addr_of_mut;
68use std::ptr::drop_in_place;
69use std::ptr::null_mut;
70use std::sync::Arc;
71use std::sync::Mutex;
72
/// Controls when microtasks (e.g. promise reactions) are run by V8.
///
/// `#[repr(C)]` with explicit discriminants so values can be passed directly
/// to/from the C++ `v8::MicrotasksPolicy` enum (see `v8__Isolate__SetMicrotasksPolicy`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  /// Microtasks run only when a microtask checkpoint is performed explicitly.
  Explicit = 0,
  // NOTE(review): discriminant 1 is deliberately skipped — presumably V8's
  // scoped (RAII) policy, which this binding does not expose; confirm
  // against the C++ header.
  /// Microtasks run automatically when the JS call stack becomes empty.
  Auto = 2,
}
85
/// Memory pressure level reported to V8 via
/// `Isolate::memory_pressure_notification` (passed across FFI as a `u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}
99
/// Whether `date_time_configuration_change_notification` should make V8
/// re-detect the host time zone or keep its cached value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  /// Keep the currently cached time zone.
  Skip = 0,
  /// Re-query the OS for the current time zone.
  Redetect = 1,
}
117
/// Promise lifecycle stage passed as the first argument to a [`PromiseHook`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  /// A new promise was created.
  Init,
  /// A promise was resolved or rejected.
  Resolve,
  /// About to invoke a promise reaction (then/catch handler).
  Before,
  /// Finished invoking a promise reaction.
  After,
}
140
/// Kind of GC to request — presumably consumed by
/// `v8__Isolate__RequestGarbageCollectionForTesting` (which takes the
/// discriminant as a `usize`); confirm at the call site.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  /// Full (major) garbage collection.
  Full,
  /// Minor (young-generation) garbage collection.
  Minor,
}
149
/// Callback invoked for error messages; receives the `Message` and the
/// associated error `Value`.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  /// Bitmask of message severities, used to filter which messages a listener
  /// registered via `add_message_listener_with_error_level` receives.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    /// All severity bits combined.
    const ALL = (1 << 5) - 1;
  }
}

/// Hook observing each promise lifecycle stage; see [`PromiseHookType`].
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback notified about promise rejections; details are carried in the
/// `PromiseRejectMessage`.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);
169
/// Outcome flag handed to a [`WasmAsyncResolvePromiseCallback`], telling the
/// embedder whether to resolve or reject the promise.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback that settles the promise for an asynchronous wasm operation.
/// Note the isolate is passed as a raw, possibly-null pointer.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

/// Callback deciding whether wasm code generation is allowed in the given
/// context; returns `true` to allow.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// Callback invoked so the embedder can populate a module's `import.meta`
/// object.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
197
/// High-level embedder callback implementing dynamic `import()`.
///
/// Implemented (via the blanket impl below) for any zero-sized closure/fn
/// with the matching signature. Returning `None` indicates an exception was
/// thrown; returning `Some(promise)` supplies the promise for the import.
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Lowers `self` to the raw, platform-specific `extern "C"` function
  /// pointer registered with V8.
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}

/// Raw ABI on Unix: the `Local<Promise>` result is pointer-sized and
/// returned directly; null signals "no promise" (exception thrown).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

/// Raw ABI on Windows x64/aarch64: the C++ calling convention returns the
/// `Local` through a hidden first argument pointing at the return slot,
/// which is also returned.
// NOTE(review): this alias is `pub` while the unix variant above is
// `pub(crate)` — the visibility mismatch looks unintentional; confirm.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
273
// Blanket impl: any zero-sized closure/fn with the right signature becomes
// a raw C callback. `UnitType` guarantees the type is a ZST, so `F::get()`
// can materialize an instance inside the adapter without captured state.
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Platform-independent layer: enter a CallbackScope for `context`,
    // then invoke the user callback.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Unix ABI: return the promise pointer directly; null = exception.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI: write the result into the hidden return
    // slot passed as the first argument, then echo that pointer back.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
365
/// Phase-aware variant of [`HostImportModuleDynamicallyCallback`]: the
/// callback additionally receives the requested `ModuleImportPhase`.
/// Returning `None` indicates an exception was thrown.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Lowers `self` to the raw, platform-specific `extern "C"` function
  /// pointer registered with V8.
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}

/// Raw ABI on Unix: the promise pointer is returned directly (null =
/// exception thrown).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

/// Raw ABI on Windows x64/aarch64: result returned via a hidden
/// return-slot pointer passed (and returned) as the first argument.
// NOTE(review): `pub` here vs `pub(crate)` on the unix variant — the
// visibility mismatch looks unintentional; confirm.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
439
// Blanket impl mirroring the one for `HostImportModuleDynamicallyCallback`,
// with the extra `ModuleImportPhase` argument threaded through.
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Platform-independent layer: enter a CallbackScope, call the user
    // callback recovered from the ZST via `F::get()`.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Unix ABI: promise pointer returned directly; null = exception.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI: write through the hidden return-slot
    // pointer, then return it.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
538
/// Callback creating the `Context` for a new ShadowRealm; `None` means an
/// exception was thrown. This is a plain `fn` pointer (stored in the
/// isolate's slot map — see `set_host_create_shadow_realm_context_callback`).
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback carrying an embedder `data` pointer.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback executed on the isolate thread after `request_interrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Callback invoked when the heap approaches its limit; the returned value
/// becomes the new heap limit.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

/// FFI mirror of V8's OOM details struct, passed to [`OomErrorCallback`].
#[repr(C)]
pub struct OomDetails {
  // True for a JS-heap OOM; false for a process-level allocation failure.
  pub is_heap_oom: bool,
  // Detail string (C `char` pointer, presumably NUL-terminated); may be
  // null — TODO confirm against the C++ side.
  pub detail: *const char,
}

/// Handler invoked on out-of-memory. `location` is a C string naming the
/// failure site.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);

// Windows ABI: the `Local<Value>` result travels through a hidden
// return-slot pointer passed (and returned) as the first argument.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// Elsewhere the result is returned directly, wrapped in a `#[repr(C)]`
// newtype so the ABI matches the C++ signature.
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Feature id reported through [`UseCounterCallback`].
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback invoked when V8 increments a use counter.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
604
// Declarations of the C/C++ glue functions wrapping the `v8::Isolate` API.
// The `this`/`isolate` pointer constness mirrors the constness of the
// underlying C++ method. These symbols are resolved at link time against the
// crate's C++ shim.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  // `level` is the `MemoryPressureLevel` discriminant, narrowed to u8.
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__GetHeapCodeAndMetadataStatistics(
    isolate: *mut RealIsolate,
    code_statistics: *mut v8__HeapCodeStatistics,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The shadow-realm callback signature differs per platform because of the
  // Local<T> return ABI (hidden return slot on Windows).
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
784
/// Rust-side handle to a C++ `v8::Isolate`.
///
/// `#[repr(transparent)]` over a `NonNull` pointer, so it is
/// layout-compatible with a non-null raw isolate pointer (several helpers
/// below rely on this for reference casts).
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);

/// A possibly-null raw isolate pointer, used in callback signatures where
/// V8 may hand us null. Layout-compatible with `*mut RealIsolate`.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
802
803impl UnsafeRawIsolatePtr {
804 pub fn null() -> Self {
805 Self(std::ptr::null_mut())
806 }
807
808 pub fn is_null(&self) -> bool {
809 self.0.is_null()
810 }
811}
812
/// Opaque stand-in for the C++ `v8::Isolate`; only ever used behind raw
/// pointers / `NonNull`, never constructed or dereferenced in Rust.
#[repr(C)]
pub struct RealIsolate(Opaque);
815
816impl Isolate {
  /// Raw pointer to the underlying C++ isolate, for FFI calls.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Wraps this isolate's pointer in an [`UnsafeRawIsolatePtr`].
  ///
  /// # Safety
  /// The caller must not let the returned pointer outlive the isolate.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }

  /// # Safety
  /// `ptr` must point to a live isolate. Panics if `ptr` is null.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
    Self(NonNull::new(ptr.0).unwrap())
  }

  /// # Safety
  /// `ptr` must be non-null and point to a live isolate (no null check).
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }

  /// # Safety
  /// `ptr` must be non-null and point to a live isolate (no null check).
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }

  /// # Safety
  /// `ptr` must point to a live isolate. Panics if `ptr` is null.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }
844
  /// Reinterprets `&UnsafeRawIsolatePtr` as `&Isolate`; sound because both
  /// are `#[repr(transparent)]` wrappers around an isolate pointer.
  ///
  /// # Safety
  /// The pointer must refer to a live isolate. Panics if it is null.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Like `ref_from_raw_isolate_ptr`, but without the null check.
  ///
  /// # Safety
  /// The pointer must be non-null and refer to a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Mutable variant of `ref_from_raw_isolate_ptr`. Panics on null.
  ///
  /// # Safety
  /// The pointer must refer to a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Mutable variant of `ref_from_raw_isolate_ptr_unchecked`.
  ///
  /// # Safety
  /// The pointer must be non-null and refer to a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// # Safety
  /// `ptr` must point to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets `&NonNull<RealIsolate>` as `&Isolate`
  /// (layout-compatible via `#[repr(transparent)]`).
  ///
  /// # Safety
  /// The pointer must refer to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable variant of `from_raw_ref`.
  ///
  /// # Safety
  /// The pointer must refer to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }
895
  // Embedder-data slot 0 holds the `IsolateAnnex` pointer. Slots below
  // `INTERNAL_DATA_SLOT_COUNT` are reserved for this crate; the public
  // `get_data`/`set_data` API is offset past them. (What reserved slot 1
  // is used for is not visible in this file.)
  const ANNEX_SLOT: u32 = 0;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  // Sanity check: V8 must provide at least the slots this crate reserves.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
907
  /// Creates the C++ isolate and runs the shared initialization; returns
  /// the raw pointer for the caller (`new` / the snapshot creators) to
  /// wrap in an owning type.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    // V8 (and its platform) must be initialized before any isolate exists.
    crate::V8::assert_initialized();
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Post-creation setup shared by all isolate constructors: verifies the
  /// data-slot layout and installs the `IsolateAnnex`.
  /// `create_param_allocations` keeps buffers referenced by the raw
  /// `CreateParams` alive for the isolate's lifetime.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }
921
  /// Creates a new isolate, returned as an `OwnedIsolate` that disposes it
  /// on drop.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }

  /// Creates a snapshot-creator isolate, used to produce a startup
  /// snapshot blob.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }

  /// Creates a snapshot-creator isolate seeded from an existing snapshot
  /// blob.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }

  /// Convenience constructor for default `CreateParams`.
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }
960
  /// Returns a handle that may be used from other threads (e.g. to
  /// terminate execution).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }

  /// Forcefully terminates current JS execution. Delegates to the
  /// thread-safe handle; see `IsolateHandle` for the meaning of the
  /// returned bool.
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }

  /// Re-enables execution after a `terminate_execution` call; delegates to
  /// the thread-safe handle.
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  /// Returns whether JS execution is currently terminating; delegates to
  /// the thread-safe handle.
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }
983
  /// Allocates the `IsolateAnnex` (this crate's per-isolate state) and
  /// stores it in internal data slot `ANNEX_SLOT` as a leaked `Arc`
  /// pointer. The matching release happens in `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    // `into_raw` leaks one strong reference; the slot holds it.
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must not already contain an annex.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
993
994 unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
995 let annex = self.get_annex_mut();
999 {
1000 let _lock = annex.isolate_mutex.lock().unwrap();
1001 annex.isolate = null_mut();
1002 }
1003
1004 let create_param_allocations =
1006 std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1007 annex.slots.clear();
1008
1009 for finalizer in annex.finalizer_map.drain() {
1011 if let FinalizerCallback::Guaranteed(callback) = finalizer {
1012 callback();
1013 }
1014 }
1015
1016 unsafe { Arc::from_raw(annex) };
1018 self.set_data(0, null_mut());
1019
1020 create_param_allocations
1021 }
1022
  /// Shared reference to the annex stored in `ANNEX_SLOT`. Panics if the
  /// annex has not been created or was already disposed.
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  /// Mutable reference to the annex; panics under the same conditions as
  /// `get_annex`.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }

  /// Stores the `SnapshotCreator` in the annex; asserts this happens at
  /// most once per isolate.
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }

  /// Accessor for the weak/finalizer bookkeeping map kept in the annex.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }

  /// Mutable accessor for the finalizer map.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }

  /// Clones the annex `Arc`. `from_raw` temporarily reconstitutes the Arc
  /// that `create_annex` leaked, the `clone` bumps the refcount, and
  /// `into_raw` re-leaks the original so the slot's reference stays alive.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1064
  /// Retrieves embedder data from public slot `slot` (offset past the
  /// crate-internal reserved slots).
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }

  /// Stores embedder data in public slot `slot`.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }

  /// Number of data slots available to the embedder, excluding the slots
  /// this crate reserves for itself.
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }

  // Raw (un-offset) slot accessors, used for the crate-internal slots.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }

  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1094
  /// Returns a reference to the value of type `T` stored in the annex's
  /// type-keyed slot map, if any.
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow::<T>() })
  }

  /// Mutable variant of `get_slot`.
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }

  /// Stores `value` keyed by its type, replacing any previous value of the
  /// same type. Returns `true` if the slot was previously empty.
  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }

  /// Removes and returns the stored value of type `T`, if present.
  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      .remove(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }
1171
  /// Enters this isolate on the current thread.
  ///
  /// # Safety
  /// Must be balanced with a matching [`exit`](Self::exit); V8 isolates
  /// are single-entrant.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }

  /// Exits this isolate on the current thread.
  ///
  /// # Safety
  /// Must match a prior [`enter`](Self::enter) on the same thread.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }

  /// Notifies V8 of the current memory pressure level. The enum is
  /// narrowed to `u8` for the FFI call.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }

  /// Clears the isolate's list of kept objects (WeakRef semantics — see
  /// the V8 `ClearKeptObjects` documentation).
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }

  /// Notifies V8 that the system is low on memory, prompting aggressive GC.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }
1233
  /// Snapshot of overall heap statistics. The out-param struct is
  /// zero-initialized here and filled in by the C++ side.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }

  /// Number of heap spaces — the valid index range for
  /// `get_heap_space_statistics`.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }

  /// Statistics for the heap space at `index`, or `None` when the C++
  /// call reports failure (e.g. index out of range).
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }

  /// Code/metadata heap statistics, or `None` when the C++ call reports
  /// failure.
  #[inline(always)]
  pub fn get_heap_code_and_metadata_statistics(
    &mut self,
  ) -> Option<HeapCodeStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapCodeAndMetadataStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapCodeStatistics(inner))
  }
1296
  /// Enables/disables capturing stack traces for uncaught exceptions,
  /// keeping at most `frame_limit` frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }

  /// Adds a listener for error messages; returns the C++ call's bool
  /// result.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }

  /// Like `add_message_listener`, but only delivers messages whose
  /// severity is included in `message_levels`.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1340
  /// Installs the callback implementing `Error.prepareStackTrace`.
  /// `MapFnTo` lowers a compatible zero-sized closure to the raw,
  /// platform-specific `PrepareStackTraceCallback` signature.
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }

  /// Installs a hook observing promise lifecycle events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }

  /// Installs the callback notified about promise rejections.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }
1383
  /// Installs the callback that settles promises for async wasm work.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }

  /// Installs the callback deciding whether wasm code generation is
  /// permitted in a context.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the callback that populates modules' `import.meta` objects.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the dynamic `import()` callback. `to_c_fn` lowers the
  /// high-level closure to the platform-specific raw signature.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }

  /// Phase-aware variant of the dynamic `import()` callback (the callback
  /// additionally receives a `ModuleImportPhase`).
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1459
  /// Installs the host callback that creates the context for a new
  /// ShadowRealm. The Rust callback is stored in an isolate slot and
  /// dispatched through a fixed C trampoline, so re-registering only
  /// replaces the slot value.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // C-ABI trampoline: recovers the user callback from the isolate slot
    // and converts its Option<Local<Context>> result to a raw pointer
    // (null on None).
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows variant: the result is written through a caller-provided
    // return slot instead of being returned directly — presumably due to
    // the different struct-return ABI on Windows (same pattern as other
    // callbacks in this file).
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // Store (or replace) the user callback; only register the C trampoline
    // with V8 the first time, since the trampoline never changes.
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1509
1510 #[inline(always)]
1512 pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
1513 unsafe {
1514 v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
1515 }
1516 }
1517
1518 #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
1527 pub fn add_gc_prologue_callback(
1528 &mut self,
1529 callback: GcCallbackWithData,
1530 data: *mut c_void,
1531 gc_type_filter: GCType,
1532 ) {
1533 unsafe {
1534 v8__Isolate__AddGCPrologueCallback(
1535 self.as_real_ptr(),
1536 callback,
1537 data,
1538 gc_type_filter,
1539 );
1540 }
1541 }
1542
1543 #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
1547 pub fn remove_gc_prologue_callback(
1548 &mut self,
1549 callback: GcCallbackWithData,
1550 data: *mut c_void,
1551 ) {
1552 unsafe {
1553 v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
1554 }
1555 }
1556
1557 #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
1561 pub fn add_gc_epilogue_callback(
1562 &mut self,
1563 callback: GcCallbackWithData,
1564 data: *mut c_void,
1565 gc_type_filter: GCType,
1566 ) {
1567 unsafe {
1568 v8__Isolate__AddGCEpilogueCallback(
1569 self.as_real_ptr(),
1570 callback,
1571 data,
1572 gc_type_filter,
1573 );
1574 }
1575 }
1576
1577 #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
1581 pub fn remove_gc_epilogue_callback(
1582 &mut self,
1583 callback: GcCallbackWithData,
1584 data: *mut c_void,
1585 ) {
1586 unsafe {
1587 v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
1588 }
1589 }
1590
1591 #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
1596 pub fn add_near_heap_limit_callback(
1597 &mut self,
1598 callback: NearHeapLimitCallback,
1599 data: *mut c_void,
1600 ) {
1601 unsafe {
1602 v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
1603 };
1604 }
1605
1606 #[inline(always)]
1611 pub fn remove_near_heap_limit_callback(
1612 &mut self,
1613 callback: NearHeapLimitCallback,
1614 heap_limit: usize,
1615 ) {
1616 unsafe {
1617 v8__Isolate__RemoveNearHeapLimitCallback(
1618 self.as_real_ptr(),
1619 callback,
1620 heap_limit,
1621 );
1622 };
1623 }
1624
1625 #[inline(always)]
1633 pub fn adjust_amount_of_external_allocated_memory(
1634 &mut self,
1635 change_in_bytes: i64,
1636 ) -> i64 {
1637 unsafe {
1638 v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
1639 self.as_real_ptr(),
1640 change_in_bytes,
1641 )
1642 }
1643 }
1644
1645 #[inline(always)]
1646 pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
1647 unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
1648 }
1649
1650 #[inline(always)]
1651 pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
1652 unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
1653 }
1654
1655 #[inline(always)]
1657 pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
1658 unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
1659 }
1660
1661 #[inline(always)]
1663 pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
1664 unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
1665 }
1666
1667 #[inline(always)]
1672 pub fn perform_microtask_checkpoint(&mut self) {
1673 unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
1674 }
1675
1676 #[inline(always)]
1678 pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
1679 unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
1680 }
1681
1682 #[inline(always)]
1686 pub fn set_allow_atomics_wait(&mut self, allow: bool) {
1687 unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
1688 }
1689
  /// Installs the callback invoked when streaming WebAssembly compilation
  /// starts. `F` is a zero-sized closure type (`UnitType` bound), so the
  /// argument value is discarded and only the monomorphized trampoline
  /// `trampoline::<F>()` — no data pointer — is handed to V8.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming<false>,
      ),
  {
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1714
1715 #[inline(always)]
1724 pub fn date_time_configuration_change_notification(
1725 &mut self,
1726 time_zone_detection: TimeZoneDetection,
1727 ) {
1728 unsafe {
1729 v8__Isolate__DateTimeConfigurationChangeNotification(
1730 self.as_real_ptr(),
1731 time_zone_detection,
1732 );
1733 }
1734 }
1735
1736 #[inline(always)]
1740 pub fn has_pending_background_tasks(&self) -> bool {
1741 unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
1742 }
1743
1744 #[inline(always)]
1754 pub fn request_garbage_collection_for_testing(
1755 &mut self,
1756 r#type: GarbageCollectionType,
1757 ) {
1758 unsafe {
1759 v8__Isolate__RequestGarbageCollectionForTesting(
1760 self.as_real_ptr(),
1761 match r#type {
1762 GarbageCollectionType::Full => 0,
1763 GarbageCollectionType::Minor => 1,
1764 },
1765 );
1766 }
1767 }
1768
  /// Disposes the underlying C++ isolate.
  ///
  /// Callers (see `Drop for OwnedIsolate`) must ensure this is the very
  /// last operation on the isolate: it must already be exited and its
  /// annex torn down, and `self` must not be used afterwards.
  unsafe fn dispose(&mut self) {
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1778
  /// Takes a heap snapshot, streaming the serialized data to `callback`
  /// in chunks. NOTE(review): the callback's `bool` return is forwarded
  /// to the C++ side — presumably `false` aborts serialization; confirm
  /// against the `v8::OutputStream` contract.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // Monomorphized C-ABI shim: `arg` points at the caller's closure,
    // which is reconstructed here for every chunk.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        // `from_raw_parts` requires a valid pointer; handle the empty
        // chunk case without touching `data`.
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          (callback.as_mut())(&[])
        }
      }
    }

    // The closure lives on this stack frame; passing its raw address is
    // sound as long as the FFI call does not retain it past the call
    // (NOTE(review): the call appears synchronous).
    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1816
1817 #[inline(always)]
1825 pub fn set_default_context(&mut self, context: Local<Context>) {
1826 let snapshot_creator = self
1827 .get_annex_mut()
1828 .maybe_snapshot_creator
1829 .as_mut()
1830 .unwrap();
1831 snapshot_creator.set_default_context(context);
1832 }
1833
1834 #[inline(always)]
1843 pub fn add_context(&mut self, context: Local<Context>) -> usize {
1844 let snapshot_creator = self
1845 .get_annex_mut()
1846 .maybe_snapshot_creator
1847 .as_mut()
1848 .unwrap();
1849 snapshot_creator.add_context(context)
1850 }
1851
1852 #[inline(always)]
1861 pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1862 where
1863 for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1864 {
1865 let snapshot_creator = self
1866 .get_annex_mut()
1867 .maybe_snapshot_creator
1868 .as_mut()
1869 .unwrap();
1870 snapshot_creator.add_isolate_data(data)
1871 }
1872
1873 #[inline(always)]
1882 pub fn add_context_data<T>(
1883 &mut self,
1884 context: Local<Context>,
1885 data: Local<T>,
1886 ) -> usize
1887 where
1888 for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1889 {
1890 let snapshot_creator = self
1891 .get_annex_mut()
1892 .maybe_snapshot_creator
1893 .as_mut()
1894 .unwrap();
1895 snapshot_creator.add_context_data(context, data)
1896 }
1897}
1898
/// Rust-side companion state for an isolate, shared with `IsolateHandle`s
/// via `Arc`.
pub(crate) struct IsolateAnnex {
  // Owns allocations made while building `CreateParams`, keeping them
  // alive for the isolate's lifetime.
  create_param_allocations: Box<dyn Any>,
  // Type-keyed user slots; values are type-erased in `RawSlot`.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  // Finalizer bookkeeping (see `handle::FinalizerMap`).
  finalizer_map: FinalizerMap,
  // Present only for snapshot-creating isolates; consumed by
  // `OwnedIsolate::create_blob`.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  isolate: *mut RealIsolate,
  // Guards cross-thread access to `isolate` (see `IsolateHandle` methods,
  // which lock this before touching the pointer).
  isolate_mutex: Mutex<()>,
}
1914
// SAFETY(review): `IsolateAnnex` is shared across threads through
// `IsolateHandle`; the handle methods visible in this file only touch the
// raw `isolate` pointer while holding `isolate_mutex`. Confirm the same
// discipline holds for `slots`/`finalizer_map` access elsewhere.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1917
1918impl IsolateAnnex {
1919 fn new(
1920 isolate: &mut Isolate,
1921 create_param_allocations: Box<dyn Any>,
1922 ) -> Self {
1923 Self {
1924 create_param_allocations,
1925 slots: HashMap::default(),
1926 finalizer_map: FinalizerMap::default(),
1927 maybe_snapshot_creator: None,
1928 isolate: isolate.as_real_ptr(),
1929 isolate_mutex: Mutex::new(()),
1930 }
1931 }
1932}
1933
1934impl Debug for IsolateAnnex {
1935 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1936 f.debug_struct("IsolateAnnex")
1937 .field("isolate", &self.isolate)
1938 .field("isolate_mutex", &self.isolate_mutex)
1939 .finish()
1940 }
1941}
1942
/// A cloneable, thread-safe handle to an isolate, backed by the shared
/// `IsolateAnnex`. It stays safe to use after the isolate is disposed:
/// its methods check for a null isolate pointer and report `false`.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1951
1952impl IsolateHandle {
1953 pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1957 self.0.isolate
1958 }
1959
1960 #[inline(always)]
1961 fn new(isolate: &Isolate) -> Self {
1962 Self(isolate.get_annex_arc())
1963 }
1964
1965 #[inline(always)]
1973 pub fn terminate_execution(&self) -> bool {
1974 let _lock = self.0.isolate_mutex.lock().unwrap();
1975 if self.0.isolate.is_null() {
1976 false
1977 } else {
1978 unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1979 true
1980 }
1981 }
1982
1983 #[inline(always)]
1998 pub fn cancel_terminate_execution(&self) -> bool {
1999 let _lock = self.0.isolate_mutex.lock().unwrap();
2000 if self.0.isolate.is_null() {
2001 false
2002 } else {
2003 unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
2004 true
2005 }
2006 }
2007
2008 #[inline(always)]
2017 pub fn is_execution_terminating(&self) -> bool {
2018 let _lock = self.0.isolate_mutex.lock().unwrap();
2019 if self.0.isolate.is_null() {
2020 false
2021 } else {
2022 unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
2023 }
2024 }
2025
2026 #[allow(clippy::not_unsafe_ptr_arg_deref)]
2037 #[inline(always)]
2038 pub fn request_interrupt(
2039 &self,
2040 callback: InterruptCallback,
2041 data: *mut c_void,
2042 ) -> bool {
2043 let _lock = self.0.isolate_mutex.lock().unwrap();
2044 if self.0.isolate.is_null() {
2045 false
2046 } else {
2047 unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2048 true
2049 }
2050 }
2051}
2052
/// Owning wrapper around the raw C++ isolate pointer. The isolate is
/// entered on creation and exited + disposed when this value is dropped
/// (see the `Drop` impl below for the required drop ordering).
#[derive(Debug)]
pub struct OwnedIsolate {
  cxx_isolate: NonNull<RealIsolate>,
}
2058
2059impl OwnedIsolate {
2060 pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2061 let isolate = Self::new_already_entered(cxx_isolate);
2062 unsafe {
2063 isolate.enter();
2064 }
2065 isolate
2066 }
2067
2068 pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2069 let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2070 let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2071 owned_isolate
2073 }
2074}
2075
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      // A live SnapshotCreator must have been consumed via `create_blob`
      // before the isolate is dropped; enforce that here.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Isolates are entered on creation, so drops must happen LIFO: only
      // the currently-entered isolate may be exited here.
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      self.exit();
      // Order matters: tear down the Rust-side annex, notify the platform,
      // and only then dispose the C++ isolate.
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2097
impl OwnedIsolate {
  /// Consumes the isolate and produces the startup snapshot blob via the
  /// `SnapshotCreator` stored in the annex.
  ///
  /// # Panics
  /// Panics if the isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // `dispose_annex` returns the create-param allocations; binding them
    // here keeps them alive until the end of this function, presumably so
    // they outlive the snapshot serialization below.
    let _create_param_allocations = unsafe {
      self.dispose_annex()
    };

    // Skip OwnedIsolate::drop: NOTE(review): the SnapshotCreator is
    // presumed to take over disposal of the raw isolate inside
    // `create_blob` — confirm in the snapshot module.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2126
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // Layout assumption: `Isolate` is represented as a
    // `NonNull<RealIsolate>` (the same reinterpretation is done by
    // `Isolate::from_raw_ref` in the `AsRef` impl), so a reference to our
    // pointer field can be transmuted into `&Isolate`.
    unsafe {
      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
    }
  }
}
2135
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // Same layout assumption as `Deref`: `Isolate` has the representation
    // of a `NonNull<RealIsolate>`, so the field reference can be
    // reinterpreted mutably.
    unsafe {
      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
        &mut self.cxx_isolate,
      )
    }
  }
}
2145
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Coerces through the `DerefMut<Target = Isolate>` impl.
    self
  }
}
2151
impl AsMut<Isolate> for Isolate {
  // Identity conversion so generic code can accept `impl AsMut<Isolate>`.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2157
/// Owned copy of V8 heap statistics (wraps the raw `v8__HeapStatistics`
/// binding struct; see `v8::HeapStatistics`).
pub struct HeapStatistics(v8__HeapStatistics);
2163
// Read-only accessors; each mirrors the identically named field of
// `v8::HeapStatistics`.
impl HeapStatistics {
  /// Mirrors `v8::HeapStatistics::total_heap_size()`.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Mirrors `v8::HeapStatistics::total_heap_size_executable()`.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Mirrors `v8::HeapStatistics::total_physical_size()`.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Mirrors `v8::HeapStatistics::total_available_size()`.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Mirrors `v8::HeapStatistics::total_global_handles_size()`.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Mirrors `v8::HeapStatistics::used_global_handles_size()`.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Mirrors `v8::HeapStatistics::used_heap_size()`.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Mirrors `v8::HeapStatistics::heap_size_limit()`.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Mirrors `v8::HeapStatistics::malloced_memory()`.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// Mirrors `v8::HeapStatistics::external_memory()`.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Mirrors `v8::HeapStatistics::peak_malloced_memory()`.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Mirrors `v8::HeapStatistics::number_of_native_contexts()`.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Mirrors `v8::HeapStatistics::number_of_detached_contexts()`.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Mirrors `v8::HeapStatistics::total_allocated_bytes()` (note: `u64`,
  /// unlike the other accessors).
  #[inline(always)]
  pub fn total_allocated_bytes(&self) -> u64 {
    self.0.total_allocated_bytes_
  }

  /// Mirrors `v8::HeapStatistics::does_zap_garbage()`.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2245
/// Owned copy of per-space heap statistics (wraps the raw
/// `v8__HeapSpaceStatistics` binding struct; see `v8::HeapSpaceStatistics`).
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2247
// Read-only accessors; each mirrors the identically named field of
// `v8::HeapSpaceStatistics`.
impl HeapSpaceStatistics {
  /// Name of the heap space. The `'static` lifetime encodes the assumption
  /// that V8 returns a pointer to a statically allocated C string —
  /// NOTE(review): confirm against the C++ API.
  pub fn space_name(&self) -> &'static CStr {
    unsafe { CStr::from_ptr(self.0.space_name_) }
  }

  /// Mirrors `v8::HeapSpaceStatistics::space_size()`.
  pub fn space_size(&self) -> usize {
    self.0.space_size_
  }

  /// Mirrors `v8::HeapSpaceStatistics::space_used_size()`.
  pub fn space_used_size(&self) -> usize {
    self.0.space_used_size_
  }

  /// Mirrors `v8::HeapSpaceStatistics::space_available_size()`.
  pub fn space_available_size(&self) -> usize {
    self.0.space_available_size_
  }

  /// Mirrors `v8::HeapSpaceStatistics::physical_space_size()`.
  pub fn physical_space_size(&self) -> usize {
    self.0.physical_space_size_
  }
}
2269
/// Owned copy of code-related heap statistics (wraps the raw
/// `v8__HeapCodeStatistics` binding struct; see `v8::HeapCodeStatistics`).
pub struct HeapCodeStatistics(v8__HeapCodeStatistics);
2271
// Read-only accessors; each mirrors the identically named field of
// `v8::HeapCodeStatistics`.
impl HeapCodeStatistics {
  /// Mirrors `v8::HeapCodeStatistics::code_and_metadata_size()`.
  pub fn code_and_metadata_size(&self) -> usize {
    self.0.code_and_metadata_size_
  }

  /// Mirrors `v8::HeapCodeStatistics::bytecode_and_metadata_size()`.
  pub fn bytecode_and_metadata_size(&self) -> usize {
    self.0.bytecode_and_metadata_size_
  }

  /// Mirrors `v8::HeapCodeStatistics::external_script_source_size()`.
  pub fn external_script_source_size(&self) -> usize {
    self.0.external_script_source_size_
  }

  /// Mirrors `v8::HeapCodeStatistics::cpu_profiler_metadata_size()`.
  pub fn cpu_profiler_metadata_size(&self) -> usize {
    self.0.cpu_profiler_metadata_size_
  }
}
2289
// Adapts a zero-sized Rust closure into the raw C callback V8 expects for
// `Error.prepareStackTrace`-style stack trace preparation. The two
// `mapping` variants differ only in how the resulting Local is returned
// across the FFI boundary.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows: the result is written through a caller-provided return slot
  // and the slot pointer is returned — presumably due to the different
  // struct-return ABI on Windows (same pattern as the shadow-realm
  // callback above in the original file).
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // Non-Windows: the raw pointer is returned directly, wrapped in
  // `PrepareStackTraceCallbackRet`.
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2325
/// Hasher specialized for `TypeId` keys: a `TypeId` already hashes to a
/// high-quality 64-bit value, so the single `write_u64` is stored verbatim
/// instead of being mixed byte-by-byte.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  fn write(&mut self, _bytes: &[u8]) {
    // `TypeId` is expected to hash via exactly one `write_u64` call;
    // reaching the byte-oriented path indicates misuse.
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // In debug builds, verify this is the first (and only) write.
    debug_assert!(self.state.is_none());
    self.state = Some(value);
  }

  #[inline]
  fn finish(&self) -> u64 {
    self.state.unwrap()
  }
}

/// `BuildHasher` adapter so `TypeIdHasher` can back a `HashMap`.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;

impl BuildHasher for BuildTypeIdHasher {
  type Hasher = TypeIdHasher;

  #[inline]
  fn build_hasher(&self) -> Self::Hasher {
    TypeIdHasher::default()
  }
}
2367
// Compile-time guard: `TypeIdHasher` assumes `TypeId` is laid out like a
// plain 64- or 128-bit integer (and hashes through a single `write_u64`).
// Fail the build if a future std release changes that representation.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2378
/// Inline, type-erased storage for a single isolate slot value.
///
/// Values that fit within (and align to) a `usize` are stored in place;
/// larger or over-aligned values are boxed so the inline payload is just
/// the `Box` pointer. `dtor` is populated only for payload types that need
/// dropping.
pub(crate) struct RawSlot {
  data: RawSlotData,
  dtor: Option<RawSlotDtor>,
}

type RawSlotData = MaybeUninit<usize>;
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();

impl RawSlot {
  /// Stores `value`, boxing it first when it does not fit inline.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      return Self::new_internal(Box::new(value));
    }
    Self::new_internal(value)
  }

  // Caller contract: `T` must be exactly the type this slot was created
  // with; otherwise the reinterpretation below is undefined behavior.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    let raw = self.data.as_ptr();
    unsafe {
      if Self::needs_box::<T>() {
        // Boxed payload: the inline data is the Box itself; deref-coerce
        // `&Box<T>` to `&T`.
        &*(raw as *const Box<T>)
      } else {
        &*(raw as *const T)
      }
    }
  }

  // Caller contract: same as `borrow`.
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    let raw = self.data.as_mut_ptr();
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(raw as *mut Box<T>)
      } else {
        &mut *(raw as *mut T)
      }
    }
  }

  // Caller contract: same as `borrow`. Consumes the slot without running
  // its destructor (the value is moved out instead).
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        *std::ptr::read(self.data.as_ptr() as *const Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *const T)
      };
      // Skip Drop: ownership of the payload has moved to `value`.
      forget(self);
      value
    }
  }

  /// True when `T` cannot be stored inline in a `usize`-sized cell.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    // `B` is either the inline value or the Box wrapper; both must fit.
    assert!(!Self::needs_box::<B>());
    let mut slot = RawSlot {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(slot.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      slot.dtor = Some(Self::drop_internal::<B>);
    }
    slot
  }

  // Drops the payload in place; `B` must be the stored representation.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}

impl Drop for RawSlot {
  fn drop(&mut self) {
    // Run the recorded destructor, if the payload type needed one.
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}
2474
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // Reinterprets our `NonNull<RealIsolate>` field as an `Isolate`
    // reference; same layout assumption as the `Deref` impl.
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  // Identity conversion so generic code can accept `impl AsRef<Isolate>`.
  fn as_ref(&self) -> &Isolate {
    self
  }
}