1use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapSpaceStatistics;
22use crate::binding::v8__HeapStatistics;
23use crate::binding::v8__Isolate__UseCounterFeature;
24pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
25use crate::cppgc::Heap;
26use crate::external_references::ExternalReference;
27use crate::function::FunctionCallbackInfo;
28use crate::gc::GCCallbackFlags;
29use crate::gc::GCType;
30use crate::handle::FinalizerCallback;
31use crate::handle::FinalizerMap;
32use crate::isolate_create_params::CreateParams;
33use crate::isolate_create_params::raw;
34use crate::promise::PromiseRejectMessage;
35use crate::snapshot::SnapshotCreator;
36use crate::support::MapFnFrom;
37use crate::support::MapFnTo;
38use crate::support::Opaque;
39use crate::support::ToCFn;
40use crate::support::UnitType;
41use crate::support::char;
42use crate::support::int;
43use crate::support::size_t;
44use crate::wasm::WasmStreaming;
45use crate::wasm::trampoline;
46use std::ffi::CStr;
47
48use std::any::Any;
49use std::any::TypeId;
50use std::borrow::Cow;
51use std::collections::HashMap;
52use std::ffi::c_void;
53use std::fmt::{self, Debug, Formatter};
54use std::hash::BuildHasher;
55use std::hash::Hasher;
56use std::mem::MaybeUninit;
57use std::mem::align_of;
58use std::mem::forget;
59use std::mem::needs_drop;
60use std::mem::size_of;
61use std::ops::Deref;
62use std::ops::DerefMut;
63use std::pin::pin;
64use std::ptr;
65use std::ptr::NonNull;
66use std::ptr::addr_of_mut;
67use std::ptr::drop_in_place;
68use std::ptr::null_mut;
69use std::sync::Arc;
70use std::sync::Mutex;
71
/// Controls how the isolate runs queued microtasks; mirrors the C++
/// `v8::MicrotasksPolicy` (`#[repr(C)]`, passed across FFI by value).
///
/// NOTE(review): discriminants skip 1 — presumably matching a removed/extant
/// scoped policy in the C++ enum; confirm against the V8 headers before
/// adding variants.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  /// Microtasks run only when explicitly requested
  /// (see `perform_microtask_checkpoint`).
  Explicit = 0,
  /// The engine schedules microtask checkpoints automatically.
  Auto = 2,
}
84
/// Memory pressure level passed to `Isolate::memory_pressure_notification`;
/// mirrors `v8::MemoryPressureLevel` (converted to `u8` at the FFI boundary).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  /// No memory pressure.
  None = 0,
  /// Some pressure — the embedder hints that memory use should be reduced.
  Moderate = 1,
  /// Severe pressure — the engine should free as much memory as possible.
  Critical = 2,
}
98
/// Whether to re-detect the host time zone when the embedder calls
/// `Isolate::date_time_configuration_change_notification`; mirrors
/// `v8::Isolate::TimeZoneDetection`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  /// Keep the cached time zone.
  Skip = 0,
  /// Query the OS for the current time zone again.
  Redetect = 1,
}
116
/// Promise lifecycle event reported to a [`PromiseHook`];
/// mirrors `v8::PromiseHookType`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  /// A new promise was created.
  Init,
  /// A promise settled (resolved or rejected).
  Resolve,
  /// About to run a reaction callback for the promise.
  Before,
  /// Finished running a reaction callback for the promise.
  After,
}
139
/// Kind of garbage collection to request.
///
/// NOTE(review): presumably consumed (as an index/discriminant) by
/// `request_garbage_collection_for_testing`, which takes a `usize` at the
/// FFI boundary — confirm the mapping against the C++ glue.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  /// Full (major) collection.
  Full,
  /// Minor (young-generation) collection.
  Minor,
}
148
149pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);
150
bitflags! {
  /// Bitmask of message severity levels a listener subscribes to; used with
  /// `Isolate::add_message_listener_with_error_level`.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    /// Union of all individual levels (bits 0..=4).
    const ALL = (1 << 5) - 1;
  }
}
163
/// Hook invoked at promise lifecycle events ([`PromiseHookType`]), receiving
/// the promise and a related value; installed via `Isolate::set_promise_hook`.
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback invoked on promise-rejection events; installed via
/// `Isolate::set_promise_reject_callback`.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);
168
/// Outcome reported when an async Wasm operation resolves its promise;
/// mirrors the C++ enum passed to the resolve-promise callback.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback that resolves/rejects the promise for an async Wasm operation.
/// Receives the raw isolate pointer, the context, the resolver, the
/// resolution value, and the success flag. Installed via
/// `Isolate::set_wasm_async_resolve_promise_callback`.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

/// Returns whether Wasm code generation is allowed in the given context;
/// the `String` argument carries the source. Installed via
/// `Isolate::set_allow_wasm_code_generation_callback`.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// Called so the embedder can populate the `import.meta` object of a module;
/// installed via `Isolate::set_host_initialize_import_meta_object_callback`.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
196
/// Rust-friendly form of V8's dynamic-import host callback: a closure invoked
/// for dynamic `import()` that receives the scope, host-defined options,
/// resource name, specifier and import attributes, and returns the promise
/// tracking the import — or `None` when an exception has been thrown.
///
/// Blanket-implemented for any matching `FnOnce`; [`Self::to_c_fn`] produces
/// the platform-specific raw ABI adapter.
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Returns the raw `extern "C"` function pointer adapter for this closure
  /// type.
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
248
/// Raw ABI for the dynamic-import callback on Unix: the promise pointer is
/// returned directly (null signals a pending exception).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

/// Raw ABI on Windows x64/aarch64: the result is written through a leading
/// out-pointer parameter and that pointer is returned — matching the C++
/// aggregate-return calling convention used by the glue layer.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
272
// Blanket impl: any zero-sized closure with the right signature can be
// converted into the raw extern "C" callback. `scope_adapter` recreates a
// `CallbackScope` from the context and invokes the user closure;
// `abi_adapter` translates the `Option<Local<Promise>>` result into the
// platform-specific raw return convention.
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Shared, platform-independent part: enter a callback scope and run the
    // user closure (recovered via `F::get()` since `F` is zero-sized).
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Unix ABI: return the promise pointer directly; null means "no promise"
    // (an exception is pending).
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI: write the result through the caller-provided
    // out-pointer and return that same pointer (aggregate-return convention).
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
364
/// Variant of [`HostImportModuleDynamicallyCallback`] that additionally
/// receives the [`ModuleImportPhase`] (e.g. for phased/source imports).
/// Blanket-implemented for any matching `FnOnce`; [`Self::to_c_fn`] produces
/// the platform-specific raw ABI adapter.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Returns the raw `extern "C"` function pointer adapter for this closure
  /// type.
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
412
/// Raw ABI for the phased dynamic-import callback on Unix: the promise
/// pointer is returned directly (null signals a pending exception).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

/// Raw ABI on Windows x64/aarch64: result written through a leading
/// out-pointer which is also returned (aggregate-return convention).
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
438
// Blanket impl mirroring the non-phased version above: `scope_adapter` sets
// up a `CallbackScope` and calls the user closure; `abi_adapter` converts the
// `Option<Local<Promise>>` into the platform raw return convention.
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Platform-independent part: enter a callback scope and run the closure
    // (recovered via `F::get()` since `F` is zero-sized).
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Unix ABI: return the promise pointer directly; null = no promise.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows x64/aarch64 ABI: write through the out-pointer, return it.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
537
/// Embedder callback that creates the context for a new ShadowRealm;
/// returns `None` on failure. Stored in an isolate slot and dispatched from a
/// raw trampoline (see `set_host_create_shadow_realm_context_callback`).
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback with an opaque `data` pointer; registered
/// via `add_gc_prologue_callback` / `add_gc_epilogue_callback`.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback run on the isolate thread in response to `RequestInterrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Called when the heap nears its limit; returns the new heap limit.
/// Registered via `add_near_heap_limit_callback`.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

/// Details passed to the OOM error handler; layout must match the C++ struct.
#[repr(C)]
pub struct OomDetails {
  /// True when the OOM originated in the JS heap (vs. process OOM).
  pub is_heap_oom: bool,
  /// Optional C-string with extra detail (`char` is the C character type
  /// from `crate::support`); may be null.
  pub detail: *const char,
}

/// Handler invoked on out-of-memory; `location` is a C-string identifying the
/// failure site. Installed via `set_oom_error_handler`.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);
575
/// Raw `Error.prepareStackTrace` callback, Windows flavor: the result is
/// written through a leading out-pointer which is also returned
/// (aggregate-return calling convention).
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

/// Wrapper for the returned value pointer on non-Windows targets; kept
/// `#[repr(C)]` so it matches the C++ return type exactly.
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

/// Raw `Error.prepareStackTrace` callback, non-Windows flavor: the result is
/// returned by value.
#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;
599
/// Re-export of the binding-level use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback V8 invokes when a counted language/engine feature is used;
/// installed via `Isolate::set_use_counter_callback`.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
603
// FFI declarations implemented in the C++ glue layer. Signatures must match
// the C++ side exactly; do not reorder parameters or change types without
// updating the glue. `RealIsolate` is the opaque `v8::Isolate`.
unsafe extern "C" {
  // Lifecycle.
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  // Embedder data slots.
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  // Message listeners and GC callbacks.
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  // Embedder callbacks.
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // ShadowRealm callback: return ABI differs per platform (see the
  // PrepareStackTraceCallback comment above).
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  // Execution control (thread-safe entry points take *const).
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
779
/// Non-null handle to a V8 isolate. `#[repr(transparent)]` over
/// `NonNull<RealIsolate>` so `&NonNull<RealIsolate>` / `&UnsafeRawIsolatePtr`
/// can be reinterpreted as `&Isolate` (see the `ref_from_*` constructors).
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);

/// Possibly-null raw isolate pointer, used at FFI boundaries where V8 hands
/// us a pointer we have not yet validated. Same layout as a bare
/// `*mut RealIsolate`.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
797
798impl UnsafeRawIsolatePtr {
799 pub fn null() -> Self {
800 Self(std::ptr::null_mut())
801 }
802
803 pub fn is_null(&self) -> bool {
804 self.0.is_null()
805 }
806}
807
/// Opaque stand-in for the C++ `v8::Isolate`; only ever used behind pointers.
#[repr(C)]
pub struct RealIsolate(Opaque);
810
811impl Isolate {
  /// Returns the raw pointer to the underlying C++ isolate.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Converts this handle into a possibly-null FFI pointer wrapper.
  ///
  /// # Safety
  /// The returned pointer carries no lifetime; the caller must not use it
  /// after the isolate is disposed.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }

  /// Wraps a raw isolate pointer, panicking if it is null.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
    Self(NonNull::new(ptr.0).unwrap())
  }

  /// Like `from_raw_isolate_ptr` but skips the null check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }

  /// Wraps a bare pointer without checking for null.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live isolate.
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }

  /// Wraps a bare pointer, panicking if it is null.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }
839
  /// Reinterprets a `&UnsafeRawIsolatePtr` as `&Isolate`, panicking on null.
  /// Sound because both types are `#[repr(transparent)]` pointer wrappers and
  /// the null check upholds `Isolate`'s `NonNull` invariant.
  ///
  /// # Safety
  /// The pointed-to isolate must be live for the lifetime of the reference.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Like `ref_from_raw_isolate_ptr` but without the null check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Mutable version of `ref_from_raw_isolate_ptr`; panics on null.
  ///
  /// # Safety
  /// The pointed-to isolate must be live and not aliased mutably elsewhere.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Mutable, unchecked version of the cast above.
  ///
  /// # Safety
  /// `ptr` must be non-null, live, and not aliased mutably elsewhere.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }
871
  /// Wraps an already-validated non-null isolate pointer.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets `&NonNull<RealIsolate>` as `&Isolate`; sound because
  /// `Isolate` is `#[repr(transparent)]` over exactly that type.
  ///
  /// # Safety
  /// The pointed-to isolate must be live for the lifetime of the reference.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable version of `from_raw_ref`.
  ///
  /// # Safety
  /// The pointed-to isolate must be live and not aliased mutably elsewhere.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }
890
  // Internal embedder-data slot holding the `IsolateAnnex` pointer.
  const ANNEX_SLOT: u32 = 0;
  // Number of embedder-data slots reserved for this crate; user-visible slot
  // indices are offset by this amount in `get_data`/`set_data`.
  // NOTE(review): only slot 0 is used in this chunk — presumably slot 1 is
  // used elsewhere in the file; confirm before changing.
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  /// Sanity-checks that V8 actually provides at least as many embedder data
  /// slots as this crate reserves for itself.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
902
  /// Creates the underlying C++ isolate and attaches the annex; returns the
  /// raw pointer for the caller to wrap in an owning type.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    // `finalize` splits params into the FFI struct and the allocations that
    // must outlive the isolate (kept alive inside the annex).
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    // Temporary non-owning wrapper; panics if V8 returned null.
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Verifies slot assumptions and installs the annex on a fresh isolate.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }

  /// Creates a new isolate with its own heap, returned as an owning handle.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
928
  /// Creates an isolate set up for building a startup snapshot.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }

  /// Creates a snapshot-building isolate seeded from an existing snapshot
  /// blob.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }

  /// Convenience constructor for default isolate creation parameters.
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }
955
  /// Returns a handle that can be used from other threads to control this
  /// isolate (interrupt, terminate, etc.).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }

  /// Forcefully terminates the current JS execution; convenience wrapper over
  /// the thread-safe handle.
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }

  /// Resumes execution after a prior `terminate_execution` request.
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  /// Returns whether JS execution is currently being terminated.
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }
978
  /// Allocates the `IsolateAnnex` and stores its leaked `Arc` pointer in the
  /// internal `ANNEX_SLOT`. The reference count is balanced by
  /// `dispose_annex` (and temporarily bumped by `get_annex_arc`).
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must be empty — creating a second annex would leak the first.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
988
989 unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
990 let annex = self.get_annex_mut();
994 {
995 let _lock = annex.isolate_mutex.lock().unwrap();
996 annex.isolate = null_mut();
997 }
998
999 let create_param_allocations =
1001 std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1002 annex.slots.clear();
1003
1004 for finalizer in annex.finalizer_map.drain() {
1006 if let FinalizerCallback::Guaranteed(callback) = finalizer {
1007 callback();
1008 }
1009 }
1010
1011 unsafe { Arc::from_raw(annex) };
1013 self.set_data(0, null_mut());
1014
1015 create_param_allocations
1016 }
1017
  /// Returns a shared reference to the annex stored in the internal slot;
  /// panics if the annex was never created or already disposed.
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  /// Mutable counterpart of `get_annex`; same panic conditions.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }
1033
  /// Stores the snapshot creator in the annex; panics if one was already set.
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }

  /// Shared access to the per-isolate handle finalizer registry.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }

  /// Mutable access to the per-isolate handle finalizer registry.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }

  /// Clones the annex `Arc`. The raw pointer in the data slot represents one
  /// strong reference; re-leak a clone so that count stays balanced while
  /// returning an owned `Arc` to the caller.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1059
  /// Retrieves embedder data from the given user slot. User slots are offset
  /// past the slots this crate reserves internally.
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }

  /// Stores embedder data in the given user slot (offset past internal
  /// slots).
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }

  /// Number of data slots available to the embedder (total minus the slots
  /// reserved internally by this crate).
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }

  /// Raw (un-offset) slot read, used for this crate's internal slots.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }

  /// Raw (un-offset) slot write, used for this crate's internal slots.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1089
1090 #[inline(always)]
1119 pub fn get_slot<T: 'static>(&self) -> Option<&T> {
1120 self
1121 .get_annex()
1122 .slots
1123 .get(&TypeId::of::<T>())
1124 .map(|slot| unsafe { slot.borrow::<T>() })
1125 }
1126
1127 #[inline(always)]
1129 pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
1130 self
1131 .get_annex_mut()
1132 .slots
1133 .get_mut(&TypeId::of::<T>())
1134 .map(|slot| unsafe { slot.borrow_mut::<T>() })
1135 }
1136
1137 #[inline(always)]
1149 pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
1150 self
1151 .get_annex_mut()
1152 .slots
1153 .insert(TypeId::of::<T>(), RawSlot::new(value))
1154 .is_none()
1155 }
1156
1157 #[inline(always)]
1159 pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
1160 self
1161 .get_annex_mut()
1162 .slots
1163 .remove(&TypeId::of::<T>())
1164 .map(|slot| unsafe { slot.into_inner::<T>() })
1165 }
1166
  /// Enters this isolate on the current thread.
  ///
  /// # Safety
  /// Must be balanced with a later `exit`; the isolate must not already be
  /// entered in a way that violates V8's nesting rules.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }

  /// Exits this isolate on the current thread.
  ///
  /// # Safety
  /// Must balance a prior `enter` on the same thread.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }

  /// Notifies the engine of host memory pressure so it can adjust GC
  /// behavior. The level is narrowed to `u8` for the FFI call.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }

  /// Clears objects kept alive for `WeakRef`/`FinalizationRegistry`
  /// semantics.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }

  /// Requests an aggressive attempt to free memory.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }

  /// Collects heap statistics from the engine.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      // Zeroed storage is filled in by the C++ side before `assume_init`.
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }
1239
  /// Number of heap spaces that can be queried with
  /// `get_heap_space_statistics`.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }

  /// Statistics for the heap space at `index`, or `None` when the index is
  /// out of range (the FFI call returns `false`).
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      // Zeroed storage is filled in by the C++ side before `assume_init`.
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }
1271
  /// Enables/disables stack trace capture for uncaught exceptions, keeping at
  /// most `frame_limit` frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }

  /// Registers a listener for error messages; returns the FFI result flag.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }

  /// Registers a message listener restricted to the given severity mask.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1315
  /// Installs the `Error.prepareStackTrace` callback. The `MapFnTo` bound
  /// converts a Rust closure type into the platform-specific raw ABI
  /// function pointer (see `PrepareStackTraceCallback`).
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1339
  /// Installs a hook observing promise lifecycle events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }

  /// Installs the callback invoked on promise-rejection events.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }

  /// Installs the callback that settles promises for async Wasm operations.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }

  /// Installs the callback deciding whether Wasm code generation is allowed.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1384
  /// Installs the callback that populates `import.meta` for modules.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the dynamic-`import()` host callback; `to_c_fn` produces the
  /// platform-specific raw adapter for the given closure type.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }

  /// Installs the phase-aware dynamic-import host callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1434
  /// Installs the embedder callback that creates contexts for new
  /// ShadowRealms. The Rust `fn` is stored in an isolate slot and dispatched
  /// by a fixed trampoline; the trampoline is registered with V8 only the
  /// first time a callback is set (subsequent calls just replace the slot).
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // Trampoline: recover the stored callback from the isolate slot and
    // translate its Option<Local<Context>> into a raw pointer (null = fail).
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows wrapper adapting to the aggregate-return ABI: write the result
    // through the out-pointer and return it.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      // First registration: hook the trampoline up with V8.
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1484
  /// Installs the callback V8 invokes when it counts usage of a language
  /// feature (wrapper over `SetUseCounterCallback`).
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
1492
  /// Registers `callback` to run before garbage collections matching
  /// `gc_type_filter`. `data` is an opaque pointer passed back to the
  /// callback; the caller is responsible for its validity and lifetime.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1517
  /// Unregisters a GC-prologue callback previously added with
  /// [`Self::add_gc_prologue_callback`]; both `callback` and `data` must
  /// match the registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }
1531
  /// Registers `callback` to run after garbage collections matching
  /// `gc_type_filter`; counterpart of [`Self::add_gc_prologue_callback`].
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1551
  /// Unregisters a GC-epilogue callback previously added with
  /// [`Self::add_gc_epilogue_callback`]; both `callback` and `data` must
  /// match the registration.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1565
  /// Registers `callback` to be invoked when the heap approaches its size
  /// limit, giving the embedder a chance to raise the limit. `data` is
  /// passed back to the callback unchanged.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }
1580
  /// Unregisters a near-heap-limit callback. `heap_limit` restores the
  /// given limit if non-zero (per V8's `RemoveNearHeapLimitCallback`
  /// contract).
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1599
  /// Reports a change (positive or negative) in externally allocated
  /// memory that is kept alive by JS objects, so V8 can schedule GC
  /// accordingly. Returns the new total, as reported by V8.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }
1619
  /// Returns the C++ (cppgc) heap attached to this isolate, or `None` if
  /// the FFI call yields a null pointer (no heap attached).
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }
1624
  /// Installs the handler V8 invokes on out-of-memory errors.
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1629
  /// Returns the currently active microtask-queue policy.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }
1635
  /// Sets how microtasks are run: automatically after each task, or
  /// explicitly via [`Self::perform_microtask_checkpoint`].
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1641
  /// Runs all queued microtasks now (an explicit microtask checkpoint).
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }
1650
  /// Enqueues `microtask` (a JS function) on the isolate's microtask
  /// queue; it runs at the next microtask checkpoint.
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }
1656
  /// Enables or disables `Atomics.wait` for this isolate.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1664
  /// Installs the callback used for streaming Wasm compilation
  /// (`WebAssembly.compileStreaming`).
  ///
  /// `F: UnitType` means the callback type is zero-sized, so only a
  /// monomorphized trampoline fn pointer needs to cross the FFI boundary
  /// — the closure value itself (`_`) is never stored.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming,
      ),
  {
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1689
  /// Notifies V8 that the host's date/time configuration (e.g. time zone)
  /// may have changed, so cached values can be refreshed.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }
1710
  /// Returns `true` if V8 still has background work pending for this
  /// isolate (e.g. concurrent compilation tasks).
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1718
1719 #[inline(always)]
1729 pub fn request_garbage_collection_for_testing(
1730 &mut self,
1731 r#type: GarbageCollectionType,
1732 ) {
1733 unsafe {
1734 v8__Isolate__RequestGarbageCollectionForTesting(
1735 self.as_real_ptr(),
1736 match r#type {
1737 GarbageCollectionType::Full => 0,
1738 GarbageCollectionType::Minor => 1,
1739 },
1740 );
1741 }
1742 }
1743
  /// Disposes the underlying V8 isolate.
  ///
  /// # Safety
  /// Must be called at most once; the isolate (and any pointers into it)
  /// must not be used afterwards.
  unsafe fn dispose(&mut self) {
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1753
  /// Takes a heap snapshot, streaming the serialized output to `callback`
  /// in chunks. The callback's return value is forwarded to V8
  /// (presumably `true` = keep streaming — TODO confirm against the
  /// binding's contract).
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C trampoline: `arg` is a pointer to the caller's closure, which
    // lives on this function's stack for the duration of the FFI call.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          // `data` may be null for an empty chunk; `from_raw_parts`
          // requires a non-null pointer, so pass an empty slice instead.
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1791
  /// Sets the default context for snapshot creation.
  ///
  /// Panics if this isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn set_default_context(&mut self, context: Local<Context>) {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.set_default_context(context);
  }
1808
  /// Adds an extra context to the snapshot and returns its index, which
  /// can later be used to look the context up when deserializing.
  ///
  /// Panics if this isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn add_context(&mut self, context: Local<Context>) -> usize {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context(context)
  }
1826
  /// Attaches arbitrary isolate-level `data` to the snapshot and returns
  /// its retrieval index. `T` must be convertible to a [`Data`] handle.
  ///
  /// Panics if this isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
  where
    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
  {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_isolate_data(data)
  }
1847
  /// Attaches `data` to a specific `context` in the snapshot and returns
  /// its retrieval index; per-context counterpart of
  /// [`Self::add_isolate_data`].
  ///
  /// Panics if this isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn add_context_data<T>(
    &mut self,
    context: Local<Context>,
    data: Local<T>,
  ) -> usize
  where
    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
  {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context_data(context, data)
  }
1872}
1873
/// Rust-side bookkeeping attached to every V8 isolate; shared via `Arc`
/// so [`IsolateHandle`]s can reach the isolate from other threads.
pub(crate) struct IsolateAnnex {
  // Keeps allocations referenced by CreateParams alive for the isolate's
  // lifetime.
  create_param_allocations: Box<dyn Any>,
  // Type-keyed embedder storage backing get_slot/set_slot.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  // Present only when the isolate was created for snapshotting.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // Raw isolate pointer; may outlive validity — guarded by isolate_mutex
  // and checked for null by IsolateHandle methods.
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
1889
// SAFETY(review): cross-thread access to the raw `isolate` pointer goes
// through `isolate_mutex` (see IsolateHandle); the remaining fields are
// presumably only touched from the isolate's own thread — confirm.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1892
impl IsolateAnnex {
  /// Builds a fresh annex for `isolate`, taking ownership of the
  /// allocations that must outlive the isolate's create params.
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    Self {
      create_param_allocations,
      slots: HashMap::default(),
      finalizer_map: FinalizerMap::default(),
      maybe_snapshot_creator: None,
      isolate: isolate.as_real_ptr(),
      isolate_mutex: Mutex::new(()),
    }
  }
}
1908
impl Debug for IsolateAnnex {
  // Only pointer/mutex state is printed; the other fields (slots,
  // finalizers, snapshot creator) don't implement Debug.
  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
    f.debug_struct("IsolateAnnex")
      .field("isolate", &self.isolate)
      .field("isolate_mutex", &self.isolate_mutex)
      .finish()
  }
}
1917
/// A thread-safe, cloneable handle to an isolate that remains valid (but
/// inert) after the isolate is disposed; backed by the shared annex.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1926
1927impl IsolateHandle {
1928 pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1932 self.0.isolate
1933 }
1934
1935 #[inline(always)]
1936 fn new(isolate: &Isolate) -> Self {
1937 Self(isolate.get_annex_arc())
1938 }
1939
1940 #[inline(always)]
1948 pub fn terminate_execution(&self) -> bool {
1949 let _lock = self.0.isolate_mutex.lock().unwrap();
1950 if self.0.isolate.is_null() {
1951 false
1952 } else {
1953 unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1954 true
1955 }
1956 }
1957
1958 #[inline(always)]
1973 pub fn cancel_terminate_execution(&self) -> bool {
1974 let _lock = self.0.isolate_mutex.lock().unwrap();
1975 if self.0.isolate.is_null() {
1976 false
1977 } else {
1978 unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
1979 true
1980 }
1981 }
1982
1983 #[inline(always)]
1992 pub fn is_execution_terminating(&self) -> bool {
1993 let _lock = self.0.isolate_mutex.lock().unwrap();
1994 if self.0.isolate.is_null() {
1995 false
1996 } else {
1997 unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
1998 }
1999 }
2000
2001 #[allow(clippy::not_unsafe_ptr_arg_deref)]
2012 #[inline(always)]
2013 pub fn request_interrupt(
2014 &self,
2015 callback: InterruptCallback,
2016 data: *mut c_void,
2017 ) -> bool {
2018 let _lock = self.0.isolate_mutex.lock().unwrap();
2019 if self.0.isolate.is_null() {
2020 false
2021 } else {
2022 unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2023 true
2024 }
2025 }
2026}
2027
/// An owning wrapper around a V8 isolate: entered on creation, exited and
/// disposed on drop (see the `Drop` impl below for ordering constraints).
#[derive(Debug)]
pub struct OwnedIsolate {
  cxx_isolate: NonNull<RealIsolate>,
}
2033
2034impl OwnedIsolate {
2035 pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2036 let isolate = Self::new_already_entered(cxx_isolate);
2037 unsafe {
2038 isolate.enter();
2039 }
2040 isolate
2041 }
2042
2043 pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2044 let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2045 let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2046 owned_isolate
2048 }
2049}
2050
impl Drop for OwnedIsolate {
  // Teardown order matters: exit the isolate first, then free the Rust
  // annex, notify the platform, and finally dispose the C++ isolate.
  fn drop(&mut self) {
    unsafe {
      // A pending snapshot creator means `create_blob` was never called;
      // that path must consume the isolate instead of dropping it.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Isolates are entered on creation, so the currently-entered isolate
      // must be this one — enforces reverse-creation-order drops.
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2072
impl OwnedIsolate {
  /// Consumes the isolate and produces a startup snapshot blob.
  ///
  /// Panics if the isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // Keep create-param allocations alive until after blob creation.
    let _create_param_allocations = unsafe {
      self.dispose_annex()
    };

    // Skip OwnedIsolate's Drop: presumably the SnapshotCreator tears down
    // the underlying isolate itself — running Drop too would exit/dispose
    // it a second time. TODO(review): confirm against SnapshotCreator.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2101
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY(review): relies on `Isolate` being layout-compatible with
    // `NonNull<RealIsolate>` (presumably a transparent wrapper defined
    // earlier in this file) — the transmute only reinterprets the
    // reference type.
    unsafe {
      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
    }
  }
}
2110
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY(review): same layout-compatibility assumption as `Deref`
    // above — `Isolate` must wrap exactly a `NonNull<RealIsolate>`.
    unsafe {
      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
        &mut self.cxx_isolate,
      )
    }
  }
}
2120
impl AsMut<Isolate> for OwnedIsolate {
  // Goes through DerefMut to reach the underlying Isolate.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2126
impl AsMut<Isolate> for Isolate {
  // Identity conversion, so generic code can take `impl AsMut<Isolate>`.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2132
/// Safe accessor wrapper over V8's `HeapStatistics` C struct.
pub struct HeapStatistics(v8__HeapStatistics);
2138
impl HeapStatistics {
  /// Total heap size, in bytes.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Total heap size reserved for executable code, in bytes.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Total physical memory committed for the heap, in bytes.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Memory still available to the heap, in bytes.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Total memory reserved for global handles, in bytes.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Memory currently used by global handles, in bytes.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Heap memory currently in use, in bytes.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Hard limit on the heap size, in bytes.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Memory obtained through malloc, in bytes.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// External memory reported to the isolate, in bytes.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Peak memory obtained through malloc, in bytes.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Number of live native contexts.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Number of contexts detached but not yet collected.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Whether V8 overwrites (zaps) heap garbage (a debug feature).
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2212
/// Safe accessor wrapper over V8's `HeapSpaceStatistics` C struct.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2214
2215impl HeapSpaceStatistics {
2216 pub fn space_name(&self) -> &'static CStr {
2217 unsafe { CStr::from_ptr(self.0.space_name_) }
2218 }
2219
2220 pub fn space_size(&self) -> usize {
2221 self.0.space_size_
2222 }
2223
2224 pub fn space_used_size(&self) -> usize {
2225 self.0.space_used_size_
2226 }
2227
2228 pub fn space_available_size(&self) -> usize {
2229 self.0.space_available_size_
2230 }
2231
2232 pub fn physical_space_size(&self) -> usize {
2233 self.0.physical_space_size_
2234 }
2235}
2236
// Adapts a Rust `Error.prepareStackTrace`-style closure into the raw
// callback type, with per-platform ABI handling.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // NOTE(review): on Windows the result is written through `ret_ptr` and
  // that pointer is returned — this mirrors the out-pointer return ABI
  // used elsewhere in this file; confirm against the binding signature.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // Non-Windows: the result value is returned directly, wrapped in the
  // newtype the binding expects.
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2272
/// A pass-through hasher for `TypeId` keys: stores the single `u64` the
/// `TypeId` hash writes, instead of re-hashing an already-good hash.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // `Some` once the single expected `write_u64` has happened.
  state: Option<u64>,
}
2280
impl Hasher for TypeIdHasher {
  // NOTE(review): this assumes std's `TypeId` Hash impl calls `write_u64`
  // exactly once and never the byte-level `write` — re-verify on
  // toolchain upgrades, since that is an implementation detail of std.
  fn write(&mut self, _bytes: &[u8]) {
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // Store the value verbatim; the debug assert catches an unexpected
    // second write, which would silently discard the first.
    let prev_state = self.state.replace(value);
    debug_assert_eq!(prev_state, None);
  }

  #[inline]
  fn finish(&self) -> u64 {
    // Panics if finish() is called before any write — a usage bug.
    self.state.unwrap()
  }
}
2299
/// `BuildHasher` producing [`TypeIdHasher`]s, for `HashMap<TypeId, _>`.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;
2305
2306impl BuildHasher for BuildTypeIdHasher {
2307 type Hasher = TypeIdHasher;
2308
2309 #[inline]
2310 fn build_hasher(&self) -> Self::Hasher {
2311 Default::default()
2312 }
2313}
2314
// Compile-time guard: TypeIdHasher above relies on TypeId being a plain
// 64- or 128-bit value; fail the build if a toolchain changes that.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2325
/// Type-erased storage cell for isolate slots: values that fit in a
/// `usize` are stored inline, larger ones behind a `Box` (see
/// `needs_box`). `dtor` is set only when the stored type needs dropping.
pub(crate) struct RawSlot {
  data: RawSlotData,
  dtor: Option<RawSlotDtor>,
}
2330
// Inline storage: one pointer-sized, possibly-uninitialized word.
type RawSlotData = MaybeUninit<usize>;
// Monomorphized destructor invoked by RawSlot::drop when needed.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
2333
impl RawSlot {
  /// Stores `value`, boxing it first if it does not fit (size or
  /// alignment) in the inline word.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  /// Borrows the stored value.
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with; the
  /// boxed/inline branch must match the one taken in `new`.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Stored as Box<T>; &Box<T> deref-coerces to &T at the return.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  /// Mutably borrows the stored value.
  ///
  /// # Safety
  /// Same contract as [`Self::borrow`].
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  /// Moves the stored value out, consuming the slot.
  ///
  /// # Safety
  /// Same contract as [`Self::borrow`].
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // The value has been moved out; skip Drop so the dtor does not run
      // on the now-logically-empty storage.
      forget(self);
      value
    }
  }

  // True when T cannot live in the inline word (too big or over-aligned).
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // Writes an inline-storable value (T itself, or the Box wrapping it)
  // into the word and records a dtor if B needs dropping.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // Monomorphized destructor: drops the B stored inline in `data`.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
2413
impl Drop for RawSlot {
  fn drop(&mut self) {
    // `dtor` is None for types that don't need dropping (and after
    // into_inner, which forgets the slot entirely).
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}
2421
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // Reinterprets the stored NonNull pointer as an &Isolate (same
    // layout assumption as the Deref impl above).
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  // Identity conversion, so generic code can take `impl AsRef<Isolate>`.
  fn as_ref(&self) -> &Isolate {
    self
  }
}