1use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapSpaceStatistics;
22use crate::binding::v8__HeapStatistics;
23use crate::binding::v8__Isolate__UseCounterFeature;
24pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
25use crate::cppgc::Heap;
26use crate::external_references::ExternalReference;
27use crate::function::FunctionCallbackInfo;
28use crate::gc::GCCallbackFlags;
29use crate::gc::GCType;
30use crate::handle::FinalizerCallback;
31use crate::handle::FinalizerMap;
32use crate::isolate_create_params::CreateParams;
33use crate::isolate_create_params::raw;
34use crate::promise::PromiseRejectMessage;
35use crate::snapshot::SnapshotCreator;
36use crate::support::MapFnFrom;
37use crate::support::MapFnTo;
38use crate::support::Opaque;
39use crate::support::ToCFn;
40use crate::support::UnitType;
41use crate::support::char;
42use crate::support::int;
43use crate::support::size_t;
44use crate::wasm::WasmStreaming;
45use crate::wasm::trampoline;
46use std::ffi::CStr;
47
48use std::any::Any;
49use std::any::TypeId;
50use std::borrow::Cow;
51use std::collections::HashMap;
52use std::ffi::c_void;
53use std::fmt::{self, Debug, Formatter};
54use std::hash::BuildHasher;
55use std::hash::Hasher;
56use std::mem::MaybeUninit;
57use std::mem::align_of;
58use std::mem::forget;
59use std::mem::needs_drop;
60use std::mem::size_of;
61use std::ops::Deref;
62use std::ops::DerefMut;
63use std::pin::pin;
64use std::ptr;
65use std::ptr::NonNull;
66use std::ptr::addr_of_mut;
67use std::ptr::drop_in_place;
68use std::ptr::null_mut;
69use std::sync::Arc;
70use std::sync::Mutex;
71
/// Controls how the isolate schedules the microtask queue.
///
/// `#[repr(C)]` with explicit discriminants so values can cross the FFI
/// boundary unchanged; the gap at 1 presumably mirrors a C++ enum value
/// (V8's deprecated scoped policy) that is not exposed here — confirm
/// against v8.h.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  /// Microtasks run only when a checkpoint is explicitly performed.
  Explicit = 0,
  /// Microtasks are run automatically by V8.
  Auto = 2,
}

/// Memory pressure level reported to V8 via
/// `Isolate::memory_pressure_notification`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}

/// Whether V8 should re-query the host time zone when notified of a
/// date/time configuration change (see
/// `date_time_configuration_change_notification`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  /// Keep the currently cached time zone.
  Skip = 0,
  /// Re-detect the host time zone.
  Redetect = 1,
}

/// Promise lifecycle event delivered to a `PromiseHook`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}

/// Kind of collection; presumably used with
/// `v8__Isolate__RequestGarbageCollectionForTesting` — confirm at call
/// sites outside this view.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  Full,
  Minor,
}

/// C ABI callback for script error messages: receives the message object
/// and the exception value.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);
150
bitflags! {
  /// Bit mask of message severities, used with
  /// `add_message_listener_with_error_level` to filter which messages a
  /// listener receives.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    // All five level bits set.
    const ALL = (1 << 5) - 1;
  }
}
163
/// C ABI hook invoked on every promise lifecycle event (see
/// `PromiseHookType`); the third argument is a hook-specific companion
/// value.
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// C ABI callback invoked for promise rejection events; details are carried
/// in the `PromiseRejectMessage`.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome reported to a `WasmAsyncResolvePromiseCallback`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// C ABI callback used by V8 to settle the promise produced by an async
/// wasm operation; resolves or rejects with the given value depending on
/// `WasmAsyncSuccess`.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

/// Returns whether wasm code generation is allowed in the given context.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// Lets the embedder populate the `import.meta` object for a module.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
196
/// Embedder hook for dynamic `import()`: maps an import request
/// (host-defined options, resource name, specifier, import attributes) to a
/// promise for the module namespace, or `None` when an exception is
/// pending.
///
/// Implemented for any compatible zero-sized closure via the blanket impl
/// below; `to_c_fn` lowers it to the platform-specific raw C ABI function.
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}

/// Unix ABI: the promise pointer (null = pending exception) is returned
/// directly.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

/// Windows x64 ABI: the result is written through a hidden out-pointer
/// passed (and returned) as the first parameter.
#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
269
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Layer 1: set up a CallbackScope for the context and invoke the user
    // closure (`F::get()` materializes the zero-sized closure value — see
    // `UnitType`).
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Layer 2 (unix): translate `Option<Local<Promise>>` into the raw
    // pointer V8 expects; `None` becomes null.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Layer 2 (windows x64): same, but the result is written through the
    // hidden return-value out-pointer, which is then returned.
    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
358
/// Phase-aware variant of `HostImportModuleDynamicallyCallback`: the hook
/// additionally receives the requested `ModuleImportPhase`.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}

/// Unix ABI: the promise pointer (null = pending exception) is returned
/// directly.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

/// Windows x64 ABI: result passed through a hidden out-pointer (first
/// parameter, also returned).
#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
429
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Layer 1: enter a CallbackScope and forward to the user closure,
    // threading the import phase through unchanged.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Layer 2 (unix): `None` (pending exception) becomes a null pointer.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Layer 2 (windows x64): write the result through the hidden
    // return-value out-pointer and return that pointer.
    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
525
/// Embedder hook that creates the `Context` backing a new ShadowRealm;
/// `None` signals failure (pending exception).
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback, invoked with the user data pointer it was
/// registered with.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback run on the isolate thread in response to `request_interrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Invoked when the heap approaches its limit; returns the new heap limit
/// in bytes.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

/// Mirror of V8's C++ `OOMDetails` struct — field layout must match v8.h.
#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  // Additional detail as a C string; presumably may be null — confirm
  // against v8.h before dereferencing unconditionally.
  pub detail: *const char,
}

/// Handler of last resort for out-of-memory conditions; `location` is a C
/// string identifying where the OOM occurred.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);

/// Embedder implementation of `Error.prepareStackTrace`.
///
/// Windows ABI: the `Local<Value>` result travels through a hidden
/// out-pointer passed (and returned) as the first parameter.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

/// Non-Windows: the result is a single pointer wrapped in a `#[repr(C)]`
/// newtype so it is returned directly.
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Re-export of the binding-level use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Invoked when V8 increments a use counter for `feature`.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
591
// Declarations of the C++ glue functions implemented in this crate's C++
// bindings. All operate on a raw `RealIsolate` pointer; the safe wrappers
// live on `Isolate` below. Signatures must stay in sync with the C++ side.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  // Embedder data slots.
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  // GC observation hooks.
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  // Embedder callback installers.
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The ShadowRealm callback signature differs per platform because of the
  // Windows return-by-hidden-pointer ABI for returned Local values.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  // Termination control (thread-safe on the C++ side; takes *const).
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
767
/// A non-owning, non-null reference to a live V8 isolate.
///
/// `#[repr(transparent)]` over the raw pointer, so references to the
/// pointer can be reinterpreted as references to `Isolate` (see the
/// `ref_from_*` / `from_raw_ref*` helpers below).
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);

/// A possibly-null raw isolate pointer suitable for crossing the FFI
/// boundary; also `#[repr(transparent)]`.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
785
786impl UnsafeRawIsolatePtr {
787 pub fn null() -> Self {
788 Self(std::ptr::null_mut())
789 }
790
791 pub fn is_null(&self) -> bool {
792 self.0.is_null()
793 }
794}
795
796pub unsafe fn current_raw_isolate_ptr() -> Option<UnsafeRawIsolatePtr> {
803 let ptr = unsafe { v8__Isolate__GetCurrent() };
804 if ptr.is_null() {
805 None
806 } else {
807 Some(UnsafeRawIsolatePtr(ptr))
808 }
809}
810
/// Opaque stand-in for the C++ `v8::Isolate` object; never instantiated on
/// the Rust side, only handled behind pointers.
#[repr(C)]
pub struct RealIsolate(Opaque);
813
814impl Isolate {
  /// Raw pointer to the underlying C++ `v8::Isolate`.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Escapes to the nullable, FFI-friendly pointer wrapper.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }

  /// Converts a raw pointer wrapper back into an `Isolate`; panics if the
  /// wrapped pointer is null.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
    Self(NonNull::new(ptr.0).unwrap())
  }

  /// Unchecked variant of `from_raw_isolate_ptr`.
  ///
  /// # Safety
  /// The wrapped pointer must be non-null.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }

  /// Wraps a raw C++ isolate pointer without a null check.
  ///
  /// # Safety
  /// `ptr` must be non-null.
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }

  /// Wraps a raw C++ isolate pointer; panics on null.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }
842
843 #[inline]
844 pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
845 if ptr.is_null() {
846 panic!("UnsafeRawIsolatePtr is null");
847 }
848 unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
849 }
850
851 #[inline]
852 pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
853 ptr: &UnsafeRawIsolatePtr,
854 ) -> &Self {
855 unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
856 }
857
858 #[inline]
859 pub unsafe fn ref_from_raw_isolate_ptr_mut(
860 ptr: &mut UnsafeRawIsolatePtr,
861 ) -> &mut Self {
862 if ptr.is_null() {
863 panic!("UnsafeRawIsolatePtr is null");
864 }
865 unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
866 }
867
868 #[inline]
869 pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
870 ptr: &mut UnsafeRawIsolatePtr,
871 ) -> &mut Self {
872 unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
873 }
874
  /// Wraps an already-validated non-null pointer without checks.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets `&NonNull<RealIsolate>` as `&Isolate`; sound because
  /// `Isolate` is `#[repr(transparent)]` over the pointer.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable variant of `from_raw_ref`.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }

  // Embedder data slot 0 holds the `IsolateAnnex` pointer; all slots below
  // `INTERNAL_DATA_SLOT_COUNT` are reserved by this crate and hidden from
  // the public `get_data`/`set_data` API (which offsets past them).
  const ANNEX_SLOT: u32 = 0;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  /// Sanity-checks that V8 provides at least as many embedder data slots as
  /// this crate reserves for internal use.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
905
  /// Shared construction path: finalizes the `CreateParams`, creates the
  /// raw C++ isolate, and attaches the annex before returning the raw
  /// pointer.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Post-construction setup: verifies the slot layout and installs the
  /// annex holding crate-side per-isolate state.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }

  /// Creates a new isolate, returned as an owning `OwnedIsolate` wrapper.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }

  /// Creates an isolate set up for snapshot creation.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }

  /// Like `snapshot_creator`, but seeds the isolate from an existing
  /// snapshot blob.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }

  /// Convenience shorthand for `CreateParams::default()`.
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }

  /// Returns a handle usable from other threads (e.g. for termination or
  /// interrupt requests).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
963
964 #[inline(always)]
966 pub fn terminate_execution(&self) -> bool {
967 self.thread_safe_handle().terminate_execution()
968 }
969
970 #[inline(always)]
972 pub fn cancel_terminate_execution(&self) -> bool {
973 self.thread_safe_handle().cancel_terminate_execution()
974 }
975
976 #[inline(always)]
978 pub fn is_execution_terminating(&self) -> bool {
979 self.thread_safe_handle().is_execution_terminating()
980 }
981
982 pub(crate) fn create_annex(
983 &mut self,
984 create_param_allocations: Box<dyn Any>,
985 ) {
986 let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
987 let annex_ptr = Arc::into_raw(annex_arc);
988 assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
989 self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
990 }
991
992 unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
993 let annex = self.get_annex_mut();
997 {
998 let _lock = annex.isolate_mutex.lock().unwrap();
999 annex.isolate = null_mut();
1000 }
1001
1002 let create_param_allocations =
1004 std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1005 annex.slots.clear();
1006
1007 for finalizer in annex.finalizer_map.drain() {
1009 if let FinalizerCallback::Guaranteed(callback) = finalizer {
1010 callback();
1011 }
1012 }
1013
1014 unsafe { Arc::from_raw(annex) };
1016 self.set_data(0, null_mut());
1017
1018 create_param_allocations
1019 }
1020
  /// Borrows the annex stored in `ANNEX_SLOT`; panics if it has not been
  /// created (or was already disposed).
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  /// Mutable variant of `get_annex`.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }

  /// Stores the `SnapshotCreator` in the annex; panics if one was already
  /// set.
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }

  /// Read access to the weak-handle finalizer registry kept in the annex.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }

  /// Mutable access to the finalizer registry.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }

  /// Clones the annex `Arc` without consuming the reference leaked into the
  /// isolate's data slot: `from_raw` + `clone` + `into_raw` leaves the
  /// refcount net +1 for the returned Arc.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }

  /// Retrieves embedder data from public slot `slot`; public slots are
  /// offset past the crate's internal slots.
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }

  /// Stores embedder data in public slot `slot`.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }

  /// Number of data slots available to the embedder (total minus the slots
  /// this crate reserves).
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }

  /// Raw slot access, including the crate-internal slots.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }

  /// Raw slot write, including the crate-internal slots.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1092
  /// Returns a reference to the stored slot value of type `T`, if any.
  /// Slots are keyed by `TypeId`: at most one value per type.
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow::<T>() })
  }

  /// Mutable variant of `get_slot`.
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }

  /// Stores `value` in the type-keyed slot map, replacing (and dropping)
  /// any previous value of type `T`. Returns `true` when no previous value
  /// existed.
  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }

  /// Removes and returns the stored slot value of type `T`, if present.
  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      .remove(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }
1169
  /// Enters this isolate on the current thread.
  ///
  /// # Safety
  /// Must be balanced by a later `exit` on the same thread; entering an
  /// isolate concurrently from multiple threads is not allowed.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }

  /// Exits this isolate on the current thread.
  ///
  /// # Safety
  /// Must pair with a preceding `enter` on the same thread.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }

  /// Notifies V8 of the host's memory pressure level; V8 may respond by
  /// collecting garbage.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }

  /// Clears V8's set of kept objects (per the V8 API, related to `WeakRef`
  /// bookkeeping — see v8.h for exact semantics).
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }

  /// Signals low system memory; V8 may respond with aggressive GC.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }

  /// Snapshot of the isolate's heap usage counters.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      // Zero-initialized storage; the C++ call fills it in.
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }

  /// Number of distinct heap spaces, for use with
  /// `get_heap_space_statistics`.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }

  /// Statistics for heap space `index`; `None` when the underlying call
  /// reports failure (e.g. index out of range).
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }
1274
  /// Enables/disables capturing stack traces for uncaught exceptions,
  /// keeping at most `frame_limit` frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }

  /// Registers a listener for (error) messages; returns the bool reported
  /// by V8.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }

  /// Like `add_message_listener`, but only delivers messages whose severity
  /// bits intersect `message_levels`.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }

  /// Installs the embedder implementation of `Error.prepareStackTrace`;
  /// `MapFnTo` lowers the given closure to the raw C ABI signature.
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1342
  /// Installs a hook observing promise lifecycle events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }

  /// Installs the callback notified about promise rejection events.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }

  /// Installs the callback V8 uses to settle async-wasm promises.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }

  /// Installs the callback consulted before wasm code generation is
  /// allowed in a context.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the callback that populates `import.meta` objects.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the dynamic-`import()` handler; the closure is lowered to
  /// the platform C ABI via `HostImportModuleDynamicallyCallback::to_c_fn`.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }

  /// Variant of the dynamic-import handler that also receives the
  /// requested `ModuleImportPhase`.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1437
  /// Installs the callback that creates the backing `Context` for new
  /// ShadowRealms.
  ///
  /// The user callback (a plain `fn` pointer) is stashed in the isolate's
  /// type-keyed slot map and looked up again inside the C ABI trampoline,
  /// so the trampoline only needs to be registered with V8 on the first
  /// call; later calls merely replace the slot value.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // Trampoline: enter a scope for the initiator context, fetch the stored
    // callback, and translate Option<Local<Context>> into a raw pointer
    // (null = creation failed).
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows ABI shim: the result is written through (and returned as)
    // the hidden out-pointer.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // Only register the trampoline with V8 once per isolate.
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1487
  /// Sets the callback V8 invokes to report use-counter features (wrapper
  /// over `Isolate::SetUseCounterCallback`).
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
1495
  /// Adds a callback invoked before garbage collections of the kinds
  /// selected by `gc_type_filter`; `data` is forwarded to V8 and passed
  /// back to `callback` on each invocation.
  // The clippy allow: `data` is an opaque pointer handed to C, never
  // dereferenced here.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1520
  /// Removes a GC prologue callback previously registered with the same
  /// `callback`/`data` pair.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }
1534
  /// Adds a callback invoked after garbage collections of the kinds
  /// selected by `gc_type_filter`; `data` is forwarded to V8 and passed
  /// back to `callback` on each invocation.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1554
  /// Removes a GC epilogue callback previously registered with the same
  /// `callback`/`data` pair.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1568
  /// Adds a callback invoked when the heap size approaches its limit;
  /// `data` is forwarded to V8 and passed back to `callback`.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }
1583
  /// Removes a previously added near-heap-limit callback.
  ///
  /// `heap_limit` is forwarded to V8 verbatim; per V8's documentation a
  /// non-zero value restores that heap limit — confirm against `v8.h`.
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1602
  /// Reports a change (positive or negative) in externally allocated memory
  /// that is kept alive by JavaScript objects, and returns the updated
  /// total as reported by V8.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }
1622
  /// Returns a reference to the C++ (`cppgc`) heap attached to this
  /// isolate, or `None` if the binding returns a null pointer.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }
1627
  /// Sets the handler V8 invokes on out-of-memory errors (wrapper over
  /// `Isolate::SetOOMErrorHandler`).
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1632
  /// Returns the isolate's current microtasks policy.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }
1638
  /// Sets how microtasks are run on this isolate.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1644
  /// Runs a microtask checkpoint on the isolate (wrapper over V8's
  /// `Isolate::PerformMicrotaskCheckpoint`).
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }
1653
  /// Enqueues `microtask` (a JavaScript function) on the isolate's
  /// microtask queue.
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }
1659
  /// Controls whether `Atomics.wait` may block on this isolate.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1667
  /// Sets the callback invoked when WebAssembly streaming compilation
  /// starts.
  ///
  /// `F` must be a zero-sized type (`UnitType` bound): only its *type* is
  /// used — `trampoline::<F>()` materializes the C callback from it, which
  /// is why the value parameter is ignored (`_`).
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming,
      ),
  {
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1692
  /// Notifies V8 that the host's date/time configuration (e.g. the time
  /// zone) may have changed, so cached values can be refreshed.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }
1713
  /// Returns `true` if V8 reports background tasks pending for this
  /// isolate.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1721
  /// Requests a garbage collection, for testing purposes only.
  ///
  /// NOTE(review): V8 documents this as legal only when GC is exposed for
  /// testing (e.g. `--expose-gc`) — confirm flag requirements before use.
  #[inline(always)]
  pub fn request_garbage_collection_for_testing(
    &mut self,
    r#type: GarbageCollectionType,
  ) {
    unsafe {
      v8__Isolate__RequestGarbageCollectionForTesting(
        self.as_real_ptr(),
        // Map the Rust enum onto the integer values the C binding expects.
        match r#type {
          GarbageCollectionType::Full => 0,
          GarbageCollectionType::Minor => 1,
        },
      );
    }
  }
1746
  /// Disposes the underlying V8 isolate.
  ///
  /// # Safety
  ///
  /// The isolate must not be entered or used in any way after this call.
  unsafe fn dispose(&mut self) {
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1756
  /// Takes a heap snapshot and streams the serialized data to `callback` in
  /// chunks of bytes.
  ///
  /// A zero-length chunk is delivered as an empty slice so that
  /// `from_raw_parts` is never called with a potentially null pointer. The
  /// callback's `bool` return value is forwarded to V8 — presumably whether
  /// to continue streaming; confirm against `v8::OutputStream`.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C ABI shim: `arg` points at the caller's closure, which stays alive
    // on this stack frame for the duration of the FFI call below.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1794
  /// Sets the default context for the snapshot being built.
  ///
  /// # Panics
  ///
  /// Panics if this isolate was not created for snapshotting (the annex has
  /// no `SnapshotCreator`).
  #[inline(always)]
  pub fn set_default_context(&mut self, context: Local<Context>) {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.set_default_context(context);
  }
1811
  /// Adds `context` to the snapshot and returns the index the
  /// `SnapshotCreator` assigns to it.
  ///
  /// # Panics
  ///
  /// Panics if this isolate was not created for snapshotting.
  #[inline(always)]
  pub fn add_context(&mut self, context: Local<Context>) -> usize {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context(context)
  }
1829
  /// Attaches isolate-level `data` to the snapshot and returns the index
  /// assigned by the `SnapshotCreator`. The bound converts any
  /// `Local<T>` into the `Local<Data>` the creator stores.
  ///
  /// # Panics
  ///
  /// Panics if this isolate was not created for snapshotting.
  #[inline(always)]
  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
  where
    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
  {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_isolate_data(data)
  }
1850
  /// Attaches `data` to `context` in the snapshot and returns the index
  /// assigned by the `SnapshotCreator`.
  ///
  /// # Panics
  ///
  /// Panics if this isolate was not created for snapshotting.
  #[inline(always)]
  pub fn add_context_data<T>(
    &mut self,
    context: Local<Context>,
    data: Local<T>,
  ) -> usize
  where
    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
  {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context_data(context, data)
  }
1875}
1876
/// Rust-side state associated with a V8 isolate, shared (via `Arc`) between
/// the isolate itself and any `IsolateHandle`s.
pub(crate) struct IsolateAnnex {
  // Owns heap data originating from `CreateParams` for the isolate's
  // lifetime.
  create_param_allocations: Box<dyn Any>,
  // Type-indexed embedder slots; see `RawSlot`/`BuildTypeIdHasher`.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  // Present only for isolates created for snapshotting.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // Raw isolate pointer; nulled out when the isolate is disposed.
  isolate: *mut RealIsolate,
  // Serializes cross-thread access to `isolate` (see `IsolateHandle`).
  isolate_mutex: Mutex<()>,
}

// SAFETY: the annex is shared across threads through `IsolateHandle`;
// cross-thread use of the raw `isolate` pointer is synchronized via
// `isolate_mutex`. NOTE(review): soundness also assumes `slots` and
// `finalizer_map` are only touched from the isolate's own thread — confirm.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}

impl IsolateAnnex {
  // Builds the annex for `isolate`, retaining `create_param_allocations`
  // (allocations the isolate configuration borrows from).
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    Self {
      create_param_allocations,
      slots: HashMap::default(),
      finalizer_map: FinalizerMap::default(),
      maybe_snapshot_creator: None,
      isolate: isolate.as_real_ptr(),
      isolate_mutex: Mutex::new(()),
    }
  }
}

// Only the pointer and mutex are printed; the remaining fields do not
// implement `Debug`.
impl Debug for IsolateAnnex {
  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
    f.debug_struct("IsolateAnnex")
      .field("isolate", &self.isolate)
      .field("isolate_mutex", &self.isolate_mutex)
      .finish()
  }
}
1920
/// A cloneable, thread-safe handle to an isolate, usable from other threads
/// to request termination or interrupts while the isolate runs.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1929
1930impl IsolateHandle {
1931 pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1935 self.0.isolate
1936 }
1937
1938 #[inline(always)]
1939 fn new(isolate: &Isolate) -> Self {
1940 Self(isolate.get_annex_arc())
1941 }
1942
1943 #[inline(always)]
1951 pub fn terminate_execution(&self) -> bool {
1952 let _lock = self.0.isolate_mutex.lock().unwrap();
1953 if self.0.isolate.is_null() {
1954 false
1955 } else {
1956 unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1957 true
1958 }
1959 }
1960
1961 #[inline(always)]
1976 pub fn cancel_terminate_execution(&self) -> bool {
1977 let _lock = self.0.isolate_mutex.lock().unwrap();
1978 if self.0.isolate.is_null() {
1979 false
1980 } else {
1981 unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
1982 true
1983 }
1984 }
1985
1986 #[inline(always)]
1995 pub fn is_execution_terminating(&self) -> bool {
1996 let _lock = self.0.isolate_mutex.lock().unwrap();
1997 if self.0.isolate.is_null() {
1998 false
1999 } else {
2000 unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
2001 }
2002 }
2003
2004 #[allow(clippy::not_unsafe_ptr_arg_deref)]
2015 #[inline(always)]
2016 pub fn request_interrupt(
2017 &self,
2018 callback: InterruptCallback,
2019 data: *mut c_void,
2020 ) -> bool {
2021 let _lock = self.0.isolate_mutex.lock().unwrap();
2022 if self.0.isolate.is_null() {
2023 false
2024 } else {
2025 unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2026 true
2027 }
2028 }
2029}
2030
/// An isolate whose lifetime is owned by Rust: dropping it exits and
/// disposes the underlying V8 isolate (see the `Drop` impl).
#[derive(Debug)]
pub struct OwnedIsolate {
  // Non-null pointer to the C++ isolate.
  cxx_isolate: NonNull<RealIsolate>,
}
2036
2037impl OwnedIsolate {
2038 pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2039 let isolate = Self::new_already_entered(cxx_isolate);
2040 unsafe {
2041 isolate.enter();
2042 }
2043 isolate
2044 }
2045
2046 pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2047 let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2048 let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2049 owned_isolate
2051 }
2052
2053 pub unsafe fn new_for_locker(params: CreateParams) -> Self {
2065 let cxx_isolate = Isolate::new_impl(params);
2066 Self::new_already_entered(cxx_isolate)
2067 }
2068
2069 pub fn is_current(&self) -> bool {
2072 unsafe {
2073 if let Some(current) = current_raw_isolate_ptr() {
2074 let this = self.as_raw_isolate_ptr();
2075 !this.is_null() && current.0 == this.0
2076 } else {
2077 false
2078 }
2079 }
2080 }
2081}
2082
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      // Snapshot isolates must be consumed through `create_blob` (which
      // skips this Drop via `mem::forget`); reaching here with a live
      // SnapshotCreator is a usage error.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Balance the enter performed at construction before disposing. If
      // some other isolate is current on this thread, briefly take a locker
      // to run the enter/exit pair on this one instead.
      // NOTE(review): exact requirements follow the Locker protocol —
      // confirm against v8::Locker documentation.
      let is_current =
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent());
      if is_current {
        self.exit();
      } else {
        let locker = crate::Locker::new(self);
        locker.enter();
        locker.exit();
      }
      // Tear down the Rust-side annex, notify the platform, then free the
      // V8 isolate itself — in that order.
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2106
impl OwnedIsolate {
  /// Consumes the isolate and produces the startup snapshot blob.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created for snapshotting.
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // Free the annex now; the allocations it owned remain alive until the
    // end of this scope.
    let _create_param_allocations = unsafe {
      self.dispose_annex()
    };

    // Skip OwnedIsolate's Drop: NOTE(review): the SnapshotCreator appears
    // to take responsibility for disposing the raw isolate (otherwise Drop
    // would double-dispose) — confirm in the snapshot module.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2135
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: reinterprets `&NonNull<RealIsolate>` as `&Isolate`.
    // NOTE(review): relies on `Isolate` having the same layout as
    // `NonNull<RealIsolate>` — confirm against its declaration.
    unsafe {
      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
    }
  }
}
2144
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: mutable counterpart of the `Deref` impl above; same layout
    // assumption applies.
    unsafe {
      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
        &mut self.cxx_isolate,
      )
    }
  }
}
2154
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Deref-coerces through the `DerefMut` impl.
    self
  }
}

impl AsMut<Isolate> for Isolate {
  // Identity conversion, so generic code can accept either form.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2166
/// A snapshot of isolate heap statistics, mirroring `v8::HeapStatistics`;
/// each getter reads the corresponding field of the C++ struct.
pub struct HeapStatistics(v8__HeapStatistics);

impl HeapStatistics {
  /// Total size of the V8 heap.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Portion of the heap reserved for executable code.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Physical memory committed for the heap.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Memory still available to the heap.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Total size reserved for global handles.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Size of global handles currently in use.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Heap memory currently in use.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Configured upper limit of the heap size.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Memory obtained through malloc.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// External memory reported to the isolate (see
  /// `adjust_amount_of_external_allocated_memory`).
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Peak of `malloced_memory` observed so far.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Number of native contexts currently alive.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Number of contexts detached but not yet collected.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Whether V8 overwrites freed heap memory ("zapping"), a debug feature.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2246
/// Statistics for a single heap space, mirroring `v8::HeapSpaceStatistics`.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);

impl HeapSpaceStatistics {
  /// Name of the heap space.
  ///
  /// NOTE(review): the `'static` lifetime assumes V8 returns a pointer to a
  /// statically allocated string, as its documentation states — confirm.
  pub fn space_name(&self) -> &'static CStr {
    unsafe { CStr::from_ptr(self.0.space_name_) }
  }

  /// Reserved size of the space.
  pub fn space_size(&self) -> usize {
    self.0.space_size_
  }

  /// Memory of the space currently in use.
  pub fn space_used_size(&self) -> usize {
    self.0.space_used_size_
  }

  /// Memory still available to the space.
  pub fn space_available_size(&self) -> usize {
    self.0.space_available_size_
  }

  /// Physical memory committed for the space.
  pub fn physical_space_size(&self) -> usize {
    self.0.physical_space_size_
  }
}
2270
// Adapts a zero-sized Rust closure type `F` into the C-ABI
// `PrepareStackTraceCallback` (invoked by V8 to build `Error.stack`).
//
// On Windows the result `Local` travels through a hidden out-pointer
// (`ret_ptr`), which must also be returned; elsewhere it is returned by
// value wrapped in `PrepareStackTraceCallbackRet`.
// NOTE(review): this mirrors an ABI difference encoded in the binding
// declarations — confirm there.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      // Enter a callback scope for the context the error belongs to.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      // Write the resulting Local through the out-pointer and return it.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2306
/// A trivial hasher for `TypeId` keys.
///
/// A `TypeId` already contains a high-quality hash, so this hasher simply
/// records the single `u64` it is fed and hands it back from `finish()`.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  fn write(&mut self, _bytes: &[u8]) {
    // Keys are expected to hash themselves via `write_u64` only.
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // Exactly one 64-bit write per key: in debug builds, catch any second
    // write before recording the value.
    debug_assert!(self.state.is_none());
    self.state = Some(value);
  }

  #[inline]
  fn finish(&self) -> u64 {
    // Panics if no value was ever written — the hasher was misused.
    self.state.unwrap()
  }
}

/// `BuildHasher` producing `TypeIdHasher`s, for use as the hasher of a
/// `HashMap` keyed by `TypeId`.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;

impl BuildHasher for BuildTypeIdHasher {
  type Hasher = TypeIdHasher;

  #[inline]
  fn build_hasher(&self) -> Self::Hasher {
    TypeIdHasher::default()
  }
}
2348
// Compile-time guard: the type-indexed slot machinery assumes `TypeId` has
// the size and alignment of a `u64` or `u128`; fail the build if a future
// std release changes that.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2359
/// Type-erased storage for one embedder slot value.
///
/// Values that fit in (and align to) a `usize` are stored inline in `data`;
/// anything larger is boxed and the `Box` pointer is stored inline instead.
/// `dtor` remembers how to drop whatever was stored, or is `None` when the
/// stored representation needs no drop.
pub(crate) struct RawSlot {
  data: RawSlotData,
  dtor: Option<RawSlotDtor>,
}

// One pointer-sized, possibly uninitialized storage word.
type RawSlotData = MaybeUninit<usize>;
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();

impl RawSlot {
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    // Values too big or over-aligned for inline storage get boxed; the Box
    // pointer itself then fits by construction.
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  /// Borrows the stored value.
  ///
  /// # Safety
  ///
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  /// Mutably borrows the stored value.
  ///
  /// # Safety
  ///
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  /// Moves the stored value out of the slot.
  ///
  /// # Safety
  ///
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // The value has been moved out; skip Drop so the dtor does not run
      // on the now-logically-empty storage.
      forget(self);
      value
    }
  }

  // A `T` needs boxing when it does not fit inline or is over-aligned.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // Writes `value` (already inline-sized: either the value itself or a Box
  // of it) into fresh storage and records a dtor if `B` needs dropping.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // SAFETY (caller): `data` must hold a live, initialized `B`.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}

impl Drop for RawSlot {
  // Runs the recorded destructor, if the stored representation needed one.
  fn drop(&mut self) {
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}
2455
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // Same pointer reinterpretation as the `Deref` impl, via the dedicated
    // helper.
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  // Identity conversion, so generic code can accept either form.
  fn as_ref(&self) -> &Isolate {
    self
  }
}