1use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::Local;
10use crate::Message;
11use crate::Module;
12use crate::Object;
13use crate::PinScope;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapCodeStatistics;
22use crate::binding::v8__HeapSpaceStatistics;
23use crate::binding::v8__HeapStatistics;
24use crate::binding::v8__Isolate__UseCounterFeature;
25pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
26use crate::cppgc::Heap;
27use crate::external_references::ExternalReference;
28use crate::function::FunctionCallbackInfo;
29use crate::gc::GCCallbackFlags;
30use crate::gc::GCType;
31use crate::handle::FinalizerCallback;
32use crate::handle::FinalizerMap;
33use crate::isolate_create_params::CreateParams;
34use crate::isolate_create_params::raw;
35use crate::promise::PromiseRejectMessage;
36use crate::snapshot::SnapshotCreator;
37use crate::support::MapFnFrom;
38use crate::support::MapFnTo;
39use crate::support::Opaque;
40use crate::support::ToCFn;
41use crate::support::UnitType;
42use crate::support::char;
43use crate::support::int;
44use crate::support::size_t;
45use crate::wasm::WasmStreaming;
46use crate::wasm::trampoline;
47use std::ffi::CStr;
48
49use std::any::Any;
50use std::any::TypeId;
51use std::borrow::Cow;
52use std::collections::HashMap;
53use std::ffi::c_void;
54use std::fmt::{self, Debug, Formatter};
55use std::hash::BuildHasher;
56use std::hash::Hasher;
57use std::mem::MaybeUninit;
58use std::mem::align_of;
59use std::mem::forget;
60use std::mem::needs_drop;
61use std::mem::size_of;
62use std::ops::Deref;
63use std::ops::DerefMut;
64use std::pin::pin;
65use std::ptr;
66use std::ptr::NonNull;
67use std::ptr::addr_of_mut;
68use std::ptr::drop_in_place;
69use std::ptr::null_mut;
70use std::sync::Arc;
71
72use parking_lot::Mutex;
73
/// Controls how the microtask queue is drained; mirrors V8's
/// `v8::MicrotasksPolicy` (hence `#[repr(C)]` and explicit discriminants).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  /// Microtasks run only when `perform_microtask_checkpoint()` is called.
  Explicit = 0,
  // Discriminant 1 is V8's scoped policy, intentionally not exposed here.
  /// Microtasks run automatically when the JS call stack empties.
  Auto = 2,
}
86
/// Memory-pressure hint passed to `memory_pressure_notification`; mirrors
/// V8's `v8::MemoryPressureLevel` (cast to `u8` at the FFI boundary).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}
100
/// Whether V8 should re-query the host time zone on a date/time
/// configuration change; mirrors `v8::Isolate::TimeZoneDetection`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  Skip = 0,
  Redetect = 1,
}
118
/// Lifecycle event reported to a [`PromiseHook`]; mirrors
/// `v8::PromiseHookType` (discriminant order must match the C++ enum).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}
141
/// Kind of collection requested via
/// `v8__Isolate__RequestGarbageCollectionForTesting` (testing-only API).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  Full,
  Minor,
}
150
151pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);
152
bitflags! {
  /// Severity filter for message listeners; combine with `|` or use `ALL`.
  /// Mirrors V8's `v8::Isolate::MessageErrorLevel` bit values.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    // All of the above bits set.
    const ALL = (1 << 5) - 1;
  }
}
165
/// C-ABI promise lifecycle hook: receives the event type, the promise, and
/// its parent value. Installed via `set_promise_hook`.
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// C-ABI callback for unhandled/handled promise rejections.
/// Installed via `set_promise_reject_callback`.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);
170
/// Outcome of an async wasm compile/instantiate operation, passed to the
/// resolve-promise callback below; mirrors the C++ enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// C-ABI callback resolving/rejecting the promise of an async wasm
/// operation. First argument is the raw isolate pointer of the calling
/// isolate. Installed via `set_wasm_async_resolve_promise_callback`.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);
184
/// C-ABI predicate deciding whether wasm code generation is allowed in the
/// given context; receives the source as a string.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// C-ABI callback invoked so the embedder can populate a module's
/// `import.meta` object. Installed via
/// `set_host_initialize_import_meta_object_callback`.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
198
/// Rust-side shape of V8's dynamic `import()` host callback: given a scope,
/// host-defined options, the referrer's resource name, the specifier, and
/// the import attributes, returns the promise tracking the import (or
/// `None` on failure). Implemented for matching closures via the blanket
/// impl below; `UnitType` requires the closure to be zero-sized/stateless.
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Lowers the closure to the platform-specific raw C ABI function pointer
  /// expected by `v8__Isolate__SetHostImportModuleDynamicallyCallback`.
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
250
251#[cfg(target_family = "unix")]
252pub(crate) type RawHostImportModuleDynamicallyCallback =
253 for<'s> unsafe extern "C" fn(
254 Local<'s, Context>,
255 Local<'s, Data>,
256 Local<'s, Value>,
257 Local<'s, String>,
258 Local<'s, FixedArray>,
259 ) -> *mut Promise;
260
// On Windows x64/aarch64 the C++ ABI returns this value through a hidden
// out-pointer, so the raw callback takes (and echoes back) a `*mut *mut
// Promise` slot instead of returning the promise directly.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
274
// Blanket impl: any stateless closure with the right signature can be
// lowered to the raw C ABI. The double-adapter layering keeps the scope
// setup platform-independent while the outer adapter handles the per-OS
// return ABI.
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Enters a CallbackScope for the context and invokes the user closure.
    // `F::get()` materializes the zero-sized closure (UnitType guarantee).
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Unix ABI: promise pointer returned directly; null signals failure.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows ABI: result written through the hidden out-pointer, which is
    // also returned, matching the MSVC sret convention.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
366
/// Like [`HostImportModuleDynamicallyCallback`] but additionally receives
/// the [`ModuleImportPhase`] (e.g. source vs. evaluation phase imports).
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Lowers the closure to the platform-specific raw C ABI function pointer.
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
414
415#[cfg(target_family = "unix")]
416pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
417 for<'s> unsafe extern "C" fn(
418 Local<'s, Context>,
419 Local<'s, Data>,
420 Local<'s, Value>,
421 Local<'s, String>,
422 ModuleImportPhase,
423 Local<'s, FixedArray>,
424 ) -> *mut Promise;
425
// Windows x64/aarch64: promise delivered through a hidden out-pointer
// (sret-style), mirroring the non-phase variant above.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
440
// Blanket impl mirroring the non-phase variant: scope_adapter is
// platform-neutral, abi_adapter handles the per-OS return convention.
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Enters a CallbackScope for the context and invokes the user closure.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Unix ABI: promise pointer returned directly; null signals failure.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Windows ABI: result written through the hidden out-pointer, which is
    // also returned.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
539
/// Rust-level callback creating the context for a new ShadowRealm; the raw
/// ABI adaptation happens inside
/// `set_host_create_shadow_realm_context_callback`.
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// C-ABI GC prologue/epilogue callback carrying an embedder data pointer.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// C-ABI callback run on the isolate thread after `request_interrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// C-ABI callback invoked when the heap nears its limit; returns the new
/// heap limit in bytes.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;
568
/// C-layout mirror of V8's `OOMDetails`: whether the OOM was a JS-heap OOM
/// and an optional detail string (`detail` is a C string pointer; may be
/// null — TODO confirm against the C++ header).
#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  pub detail: *const char,
}

/// C-ABI out-of-memory handler; `location` is a C string naming where the
/// OOM occurred. Installed via `v8__Isolate__SetOOMErrorHandler`.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);
577
// Windows: stack-trace result is delivered through a hidden out-pointer
// (same sret-style convention as the import callbacks above).
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// Non-Windows: the result is returned directly, wrapped in a transparent
// newtype so the extern signature matches the C++ side exactly.
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;
601
/// Re-export of the generated use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// C-ABI callback V8 invokes when a counted language/API feature is used.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
605
// Raw bindings into the C++ glue layer (one `v8__Isolate__*` wrapper per
// `v8::Isolate` member). Signatures must stay byte-for-byte in sync with
// the C++ side; all functions take the raw `RealIsolate` pointer.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__GetHeapCodeAndMetadataStatistics(
    isolate: *mut RealIsolate,
    code_statistics: *mut v8__HeapCodeStatistics,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The shadow-realm callback has a different ABI on Windows (hidden
  // out-pointer return), hence the two cfg'd declarations.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );

  // Heap profiler: streams a heap snapshot in chunks to `callback`.
  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
785
/// Non-owning, non-null handle to a V8 isolate. `repr(transparent)` over the
/// raw pointer so `&NonNull<RealIsolate>` / `&UnsafeRawIsolatePtr` can be
/// reinterpreted as `&Isolate` (see the `ref_from_*` constructors below).
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);
799
/// Possibly-null raw isolate pointer used at C ABI boundaries (e.g. GC and
/// interrupt callbacks). `repr(transparent)` over the raw pointer.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
803
804impl UnsafeRawIsolatePtr {
805 pub fn null() -> Self {
806 Self(std::ptr::null_mut())
807 }
808
809 pub fn is_null(&self) -> bool {
810 self.0.is_null()
811 }
812}
813
/// Opaque stand-in for the C++ `v8::Isolate`; only ever used behind
/// pointers, never constructed or dereferenced on the Rust side.
#[repr(C)]
pub struct RealIsolate(Opaque);
816
817impl Isolate {
  /// Returns the raw `v8::Isolate` pointer for FFI calls.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }
821
  /// Converts this handle into a possibly-null raw pointer wrapper.
  ///
  /// # Safety
  /// The returned pointer carries no lifetime; the caller must not use it
  /// after the isolate is disposed.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }
825
  /// Rebuilds an `Isolate` handle from a raw pointer wrapper.
  ///
  /// # Panics
  /// Panics if `ptr` is null.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
    Self(NonNull::new(ptr.0).unwrap())
  }
830
  /// Like [`Self::from_raw_isolate_ptr`] but skips the null check.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live isolate.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }
837
  /// Wraps a bare isolate pointer without checking for null.
  ///
  /// # Safety
  /// `ptr` must be non-null and point to a live isolate.
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }
841
  /// Wraps a bare isolate pointer, panicking if it is null.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }
845
846 #[inline]
847 pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
848 if ptr.is_null() {
849 panic!("UnsafeRawIsolatePtr is null");
850 }
851 unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
852 }
853
  /// Like [`Self::ref_from_raw_isolate_ptr`] but without the null check.
  ///
  /// # Safety
  /// The wrapped pointer must be non-null and reference a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }
860
861 #[inline]
862 pub unsafe fn ref_from_raw_isolate_ptr_mut(
863 ptr: &mut UnsafeRawIsolatePtr,
864 ) -> &mut Self {
865 if ptr.is_null() {
866 panic!("UnsafeRawIsolatePtr is null");
867 }
868 unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
869 }
870
  /// Like [`Self::ref_from_raw_isolate_ptr_mut`] but without the null check.
  ///
  /// # Safety
  /// The wrapped pointer must be non-null and reference a live isolate.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }
877
  /// Wraps an already-non-null isolate pointer.
  ///
  /// # Safety
  /// `ptr` must point to a live isolate.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }
882
  /// Reinterprets `&NonNull<RealIsolate>` as `&Isolate` (valid because
  /// `Isolate` is repr(transparent) over `NonNull<RealIsolate>`).
  ///
  /// # Safety
  /// The pointee must be a live isolate for the borrow's duration.
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }
888
  /// Mutable counterpart of [`Self::from_raw_ref`].
  ///
  /// # Safety
  /// The pointee must be a live isolate for the borrow's duration.
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }
896
  // Internal embedder-data slot 0 holds the pointer to this isolate's
  // `IsolateAnnex` (see `create_annex`).
  const ANNEX_SLOT: u32 = 0;
  // Slots reserved for rusty_v8's own use; public `get_data`/`set_data`
  // indices are offset past these. (Slot 1's user is not visible here.)
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;
900
  /// Verifies at runtime that V8 provides at least the slots this crate
  /// reserves for itself; `get_number_of_data_slots` relies on this.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
908
  /// Creates and initializes a raw isolate; shared by `new` and
  /// `new_unentered`. Returns the raw pointer — ownership is assumed by the
  /// caller's wrapper type (the temporary `Isolate` here is non-owning).
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    // `create_param_allocations` keeps param-owned buffers alive; stored in
    // the annex below so they outlive the isolate.
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }
917
  /// Post-creation setup: checks slot invariants and installs the annex
  /// holding embedder state and the create-param allocations.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }
922
  /// Creates a new isolate, entered and owned by the returned guard.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
934
  /// Creates a new isolate that is NOT entered; the caller decides when to
  /// enter it via the returned wrapper.
  #[allow(clippy::new_ret_no_self)]
  pub fn new_unentered(params: CreateParams) -> UnenteredIsolate {
    UnenteredIsolate::new(Self::new_impl(params))
  }
942
  /// Creates an isolate set up for building a startup snapshot.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }
950
  /// Like [`Self::snapshot_creator`], but seeds the new snapshot from an
  /// existing snapshot blob.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }
963
  /// Convenience constructor for default isolate creation parameters.
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }
969
  /// Returns a handle that can be used from other threads (e.g. to request
  /// termination or interrupts).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
974
  /// Forcefully terminates currently executing JS; delegates to the
  /// thread-safe handle. Returns its result.
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }
980
  /// Cancels a pending termination request; delegates to the thread-safe
  /// handle. Returns its result.
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }
986
  /// Whether JS execution is currently being terminated; delegates to the
  /// thread-safe handle.
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }
992
  /// Allocates the `IsolateAnnex` (embedder slots, finalizers, etc.) and
  /// stashes a leaked `Arc` pointer to it in internal slot `ANNEX_SLOT`.
  /// The reference is reclaimed in `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must be empty — creating a second annex would leak the first.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
1002
1003 unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
1004 let annex = self.get_annex_mut();
1008 {
1009 let _lock = annex.isolate_mutex.lock();
1010 annex.isolate = null_mut();
1011 }
1012
1013 let create_param_allocations =
1015 std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
1016 annex.slots.clear();
1017
1018 for finalizer in annex.finalizer_map.drain() {
1020 if let FinalizerCallback::Guaranteed(callback) = finalizer {
1021 callback();
1022 }
1023 }
1024
1025 unsafe { Arc::from_raw(annex) };
1027 self.set_data(0, null_mut());
1028
1029 create_param_allocations
1030 }
1031
  /// Borrows the annex stored in internal slot `ANNEX_SLOT`.
  /// Panics if the annex has not been created (or was already disposed).
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }
1039
  /// Mutable counterpart of [`Self::get_annex`].
  /// Panics if the annex has not been created (or was already disposed).
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }
1047
  /// Stores the snapshot creator in the annex; asserts that none was
  /// installed before (it may only be set once per isolate).
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }
1058
  /// Borrows the map of handle finalizers kept in the annex.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }
1062
  /// Mutable counterpart of [`Self::get_finalizer_map`].
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }
1066
  /// Clones the annex `Arc` without disturbing the leaked reference held in
  /// the data slot: temporarily resurrect it with `from_raw`, clone, then
  /// leak one reference again with `into_raw`.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
1073
  /// Reads embedder data slot `slot`; public indices are offset past the
  /// crate's internal slots.
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }
1079
  /// Writes embedder data slot `slot`; public indices are offset past the
  /// crate's internal slots.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }
1086
  /// Number of embedder data slots available to users (total minus the
  /// crate-internal ones). Cannot underflow: `initialize` asserts V8
  /// provides at least INTERNAL_DATA_SLOT_COUNT slots.
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }
1093
  /// Raw (un-offset) slot read, used for both internal and public slots.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }
1098
  /// Raw (un-offset) slot write, used for both internal and public slots.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }
1103
  /// Borrows the typed slot value of type `T`, if one was stored with
  /// [`Self::set_slot`].
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      // SAFETY: entries are keyed by TypeId, so this slot holds a T.
      .map(|slot| unsafe { slot.borrow::<T>() })
  }
1140
  /// Mutable counterpart of [`Self::get_slot`].
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      // SAFETY: entries are keyed by TypeId, so this slot holds a T.
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }
1150
  /// Stores `value` keyed by its type, replacing any previous value of the
  /// same type. Returns `true` if no value of this type existed before.
  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }
1170
  /// Removes and returns the typed slot value of type `T`, if present.
  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      .remove(&TypeId::of::<T>())
      // SAFETY: entries are keyed by TypeId, so this slot holds a T.
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }
1180
  /// Enters this isolate on the current thread (`v8::Isolate::Enter`).
  ///
  /// # Safety
  /// Must be balanced with a matching [`Self::exit`]; V8 enter/exit rules
  /// apply.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }
1193
  /// Exits this isolate on the current thread (`v8::Isolate::Exit`).
  ///
  /// # Safety
  /// Must balance a previous [`Self::enter`] on the same thread.
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }
1208
  /// Notifies V8 of host memory pressure so it can adjust GC behavior.
  /// The level is passed across the FFI boundary as its `u8` discriminant.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }
1219
  /// Clears the set of objects kept alive by `WeakRef`s
  /// (`v8::Isolate::ClearKeptObjects`).
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }
1235
  /// Hints V8 that the system is low on memory
  /// (`v8::Isolate::LowMemoryNotification`).
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }
1242
  /// Snapshots the isolate's heap statistics.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      // Zero-initialized out-struct filled in by the C++ side.
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }
1253
  /// Number of heap spaces; valid indices for
  /// [`Self::get_heap_space_statistics`] are `0..n`.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }
1259
  /// Snapshots statistics for the heap space at `index`; returns `None`
  /// when the C++ side reports failure (e.g. index out of range).
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      // Zero-initialized out-struct filled in by the C++ side.
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }
1285
  /// Snapshots code/metadata heap statistics; returns `None` when the C++
  /// side reports failure.
  #[inline(always)]
  pub fn get_heap_code_and_metadata_statistics(
    &mut self,
  ) -> Option<HeapCodeStatistics> {
    let inner = unsafe {
      // Zero-initialized out-struct filled in by the C++ side.
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapCodeAndMetadataStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapCodeStatistics(inner))
  }
1305
  /// Enables/disables capturing stack traces for uncaught exceptions, with
  /// at most `frame_limit` frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }
1322
  /// Registers an error-message listener; returns the C++ side's status.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }
1333
  /// Registers a message listener restricted to the given severity bits
  /// (see [`MessageErrorLevel`]); returns the C++ side's status.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }
1349
  /// Installs the `Error.prepareStackTrace`-style callback; `MapFnTo`
  /// lowers the given closure to the platform-specific raw ABI.
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }
1373
  /// Installs a promise lifecycle hook (see [`PromiseHook`]).
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }
1380
  /// Installs the promise-rejection callback (see
  /// [`PromiseRejectCallback`]).
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }
1392
  /// Installs the callback that resolves/rejects promises for async wasm
  /// operations (see [`WasmAsyncResolvePromiseCallback`]).
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }
1405
  /// Installs the predicate controlling whether wasm code generation is
  /// allowed in a context (see [`AllowWasmCodeGenerationCallback`]).
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1418
  /// Installs the callback that populates a module's `import.meta` object.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }
1433
  /// Installs the host callback that services dynamic `import()` requests.
  /// The high-level `callback` is lowered to a raw C function pointer via
  /// `to_c_fn` before crossing the FFI boundary.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1448
  /// Like `set_host_import_module_dynamically_callback`, but for the
  /// phase-aware dynamic-import API (see `ModuleImportPhase`).
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
1468
  /// Installs the host callback V8 invokes to create a `ShadowRealm`
  /// context. The Rust callback is stashed in an isolate slot and a fixed C
  /// trampoline — registered at most once — retrieves it from there.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // C-ABI trampoline: rebuilds a scope from the initiator context, looks
    // the user callback up in the isolate's slot map, and returns the new
    // context pointer, or null when the callback returned `None`.
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows variant: the result is additionally written through the
    // out-pointer `rv`, mirroring the return-value handling used by the
    // Windows branch of `PrepareStackTraceCallback`'s mapping below.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // Replacing the slot value is sufficient on subsequent calls; the C
    // trampoline only needs to be registered the first time the slot is
    // created.
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }
1518
  /// Installs the callback V8 uses to report feature use counters
  /// (see `v8__Isolate__UseCounterFeature`).
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
1526
  /// Registers `callback` to run before garbage collections matching
  /// `gc_type_filter`; `data` is passed through opaquely to the callback.
  /// The pointer is never dereferenced on the Rust side, hence the lint
  /// allowance.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1551
  /// Unregisters a GC-prologue callback previously added with
  /// `add_gc_prologue_callback`; the `(callback, data)` pair identifies the
  /// registration to remove.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }
1565
  /// Registers `callback` to run after garbage collections matching
  /// `gc_type_filter`; `data` is passed through opaquely to the callback.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }
1585
  /// Unregisters a GC-epilogue callback previously added with
  /// `add_gc_epilogue_callback`; identified by the `(callback, data)` pair.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }
1599
  /// Registers `callback` to be invoked when the heap approaches its limit;
  /// `data` is passed through opaquely.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }
1614
  /// Unregisters a near-heap-limit callback. `heap_limit` is forwarded to
  /// V8 as-is; its exact semantics are defined by the C++ API
  /// (`RemoveNearHeapLimitCallback`).
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }
1633
  /// Reports a change (positive or negative, in bytes) in externally
  /// allocated memory kept alive by JS objects; returns the value the C++
  /// call yields (the updated total, per the V8 API of the same name).
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }
1653
  /// Returns the cppgc `Heap` attached to this isolate, or `None` when the
  /// C++ side returns a null pointer. The reference borrows from `self`.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }
1658
  /// Installs `callback` as the isolate's out-of-memory error handler
  /// (forwards to `v8__Isolate__SetOOMErrorHandler`).
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
1663
  /// Returns the isolate's current microtasks policy.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }
1669
  /// Sets the isolate's microtasks policy.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }
1675
  /// Runs a microtask checkpoint
  /// (forwards to `v8__Isolate__PerformMicrotaskCheckpoint`).
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }
1684
  /// Enqueues `microtask` (a JS function) on the isolate's microtask queue.
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }
1690
  /// Enables or disables `Atomics.wait` for this isolate.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }
1698
  /// Installs a streaming-Wasm-compilation callback. The closure type `F`
  /// is zero-sized (`UnitType`), so only a monomorphized trampoline —
  /// not the value itself — is handed to V8; the argument is ignored.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming<false>,
      ),
  {
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }
1723
  /// Notifies V8 that the host's date/time configuration (e.g. time zone)
  /// may have changed; `time_zone_detection` controls how V8 re-detects it.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }
1744
  /// Returns whether V8 reports pending background work for this isolate.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }
1752
1753 #[inline(always)]
1763 pub fn request_garbage_collection_for_testing(
1764 &mut self,
1765 r#type: GarbageCollectionType,
1766 ) {
1767 unsafe {
1768 v8__Isolate__RequestGarbageCollectionForTesting(
1769 self.as_real_ptr(),
1770 match r#type {
1771 GarbageCollectionType::Full => 0,
1772 GarbageCollectionType::Minor => 1,
1773 },
1774 );
1775 }
1776 }
1777
  /// Disposes the raw isolate via `v8__Isolate__Dispose`.
  ///
  /// # Safety
  /// Per the `Drop` impls below, this is called exactly once, after the
  /// isolate has been exited and its annex freed; the isolate must not be
  /// used afterwards.
  unsafe fn dispose(&mut self) {
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
1787
  /// Takes a heap snapshot, streaming the serialized output to `callback`
  /// in chunks. The meaning of the callback's `bool` return value is
  /// defined by `v8__HeapProfiler__TakeHeapSnapshot` on the C++ side —
  /// presumably `true` continues the stream; confirm against the binding.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C-ABI trampoline: recovers the `F` closure from the opaque `arg`
    // pointer and hands it the chunk as a byte slice.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        // `data` may be null when `size == 0`, so substitute an empty slice
        // instead of calling `from_raw_parts` on it.
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          (callback.as_mut())(&[])
        }
      }
    }

    // The closure lives on this stack frame for the duration of the
    // (synchronous) FFI call, so passing its address is sound.
    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
1825
1826 #[inline(always)]
1834 pub fn set_default_context(&mut self, context: Local<Context>) {
1835 let snapshot_creator = self
1836 .get_annex_mut()
1837 .maybe_snapshot_creator
1838 .as_mut()
1839 .unwrap();
1840 snapshot_creator.set_default_context(context);
1841 }
1842
1843 #[inline(always)]
1852 pub fn add_context(&mut self, context: Local<Context>) -> usize {
1853 let snapshot_creator = self
1854 .get_annex_mut()
1855 .maybe_snapshot_creator
1856 .as_mut()
1857 .unwrap();
1858 snapshot_creator.add_context(context)
1859 }
1860
1861 #[inline(always)]
1870 pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1871 where
1872 for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1873 {
1874 let snapshot_creator = self
1875 .get_annex_mut()
1876 .maybe_snapshot_creator
1877 .as_mut()
1878 .unwrap();
1879 snapshot_creator.add_isolate_data(data)
1880 }
1881
1882 #[inline(always)]
1891 pub fn add_context_data<T>(
1892 &mut self,
1893 context: Local<Context>,
1894 data: Local<T>,
1895 ) -> usize
1896 where
1897 for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1898 {
1899 let snapshot_creator = self
1900 .get_annex_mut()
1901 .maybe_snapshot_creator
1902 .as_mut()
1903 .unwrap();
1904 snapshot_creator.add_context_data(context, data)
1905 }
1906}
1907
/// Per-isolate bookkeeping that is shared across threads via
/// `IsolateHandle` (see the `Send`/`Sync` impls below).
pub(crate) struct IsolateAnnex {
  // Keeps allocations referenced by `CreateParams` alive for the isolate's
  // lifetime.
  create_param_allocations: Box<dyn Any>,
  // Type-keyed user data slots; see `RawSlot` and `TypeIdHasher` below.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  // Present only for isolates created for snapshotting; must be consumed
  // by `create_blob` before the isolate is dropped.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // Raw isolate pointer; `IsolateHandle` null-checks it under
  // `isolate_mutex` before every use, so it is presumably nulled on
  // disposal (done outside this chunk — confirm in `dispose_annex`).
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
1923
// SAFETY: the annex is shared across threads through `IsolateHandle`; all
// cross-thread uses of the raw `isolate` pointer there are guarded by
// `isolate_mutex` (see `terminate_execution` and friends below).
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1926
1927impl IsolateAnnex {
1928 fn new(
1929 isolate: &mut Isolate,
1930 create_param_allocations: Box<dyn Any>,
1931 ) -> Self {
1932 Self {
1933 create_param_allocations,
1934 slots: HashMap::default(),
1935 finalizer_map: FinalizerMap::default(),
1936 maybe_snapshot_creator: None,
1937 isolate: isolate.as_real_ptr(),
1938 isolate_mutex: Mutex::new(()),
1939 }
1940 }
1941}
1942
1943impl Debug for IsolateAnnex {
1944 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1945 f.debug_struct("IsolateAnnex")
1946 .field("isolate", &self.isolate)
1947 .field("isolate_mutex", &self.isolate_mutex)
1948 .finish()
1949 }
1950}
1951
/// Cheaply cloneable, thread-safe handle to an isolate, backed by the
/// shared `IsolateAnnex`. Remains safe to use after the isolate has been
/// disposed: operations then report failure instead of dereferencing a
/// dangling pointer.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1960
1961impl IsolateHandle {
1962 pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
1966 self.0.isolate
1967 }
1968
1969 #[inline(always)]
1970 fn new(isolate: &Isolate) -> Self {
1971 Self(isolate.get_annex_arc())
1972 }
1973
1974 #[inline(always)]
1982 pub fn terminate_execution(&self) -> bool {
1983 let _lock = self.0.isolate_mutex.lock();
1984 if self.0.isolate.is_null() {
1985 false
1986 } else {
1987 unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1988 true
1989 }
1990 }
1991
1992 #[inline(always)]
2007 pub fn cancel_terminate_execution(&self) -> bool {
2008 let _lock = self.0.isolate_mutex.lock();
2009 if self.0.isolate.is_null() {
2010 false
2011 } else {
2012 unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
2013 true
2014 }
2015 }
2016
2017 #[inline(always)]
2026 pub fn is_execution_terminating(&self) -> bool {
2027 let _lock = self.0.isolate_mutex.lock();
2028 if self.0.isolate.is_null() {
2029 false
2030 } else {
2031 unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
2032 }
2033 }
2034
2035 #[allow(clippy::not_unsafe_ptr_arg_deref)]
2046 #[inline(always)]
2047 pub fn request_interrupt(
2048 &self,
2049 callback: InterruptCallback,
2050 data: *mut c_void,
2051 ) -> bool {
2052 let _lock = self.0.isolate_mutex.lock();
2053 if self.0.isolate.is_null() {
2054 false
2055 } else {
2056 unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
2057 true
2058 }
2059 }
2060}
2061
/// Owning wrapper around a raw `v8::Isolate`: entered on creation, and
/// exited + disposed when dropped (see `Drop` below).
#[derive(Debug)]
pub struct OwnedIsolate {
  cxx_isolate: NonNull<RealIsolate>,
}
2067
2068impl OwnedIsolate {
2069 pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2070 let isolate = Self::new_already_entered(cxx_isolate);
2071 unsafe {
2072 isolate.enter();
2073 }
2074 isolate
2075 }
2076
2077 pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
2078 let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2079 let owned_isolate: OwnedIsolate = Self { cxx_isolate };
2080 owned_isolate
2082 }
2083}
2084
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      // A still-attached SnapshotCreator means `create_blob` was never
      // called; disposing now would be a usage error, so fail loudly.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Isolates are entered on creation, so the currently-entered isolate
      // must be this one; this enforces LIFO create/drop order.
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      // Teardown order matters: exit the isolate, free the annex, let the
      // platform observe the shutdown, then dispose the raw isolate.
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
2106
impl OwnedIsolate {
  /// Consumes the isolate and produces the startup snapshot blob, or `None`
  /// when blob creation fails.
  ///
  /// # Panics
  /// Panics when the isolate was not created with a `SnapshotCreator`.
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // Tear down the annex now; the returned allocations are kept alive
    // only until the blob has been created below.
    let _create_param_allocations = unsafe {
      self.dispose_annex()
    };

    // Skip `Drop`: the SnapshotCreator takes over disposal of the raw
    // isolate, and the annex has already been freed above.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
2135
2136impl Deref for OwnedIsolate {
2137 type Target = Isolate;
2138 fn deref(&self) -> &Self::Target {
2139 unsafe {
2140 std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
2141 }
2142 }
2143}
2144
2145impl DerefMut for OwnedIsolate {
2146 fn deref_mut(&mut self) -> &mut Self::Target {
2147 unsafe {
2148 std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
2149 &mut self.cxx_isolate,
2150 )
2151 }
2152 }
2153}
2154
impl AsMut<Isolate> for OwnedIsolate {
  // Delegates through `DerefMut` via deref coercion.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2160
impl AsMut<Isolate> for Isolate {
  // Identity conversion, so generic code can accept `impl AsMut<Isolate>`.
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
2166
/// Owning wrapper around a raw isolate that is NOT entered on the current
/// thread; lock and enter it via `Locker`. Dropping it disposes the
/// isolate (see `Drop` below).
#[derive(Debug)]
pub struct UnenteredIsolate {
  cxx_isolate: NonNull<RealIsolate>,
}
2202
2203impl UnenteredIsolate {
2204 pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2205 Self {
2206 cxx_isolate: NonNull::new(cxx_isolate).unwrap(),
2207 }
2208 }
2209
2210 #[inline]
2217 pub fn as_raw(&self) -> *mut RealIsolate {
2218 self.cxx_isolate.as_ptr()
2219 }
2220}
2221
impl Drop for UnenteredIsolate {
  fn drop(&mut self) {
    // Disposing while a `Locker` still holds the isolate's lock would be a
    // usage error; catch it in debug builds.
    debug_assert!(
      !crate::scope::raw::Locker::is_locked(self.cxx_isolate),
      "Cannot drop UnenteredIsolate while a Locker is held. \
       Drop the Locker first."
    );

    unsafe {
      let isolate = Isolate::from_raw_ref_mut(&mut self.cxx_isolate);
      // Same invariant as `OwnedIsolate::drop`: a live SnapshotCreator must
      // have been consumed by `create_blob` first.
      let snapshot_creator =
        isolate.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "v8::UnenteredIsolate::create_blob must be called before dropping"
      );
      // Teardown order mirrors `OwnedIsolate::drop`, minus the `exit()`
      // (this isolate was never entered): free the annex, notify the
      // platform, then dispose.
      isolate.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), isolate);
      isolate.dispose();
    }
  }
}
2245
// SAFETY: NOTE(review) — relies on a V8 isolate being movable between
// threads while it is not entered/locked; confirm against V8's threading
// rules for `v8::Isolate` + `v8::Locker`.
unsafe impl Send for UnenteredIsolate {}
2251
/// Owned wrapper around the raw `v8::HeapStatistics` binding struct.
pub struct HeapStatistics(v8__HeapStatistics);
2257
impl HeapStatistics {
  /// Value of V8's `total_heap_size` statistic.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Value of V8's `total_heap_size_executable` statistic.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Value of V8's `total_physical_size` statistic.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Value of V8's `total_available_size` statistic.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Value of V8's `total_global_handles_size` statistic.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Value of V8's `used_global_handles_size` statistic.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Value of V8's `used_heap_size` statistic.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// Value of V8's `heap_size_limit` statistic.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Value of V8's `malloced_memory` statistic.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// Value of V8's `external_memory` statistic.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Value of V8's `peak_malloced_memory` statistic.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Value of V8's `number_of_native_contexts` statistic.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Value of V8's `number_of_detached_contexts` statistic.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Value of V8's `total_allocated_bytes` statistic.
  #[inline(always)]
  pub fn total_allocated_bytes(&self) -> u64 {
    self.0.total_allocated_bytes_
  }

  /// Whether this V8 build zaps garbage (`does_zap_garbage`).
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2339
/// Owned wrapper around the raw `v8::HeapSpaceStatistics` binding struct.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2341
impl HeapSpaceStatistics {
  /// Name of the heap space. The `'static` lifetime presumes V8 backs
  /// `space_name_` with a static string — TODO confirm against the binding.
  pub fn space_name(&self) -> &'static CStr {
    unsafe { CStr::from_ptr(self.0.space_name_) }
  }

  /// Value of V8's `space_size` statistic.
  pub fn space_size(&self) -> usize {
    self.0.space_size_
  }

  /// Value of V8's `space_used_size` statistic.
  pub fn space_used_size(&self) -> usize {
    self.0.space_used_size_
  }

  /// Value of V8's `space_available_size` statistic.
  pub fn space_available_size(&self) -> usize {
    self.0.space_available_size_
  }

  /// Value of V8's `physical_space_size` statistic.
  pub fn physical_space_size(&self) -> usize {
    self.0.physical_space_size_
  }
}
2363
/// Owned wrapper around the raw `v8::HeapCodeStatistics` binding struct.
pub struct HeapCodeStatistics(v8__HeapCodeStatistics);
2365
impl HeapCodeStatistics {
  /// Value of V8's `code_and_metadata_size` statistic.
  pub fn code_and_metadata_size(&self) -> usize {
    self.0.code_and_metadata_size_
  }

  /// Value of V8's `bytecode_and_metadata_size` statistic.
  pub fn bytecode_and_metadata_size(&self) -> usize {
    self.0.bytecode_and_metadata_size_
  }

  /// Value of V8's `external_script_source_size` statistic.
  pub fn external_script_source_size(&self) -> usize {
    self.0.external_script_source_size_
  }

  /// Value of V8's `cpu_profiler_metadata_size` statistic.
  pub fn cpu_profiler_metadata_size(&self) -> usize {
    self.0.cpu_profiler_metadata_size_
  }
}
2383
// Adapts a zero-sized Rust closure type `F` into the raw C callback shape V8
// expects for `prepare_stack_trace`; the two cfg branches differ only in how
// the returned `Local<Value>` crosses the FFI boundary.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows: the result is written through the extra `ret_ptr` out-pointer,
  // which is also returned.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // Non-Windows: the result is returned directly, wrapped in
  // `PrepareStackTraceCallbackRet`.
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();

      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2419
/// One-shot "hasher" for `TypeId` keys: it records a single `u64` verbatim
/// instead of actually hashing, since `TypeId`s are already well
/// distributed. See the layout assertions further below.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // The single recorded value; `None` until `write_u64` is called.
  state: Option<u64>,
}
2427
impl Hasher for TypeIdHasher {
  // `TypeId`'s Hash impl is expected to go through `write_u64` only; any
  // byte-level write indicates a different key type (or a std behavior
  // change) and is a hard error.
  fn write(&mut self, _bytes: &[u8]) {
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // Exactly one write per hashing run; a second write would silently
    // discard data, so catch it in debug builds.
    let prev_state = self.state.replace(value);
    debug_assert_eq!(prev_state, None);
  }

  #[inline]
  fn finish(&self) -> u64 {
    // Panics if no value was ever written — hashing something that is not
    // a `TypeId`-like single u64.
    self.state.unwrap()
  }
}
2446
/// `BuildHasher` producing `TypeIdHasher`s; used as the hasher of the
/// `TypeId`-keyed slot map in `IsolateAnnex`.
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;
2452
2453impl BuildHasher for BuildTypeIdHasher {
2454 type Hasher = TypeIdHasher;
2455
2456 #[inline]
2457 fn build_hasher(&self) -> Self::Hasher {
2458 Default::default()
2459 }
2460}
2461
// Compile-time check that `TypeId` has the layout the `TypeIdHasher`
// scheme relies on: exactly the size and alignment of a `u64` or a `u128`.
// If std ever changes this, the build fails here rather than misbehaving.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2472
/// Type-erased storage cell for isolate slot values: small values are
/// stored inline in `data`, larger ones behind a `Box` (see `needs_box`).
pub(crate) struct RawSlot {
  // Inline storage; holds either a `T` or a `Box<T>` bit-for-bit.
  data: RawSlotData,
  // Destructor for the stored value; `None` when `T` needs no drop.
  dtor: Option<RawSlotDtor>,
}
2477
// Inline storage unit for a slot: one `usize` worth of (possibly
// uninitialized) bytes.
type RawSlotData = MaybeUninit<usize>;
// Type-erased in-place destructor for the value stored in a slot.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
2480
impl RawSlot {
  /// Stores `value`, boxing it first when it does not fit (or is
  /// over-aligned for) the inline `usize`-sized storage.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  /// Borrows the stored value.
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Boxed case: the inline storage holds a `Box<T>`; auto-deref of
        // `&Box<T>` yields `&T`.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  /// Mutably borrows the stored value.
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  /// Moves the stored value out of the slot.
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        // Read the Box out of the inline storage, then move out of it.
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // Ownership has been transferred to `value`; skip `Drop` so the
      // destructor does not run a second time.
      forget(self);
      value
    }
  }

  // A `T` needs boxing when it cannot live inside the inline storage.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // Writes `value` (a `T` or a `Box<T>`) into fresh inline storage and
  // records its destructor when one is needed.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // Monomorphized destructor stored in `dtor`; drops the value in place.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
2560
impl Drop for RawSlot {
  fn drop(&mut self) {
    // Run the recorded destructor, if the stored type needed one.
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}
2568
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    // SAFETY: `cxx_isolate` points at a live isolate while `self` exists.
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
impl AsRef<Isolate> for Isolate {
  // Identity conversion, so generic code can accept `impl AsRef<Isolate>`.
  fn as_ref(&self) -> &Isolate {
    self
  }
}
2579
/// RAII guard that holds the V8 lock for an `UnenteredIsolate` and keeps
/// the isolate entered for the guard's lifetime; derefs to `Isolate`.
pub struct Locker<'a> {
  // ManuallyDrop so `drop` can control ordering: the isolate is exited
  // BEFORE the raw lock is released (see `Drop` below).
  raw: std::mem::ManuallyDrop<crate::scope::raw::Locker>,
  isolate: &'a mut UnenteredIsolate,
}
2600
impl<'a> Locker<'a> {
  /// Acquires the V8 lock for `isolate` and enters it. `Drop` undoes both
  /// in reverse order.
  pub fn new(isolate: &'a mut UnenteredIsolate) -> Self {
    let isolate_ptr = isolate.cxx_isolate;

    // Take the raw V8 lock first...
    let mut raw = unsafe { crate::scope::raw::Locker::uninit() };
    unsafe { raw.init(isolate_ptr) };

    // ...then enter the isolate on this thread.
    unsafe {
      v8__Isolate__Enter(isolate_ptr.as_ptr());
    }

    Self {
      raw: std::mem::ManuallyDrop::new(raw),
      isolate,
    }
  }

  /// Returns whether a V8 `Locker` currently holds `isolate`'s lock.
  pub fn is_locked(isolate: &UnenteredIsolate) -> bool {
    crate::scope::raw::Locker::is_locked(isolate.cxx_isolate)
  }
}
2635
impl Drop for Locker<'_> {
  fn drop(&mut self) {
    unsafe {
      // Reverse of `new`: exit the isolate first, then release the raw V8
      // lock (the ManuallyDrop exists precisely to enforce this order).
      v8__Isolate__Exit(self.isolate.cxx_isolate.as_ptr());
      std::mem::ManuallyDrop::drop(&mut self.raw);
    }
  }
}
2646
impl Deref for Locker<'_> {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: while the Locker exists, the isolate is locked, entered, and
    // alive (borrowed mutably from the UnenteredIsolate).
    unsafe { Isolate::from_raw_ref(&self.isolate.cxx_isolate) }
  }
}
2653
impl DerefMut for Locker<'_> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: see `Deref`; `&mut self` guarantees exclusive access.
    unsafe { Isolate::from_raw_ref_mut(&mut self.isolate.cxx_isolate) }
  }
}