use crate::Array;
use crate::CallbackScope;
use crate::Context;
use crate::Data;
use crate::FixedArray;
use crate::Function;
use crate::FunctionCodeHandling;
use crate::Local;
use crate::Message;
use crate::Module;
use crate::Object;
use crate::PinScope;
use crate::Platform;
use crate::Promise;
use crate::PromiseResolver;
use crate::StartupData;
use crate::String;
use crate::V8::get_current_platform;
use crate::Value;
use crate::binding::v8__HeapCodeStatistics;
use crate::binding::v8__HeapSpaceStatistics;
use crate::binding::v8__HeapStatistics;
use crate::binding::v8__Isolate__UseCounterFeature;
pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
use crate::cppgc::Heap;
use crate::external_references::ExternalReference;
use crate::function::FunctionCallbackInfo;
use crate::gc::GCCallbackFlags;
use crate::gc::GCType;
use crate::handle::FinalizerCallback;
use crate::handle::FinalizerMap;
use crate::isolate_create_params::CreateParams;
use crate::isolate_create_params::raw;
use crate::promise::PromiseRejectMessage;
use crate::snapshot::SnapshotCreator;
use crate::support::MapFnFrom;
use crate::support::MapFnTo;
use crate::support::Opaque;
use crate::support::ToCFn;
use crate::support::UnitType;
use crate::support::char;
use crate::support::int;
use crate::support::size_t;
use crate::wasm::WasmStreaming;
use crate::wasm::trampoline;
use std::ffi::CStr;
use std::any::Any;
use std::any::TypeId;
use std::borrow::Cow;
use std::collections::HashMap;
use std::ffi::c_void;
use std::fmt::{self, Debug, Formatter};
use std::hash::BuildHasher;
use std::hash::Hasher;
use std::mem::MaybeUninit;
use std::mem::align_of;
use std::mem::forget;
use std::mem::needs_drop;
use std::mem::size_of;
use std::ops::Deref;
use std::ops::DerefMut;
use std::pin::pin;
use std::ptr;
use std::ptr::NonNull;
use std::ptr::addr_of_mut;
use std::ptr::drop_in_place;
use std::ptr::null_mut;
use std::sync::Arc;
use parking_lot::Mutex;
/// Policy for when microtasks are run (mirrors `v8::MicrotasksPolicy`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  /// Microtasks run only when `Isolate::perform_microtask_checkpoint()` is
  /// explicitly invoked.
  Explicit = 0,
  // Discriminant 1 is intentionally skipped (V8's deprecated `kScoped`
  // policy); `Auto` must stay at 2 so the layout matches the C++ enum.
  /// Microtasks run automatically when the JavaScript call stack empties.
  Auto = 2,
}
/// Memory-pressure hint passed to `Isolate::memory_pressure_notification`
/// (mirrors `v8::MemoryPressureLevel`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  /// No pressure.
  None = 0,
  /// Moderate pressure.
  Moderate = 1,
  /// Critical pressure; V8 may respond with an aggressive GC.
  Critical = 2,
}
/// Controls whether V8 re-detects the host time zone on a date/time
/// configuration change notification (mirrors `v8::Isolate::TimeZoneDetection`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  /// Keep the cached time zone.
  Skip = 0,
  /// Re-detect the host time zone.
  Redetect = 1,
}
/// Lifecycle event reported to a [`PromiseHook`] (mirrors `v8::PromiseHookType`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  /// A new promise was created.
  Init,
  /// A promise was resolved or rejected.
  Resolve,
  /// About to call a reaction handler.
  Before,
  /// Finished calling a reaction handler.
  After,
}
/// Kind of collection requested via
/// `Isolate::request_garbage_collection_for_testing`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  /// Full (major) GC.
  Full,
  /// Minor (scavenge / young-generation) GC.
  Minor,
}
/// Callback invoked by a message listener; receives the message and the
/// exception value it was generated for.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  /// Log levels a message listener can subscribe to (mirrors
  /// `v8::Isolate::MessageErrorLevel`).
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    /// Union of all of the above levels.
    const ALL = (1 << 5) - 1;
  }
}
/// Hook invoked on promise lifecycle events; the third argument is the
/// parent promise for `Init` events (undefined otherwise).
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Called when a promise is rejected without a handler, or a handler is
/// later added to an already-rejected promise.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome flag handed to a [`WasmAsyncResolvePromiseCallback`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}

/// Resolves or rejects the promise for an async Wasm compile/instantiate
/// operation, depending on the [`WasmAsyncSuccess`] value.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  UnsafeRawIsolatePtr,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

/// Returns whether Wasm code generation is allowed in the given context;
/// the string argument is the source about to be compiled.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// Populates `import.meta` for a module; receives the context, the module
/// and the (mutable) meta object.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
/// Rust-friendly form of V8's dynamic-`import()` host callback.
///
/// Implemented automatically (see the blanket impl below) for any zero-sized
/// closure taking a scope, the host-defined options, the referrer's resource
/// name, the import specifier and the import attributes, and returning the
/// promise for the in-flight import (or `None` if an exception was thrown).
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts `self` into the raw, platform-ABI-correct C function pointer
  /// that is registered with V8.
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
// ABI note: on Unix a `Local<Promise>` is returned by value in a register,
// so the raw callback simply returns `*mut Promise`.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

// On Windows x64/aarch64 the C++ ABI returns this type through a hidden
// pointer-to-return-value passed as the first argument (and returned back),
// hence the extra leading `*mut *mut Promise` parameter.
// NOTE(review): this variant is `pub` while the Unix variant is
// `pub(crate)`; presumably unintentional — confirm before unifying.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Inner layer: enter a `CallbackScope` for the callback's context and
    // invoke the user closure. The closure instance is recovered statically
    // via `F::get()` — this is sound only because `F: UnitType` (zero-sized).
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, 'i: 's, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Outer layer (Unix): translate `Option<Local<Promise>>` into the raw
    // nullable pointer V8 expects.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer (Windows x64/aarch64): write the result through the hidden
    // return-value slot and return that slot, per the MSVC C++ ABI.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
/// Like [`HostImportModuleDynamicallyCallback`], but additionally receives
/// the [`ModuleImportPhase`] (e.g. `import source` vs. full evaluation).
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s, 'i> FnOnce(
    &mut PinScope<'s, 'i>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts `self` into the raw, platform-ABI-correct C function pointer.
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
// Same ABI split as `RawHostImportModuleDynamicallyCallback`: plain return
// on Unix, hidden return-value slot on Windows x64/aarch64.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

// NOTE(review): `pub` here vs. `pub(crate)` on Unix — same visibility
// inconsistency as the non-phase variant; confirm before unifying.
#[cfg(all(
  target_family = "windows",
  any(target_arch = "x86_64", target_arch = "aarch64")
))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s, 'i> FnOnce(
      &mut PinScope<'s, 'i>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Inner layer: enter a `CallbackScope` and call the zero-sized user
    // closure recovered via `F::get()`.
    #[allow(unused_variables)]
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope = scope.init();
      (F::get())(
        &mut scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Outer layer (Unix): nullable pointer return.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Outer layer (Windows x64/aarch64): hidden return-value slot.
    #[cfg(all(
      target_family = "windows",
      any(target_arch = "x86_64", target_arch = "aarch64")
    ))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
/// Creates the context for a new ShadowRealm; returning `None` signals that
/// an exception was thrown.
pub type HostCreateShadowRealmContextCallback =
  for<'s, 'i> fn(scope: &mut PinScope<'s, 'i>) -> Option<Local<'s, Context>>;

/// GC prologue/epilogue callback carrying a user data pointer.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: UnsafeRawIsolatePtr,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback run on the isolate's thread after `request_interrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: UnsafeRawIsolatePtr, data: *mut c_void);

/// Called when the heap nears its limit; returns the new heap limit.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

/// Details passed to an [`OomErrorCallback`]; `detail` is a C string
/// (nul-terminated, may describe the failure site).
#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  pub detail: *const char,
}

/// Handler for fatal out-of-memory conditions; `location` is a C string
/// naming the V8 source location.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);
// ABI note: like the raw host-import callbacks, the Windows C++ ABI returns
// the `Local` through a hidden return-value slot.
// NOTE(review): this pair gates on `target_os = "windows"` while the
// host-import shims gate on `target_family = "windows"` + arch; presumably
// equivalent in practice — confirm.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

/// Wrapper for the non-Windows return value of a stack-trace-preparation
/// callback (a single `Value` pointer, returned by value).
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

/// Callback implementing `Error.prepareStackTrace`-style customization;
/// receives the context, the error and the array of call sites.
#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Re-export of the generated use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;

/// Invoked when V8 increments a use counter.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
// Raw bindings to the C++ glue layer. Every function takes a raw
// `v8::Isolate` pointer; callers must guarantee the pointer is valid and
// that threading/locking requirements of the corresponding V8 API hold.
unsafe extern "C" {
  // Lifecycle.
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut RealIsolate;
  fn v8__Isolate__Dispose(this: *mut RealIsolate);
  // Embedder data slots.
  fn v8__Isolate__GetNumberOfDataSlots(this: *const RealIsolate) -> u32;
  fn v8__Isolate__GetData(
    isolate: *const RealIsolate,
    slot: u32,
  ) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const RealIsolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut RealIsolate);
  fn v8__Isolate__Exit(this: *mut RealIsolate);
  fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
  // Memory / GC.
  fn v8__Isolate__MemoryPressureNotification(this: *mut RealIsolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut RealIsolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut RealIsolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut RealIsolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut RealIsolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut RealIsolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut RealIsolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut RealIsolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut RealIsolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__GetHeapCodeAndMetadataStatistics(
    isolate: *mut RealIsolate,
    code_statistics: *mut v8__HeapCodeStatistics,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut RealIsolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut RealIsolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut RealIsolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut RealIsolate) -> *mut Heap;
  // Embedder callbacks.
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut RealIsolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut RealIsolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut RealIsolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut RealIsolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut RealIsolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut RealIsolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut RealIsolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The ShadowRealm callback has two ABI shapes; see the comments on
  // `RawHostImportModuleDynamicallyCallback` for the Windows return-slot ABI.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut RealIsolate,
    callback: UseCounterCallback,
  );
  // Execution control.
  fn v8__Isolate__RequestInterrupt(
    isolate: *const RealIsolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const RealIsolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const RealIsolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const RealIsolate);
  // Microtasks.
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const RealIsolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut RealIsolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut RealIsolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut RealIsolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut RealIsolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut RealIsolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(
    isolate: *const RealIsolate,
  ) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut RealIsolate,
    r#type: usize,
  );
  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut RealIsolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
/// A V8 isolate: an isolated instance of the engine with its own heap.
/// Transparent wrapper around a non-null pointer to the C++ `v8::Isolate`.
#[repr(transparent)]
#[derive(Debug)]
pub struct Isolate(NonNull<RealIsolate>);

/// A possibly-null raw isolate pointer, used at FFI boundaries where no
/// validity guarantee can be made (e.g. callbacks that may fire during
/// isolate teardown).
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct UnsafeRawIsolatePtr(*mut RealIsolate);
impl UnsafeRawIsolatePtr {
pub fn null() -> Self {
Self(std::ptr::null_mut())
}
pub fn is_null(&self) -> bool {
self.0.is_null()
}
}
/// Opaque stand-in for the C++ `v8::Isolate`; only ever used behind pointers.
#[repr(C)]
pub struct RealIsolate(Opaque);
impl Isolate {
  /// Returns the raw pointer to the underlying C++ isolate.
  pub(crate) fn as_real_ptr(&self) -> *mut RealIsolate {
    self.0.as_ptr()
  }

  /// Converts to an [`UnsafeRawIsolatePtr`] carrying no validity guarantees.
  pub unsafe fn as_raw_isolate_ptr(&self) -> UnsafeRawIsolatePtr {
    UnsafeRawIsolatePtr(self.0.as_ptr())
  }

  /// Reconstructs an `Isolate` from a raw pointer; panics if it is null.
  #[inline]
  pub unsafe fn from_raw_isolate_ptr(ptr: UnsafeRawIsolatePtr) -> Self {
    Self(NonNull::new(ptr.0).unwrap())
  }

  /// Like [`Self::from_raw_isolate_ptr`], but the caller guarantees the
  /// pointer is non-null (no check performed).
  #[inline]
  pub unsafe fn from_raw_isolate_ptr_unchecked(
    ptr: UnsafeRawIsolatePtr,
  ) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr.0) })
  }

  /// Wraps a bare `*mut RealIsolate` without a null check.
  pub unsafe fn from_raw_ptr_unchecked(ptr: *mut RealIsolate) -> Self {
    Self(unsafe { NonNull::new_unchecked(ptr) })
  }

  /// Wraps a bare `*mut RealIsolate`; panics if it is null.
  pub unsafe fn from_raw_ptr(ptr: *mut RealIsolate) -> Self {
    Self(NonNull::new(ptr).unwrap())
  }

  /// Reinterprets a reference to a raw pointer as a reference to an
  /// `Isolate` (sound because both are `repr(transparent)` over a pointer);
  /// panics if the pointer is null.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr(ptr: &UnsafeRawIsolatePtr) -> &Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Unchecked variant of [`Self::ref_from_raw_isolate_ptr`].
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_unchecked(
    ptr: &UnsafeRawIsolatePtr,
  ) -> &Self {
    unsafe { &*(ptr as *const UnsafeRawIsolatePtr as *const Isolate) }
  }

  /// Mutable variant of [`Self::ref_from_raw_isolate_ptr`]; panics on null.
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    if ptr.is_null() {
      panic!("UnsafeRawIsolatePtr is null");
    }
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Unchecked mutable variant of [`Self::ref_from_raw_isolate_ptr_mut`].
  #[inline]
  pub unsafe fn ref_from_raw_isolate_ptr_mut_unchecked(
    ptr: &mut UnsafeRawIsolatePtr,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut UnsafeRawIsolatePtr as *mut Isolate) }
  }

  /// Wraps an already-non-null pointer.
  #[inline]
  pub(crate) unsafe fn from_non_null(ptr: NonNull<RealIsolate>) -> Self {
    Self(ptr)
  }

  /// Reinterprets `&NonNull<RealIsolate>` as `&Isolate` (repr(transparent)).
  #[inline]
  pub(crate) unsafe fn from_raw_ref(ptr: &NonNull<RealIsolate>) -> &Self {
    unsafe { &*(ptr as *const NonNull<RealIsolate> as *const Isolate) }
  }

  /// Mutable variant of [`Self::from_raw_ref`].
  #[inline]
  pub(crate) unsafe fn from_raw_ref_mut(
    ptr: &mut NonNull<RealIsolate>,
  ) -> &mut Self {
    unsafe { &mut *(ptr as *mut NonNull<RealIsolate> as *mut Isolate) }
  }

  // Internal embedder-data slot holding the leaked `Arc<IsolateAnnex>`.
  const ANNEX_SLOT: u32 = 0;
  // Number of V8 data slots reserved for this crate's internal use; user
  // slots are offset past these. (The use of the second internal slot is
  // not visible in this file — confirm elsewhere before repurposing.)
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  /// Asserts that V8 actually provides at least the reserved slot count.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
  /// Shared constructor body: creates the C++ isolate and attaches the
  /// Rust-side annex before returning the raw pointer.
  fn new_impl(params: CreateParams) -> *mut RealIsolate {
    crate::V8::assert_initialized();
    // `create_param_allocations` keeps allocations referenced by the raw
    // params alive; ownership moves into the annex below.
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut isolate = unsafe { Isolate::from_raw_ptr(cxx_isolate) };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  /// Verifies slot layout and installs the annex for a freshly created isolate.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }

  /// Creates a new isolate that is entered on the current thread.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
  /// Creates a new isolate without entering it on the current thread.
  #[allow(clippy::new_ret_no_self)]
  pub fn new_unentered(params: CreateParams) -> UnenteredIsolate {
    UnenteredIsolate::new(Self::new_impl(params))
  }

  /// Creates an isolate set up for building a startup snapshot.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }

  /// Creates a snapshot-building isolate seeded from an existing snapshot.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }

  /// Convenience constructor for default [`CreateParams`].
  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }

  /// Returns a handle that may be used from any thread (e.g. to terminate
  /// execution while JS is running).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }

  /// Forcefully terminates the current JS execution (thread-safe).
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }

  /// Resumes execution capability after a termination request (thread-safe).
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  /// Returns whether a termination request is currently in progress.
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }
  /// Allocates the isolate's annex (Rust-side companion state) and stashes
  /// a leaked `Arc` pointer to it in internal slot `ANNEX_SLOT`. The leaked
  /// reference is reclaimed by `dispose_annex`.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    // The slot must be empty; installing a second annex would leak the first.
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
let annex = self.get_annex_mut();
{
let _lock = annex.isolate_mutex.lock();
annex.isolate = null_mut();
}
let create_param_allocations =
std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
annex.slots.clear();
for finalizer in annex.finalizer_map.drain() {
if let FinalizerCallback::Guaranteed(callback) = finalizer {
callback();
}
}
unsafe { Arc::from_raw(annex) };
self.set_data(0, null_mut());
create_param_allocations
}
  /// Returns a shared reference to the annex stored in `ANNEX_SLOT`.
  /// Panics if the annex has not been created (or was already disposed).
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  /// Mutable variant of [`Self::get_annex`].
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }

  /// Stores the `SnapshotCreator` in the annex; panics if one was already set.
  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }

  /// Read access to the weak-handle finalizer registry.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }

  /// Write access to the weak-handle finalizer registry.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }

  /// Clones the annex `Arc`: reconstructs it from the raw slot pointer,
  /// clones it, then re-leaks one reference so the slot's count is preserved.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
  /// Retrieves user embedder data from `slot` (offset past the internal
  /// slots reserved by this crate).
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }

  /// Stores user embedder data in `slot` (offset past the internal slots).
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }

  /// Number of embedder data slots available to the user (total minus the
  /// slots this crate reserves).
  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self.as_real_ptr()) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }

  /// Raw (un-offset) slot read; for internal use.
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self.as_real_ptr(), slot) }
  }

  /// Raw (un-offset) slot write; for internal use.
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self.as_real_ptr(), slot, data) }
  }

  /// Returns a reference to the annex slot value of type `T`, if one was
  /// stored with [`Self::set_slot`]. Slots are keyed by `TypeId`, so there
  /// is at most one value per type.
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow::<T>() })
  }

  /// Mutable variant of [`Self::get_slot`].
  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }

  /// Stores `value` keyed by its type; returns `true` if no value of that
  /// type was present before (the previous value, if any, is dropped).
  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }

  /// Removes and returns the value of type `T`, if present.
  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      .remove(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }
  /// Enters this isolate on the current thread. Must be balanced with
  /// [`Self::exit`]; entering the same isolate twice without exiting is a
  /// caller error.
  #[inline(always)]
  pub unsafe fn enter(&self) {
    unsafe {
      v8__Isolate__Enter(self.as_real_ptr());
    }
  }

  /// Exits this isolate on the current thread; pairs with [`Self::enter`].
  #[inline(always)]
  pub unsafe fn exit(&self) {
    unsafe {
      v8__Isolate__Exit(self.as_real_ptr());
    }
  }

  /// Notifies V8 of host memory pressure so it can adapt GC behavior.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe {
      v8__Isolate__MemoryPressureNotification(self.as_real_ptr(), level as u8)
    }
  }

  /// Clears objects kept alive for `WeakRef`/`FinalizationRegistry` semantics.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self.as_real_ptr()) }
  }

  /// Hints V8 to free as much memory as possible (expensive; testing/idle use).
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self.as_real_ptr()) }
  }

  /// Collects overall heap statistics.
  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    // The C++ side fills the zero-initialized struct in place.
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self.as_real_ptr(), s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }

  /// Number of heap spaces, for use with [`Self::get_heap_space_statistics`].
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self.as_real_ptr()) }
  }

  /// Statistics for the heap space at `index`; `None` if out of range.
  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
        index,
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }

  /// Code/metadata heap statistics; `None` if V8 reports failure.
  #[inline(always)]
  pub fn get_heap_code_and_metadata_statistics(
    &mut self,
  ) -> Option<HeapCodeStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapCodeAndMetadataStatistics(
        self.as_real_ptr(),
        s.as_mut_ptr(),
      ) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapCodeStatistics(inner))
  }

  /// Enables/disables stack-trace capture for uncaught exceptions, keeping
  /// up to `frame_limit` frames.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self.as_real_ptr(),
        capture,
        frame_limit,
      );
    }
  }
  /// Adds a listener for error messages (default level: `ERROR`); returns
  /// what the C++ API reports (whether the listener was added).
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self.as_real_ptr(), callback) }
  }

  /// Adds a message listener subscribed to the given [`MessageErrorLevel`]s.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self.as_real_ptr(),
        callback,
        message_levels,
      )
    }
  }

  /// Installs a custom `Error.prepareStackTrace`-style callback.
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(
        self.as_real_ptr(),
        callback.map_fn_to(),
      );
    };
  }

  /// Installs a promise lifecycle hook.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self.as_real_ptr(), hook) }
  }

  /// Installs the unhandled-promise-rejection callback.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe {
      v8__Isolate__SetPromiseRejectCallback(self.as_real_ptr(), callback)
    }
  }

  /// Installs the async-Wasm promise resolution callback.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe {
      v8__Isolate__SetWasmAsyncResolvePromiseCallback(
        self.as_real_ptr(),
        callback,
      )
    }
  }

  /// Installs the predicate deciding whether Wasm code generation is allowed.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the `import.meta` initializer callback.
  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(
        self.as_real_ptr(),
        callback,
      );
    }
  }

  /// Installs the dynamic-`import()` host callback; the zero-sized closure
  /// is converted to a raw C function via `to_c_fn`.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }

  /// Phase-aware variant of the dynamic-import host callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self.as_real_ptr(),
        callback.to_c_fn(),
      );
    }
  }
  /// Installs the ShadowRealm context-creation callback. The Rust callback
  /// is stored in a typed annex slot; the C trampoline reads it back from
  /// the isolate at call time. The raw C callback is registered only once —
  /// later calls just replace the slot value, which the trampoline picks up.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let scope = pin!(unsafe { CallbackScope::new(initiator_context) });
      let mut scope = scope.init();
      let isolate = scope.as_ref();
      // Fetch the user callback from the typed slot written below.
      let callback = isolate
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows ABI shim: return through the hidden return-value slot.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self.as_real_ptr(),
          rust_shadow_realm_callback,
        );
      }
    }
  }

  /// Installs the use-counter callback (feature-usage telemetry).
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self.as_real_ptr(), callback);
    }
  }
  /// Registers a GC prologue callback for GC types matching `gc_type_filter`.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // `data` is only forwarded to V8.
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }

  /// Unregisters a GC prologue callback (matched by callback + data pair).
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCPrologueCallback(self.as_real_ptr(), callback, data)
    }
  }

  /// Registers a GC epilogue callback for GC types matching `gc_type_filter`.
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(
        self.as_real_ptr(),
        callback,
        data,
        gc_type_filter,
      );
    }
  }

  /// Unregisters a GC epilogue callback (matched by callback + data pair).
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__RemoveGCEpilogueCallback(self.as_real_ptr(), callback, data)
    }
  }

  /// Registers a callback invoked when the heap approaches its size limit.
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe {
      v8__Isolate__AddNearHeapLimitCallback(self.as_real_ptr(), callback, data)
    };
  }

  /// Unregisters a near-heap-limit callback, optionally restoring
  /// `heap_limit` (0 leaves the current limit in place, per the V8 API).
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(
        self.as_real_ptr(),
        callback,
        heap_limit,
      );
    };
  }

  /// Reports externally allocated memory kept alive by JS objects; returns
  /// the new total. Negative values decrease the total.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
        self.as_real_ptr(),
        change_in_bytes,
      )
    }
  }

  /// Returns the attached cppgc heap, if one was configured.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self.as_real_ptr()).as_ref() }
  }

  /// Installs the fatal out-of-memory handler.
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self.as_real_ptr(), callback) };
  }
  /// Returns the current microtask-running policy.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self.as_real_ptr()) }
  }

  /// Sets the microtask-running policy.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self.as_real_ptr(), policy) }
  }

  /// Runs queued microtasks (only meaningful under `MicrotasksPolicy::Explicit`).
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self.as_real_ptr()) }
  }

  /// Enqueues `microtask` to run at the next microtask checkpoint.
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self.as_real_ptr(), &*microtask) }
  }

  /// Allows or forbids `Atomics.wait` on this isolate's main thread.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self.as_real_ptr(), allow) }
  }

  /// Installs the streaming-Wasm-compilation callback. Only the type `F` is
  /// used (the value is discarded): `F` must be zero-sized, and the C-side
  /// trampoline reconstructs it statically.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType
      + for<'a, 'b, 'c> Fn(
        &'c mut PinScope<'a, 'b>,
        Local<'a, Value>,
        WasmStreaming<false>,
      ),
  {
    unsafe {
      v8__Isolate__SetWasmStreamingCallback(
        self.as_real_ptr(),
        trampoline::<F>(),
      )
    }
  }

  /// Notifies V8 that host date/time configuration changed (e.g. time zone).
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self.as_real_ptr(),
        time_zone_detection,
      );
    }
  }

  /// Whether V8 still has background work (e.g. concurrent compilation).
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self.as_real_ptr()) }
  }

  /// Forces a GC; only valid when V8 runs with `--expose-gc` style testing
  /// configuration. The discriminants map to the C++ glue's expectations.
  #[inline(always)]
  pub fn request_garbage_collection_for_testing(
    &mut self,
    r#type: GarbageCollectionType,
  ) {
    unsafe {
      v8__Isolate__RequestGarbageCollectionForTesting(
        self.as_real_ptr(),
        match r#type {
          GarbageCollectionType::Full => 0,
          GarbageCollectionType::Minor => 1,
        },
      );
    }
  }

  /// Destroys the C++ isolate. The caller must ensure it is no longer
  /// entered and that the annex has been disposed.
  unsafe fn dispose(&mut self) {
    unsafe {
      v8__Isolate__Dispose(self.as_real_ptr());
    }
  }
  /// Serializes a heap snapshot, streaming chunks to `callback`. The
  /// callback returns whether serialization should continue; it is invoked
  /// with an empty slice at end-of-stream.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C trampoline: `arg` is a pointer to the caller's closure, valid for
    // the duration of the synchronous `TakeHeapSnapshot` call below.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        // `from_raw_parts` with size 0 would still require a valid pointer,
        // so the empty case is handled separately.
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(
        self.as_real_ptr(),
        trampoline::<F>,
        arg as _,
      );
    }
  }
/// Sets the default context for the snapshot that is under construction.
///
/// # Panics
/// Panics if this isolate was not created for snapshotting (i.e. the annex
/// holds no `SnapshotCreator`).
#[inline(always)]
pub fn set_default_context(&mut self, context: Local<Context>) {
  let annex = self.get_annex_mut();
  let creator = annex.maybe_snapshot_creator.as_mut().unwrap();
  creator.set_default_context(context);
}
/// Adds `context` to the snapshot under construction and returns its
/// index, as reported by the underlying `SnapshotCreator`.
///
/// # Panics
/// Panics if this isolate was not created for snapshotting.
#[inline(always)]
pub fn add_context(&mut self, context: Local<Context>) -> usize {
  let annex = self.get_annex_mut();
  let creator = annex.maybe_snapshot_creator.as_mut().unwrap();
  creator.add_context(context)
}
/// Attaches isolate-level `data` to the snapshot under construction and
/// returns its index. Any handle type convertible to `Local<Data>` is
/// accepted.
///
/// # Panics
/// Panics if this isolate was not created for snapshotting.
#[inline(always)]
pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
where
  for<'l> Local<'l, T>: Into<Local<'l, Data>>,
{
  let annex = self.get_annex_mut();
  let creator = annex.maybe_snapshot_creator.as_mut().unwrap();
  creator.add_isolate_data(data)
}
/// Attaches `data` to `context` within the snapshot under construction and
/// returns its index. Any handle type convertible to `Local<Data>` is
/// accepted.
///
/// # Panics
/// Panics if this isolate was not created for snapshotting.
#[inline(always)]
pub fn add_context_data<T>(
  &mut self,
  context: Local<Context>,
  data: Local<T>,
) -> usize
where
  for<'l> Local<'l, T>: Into<Local<'l, Data>>,
{
  let annex = self.get_annex_mut();
  let creator = annex.maybe_snapshot_creator.as_mut().unwrap();
  creator.add_context_data(context, data)
}
}
/// Rust-side state attached to each V8 isolate: allocations from the
/// creation parameters kept alive for the isolate's lifetime, `TypeId`-keyed
/// user slots, pending finalizers, an optional snapshot creator, and the raw
/// isolate pointer guarded by a mutex for cross-thread `IsolateHandle` use.
pub(crate) struct IsolateAnnex {
  create_param_allocations: Box<dyn Any>,
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // Raw pointer to the C++ isolate; only dereferenced while
  // `isolate_mutex` is held (see `IsolateHandle`).
  isolate: *mut RealIsolate,
  isolate_mutex: Mutex<()>,
}
// SAFETY: NOTE(review): the annex is shared across threads via
// `Arc<IsolateAnnex>` in `IsolateHandle`; soundness relies on all
// cross-thread access to `isolate` (and the non-Sync fields) being gated by
// `isolate_mutex` — the `IsolateHandle` methods below do so; confirm no
// other path bypasses it.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
impl IsolateAnnex {
  /// Builds a fresh annex for `isolate`, taking ownership of the
  /// allocations made while constructing the isolate's create-params.
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    let isolate = isolate.as_real_ptr();
    Self {
      isolate,
      isolate_mutex: Mutex::new(()),
      create_param_allocations,
      slots: HashMap::default(),
      finalizer_map: FinalizerMap::default(),
      maybe_snapshot_creator: None,
    }
  }
}
// Manual `Debug`: only the pointer and mutex are shown, since
// `create_param_allocations` (`dyn Any`), `slots` and `finalizer_map` are
// deliberately omitted.
impl Debug for IsolateAnnex {
  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
    f.debug_struct("IsolateAnnex")
      .field("isolate", &self.isolate)
      .field("isolate_mutex", &self.isolate_mutex)
      .finish()
  }
}
/// A cheaply-cloneable, thread-safe handle to an isolate, backed by the
/// shared `IsolateAnnex`. Each method locks the annex and checks the stored
/// isolate pointer, so calls made after the isolate is gone report failure
/// instead of touching a dead pointer.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);

impl IsolateHandle {
  /// Returns the raw isolate pointer stored in the annex.
  ///
  /// # Safety
  /// The pointer may be null or dangling once the isolate has been
  /// disposed; callers must ensure the isolate is still alive.
  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut RealIsolate {
    self.0.isolate
  }

  #[inline(always)]
  fn new(isolate: &Isolate) -> Self {
    Self(isolate.get_annex_arc())
  }

  /// Forcefully terminates the current thread of JavaScript execution in
  /// the isolate. Returns `false` when the annex no longer holds a live
  /// isolate pointer.
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock();
    if self.0.isolate.is_null() {
      return false;
    }
    unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
    true
  }

  /// Cancels a previously requested termination. Returns `false` when the
  /// annex no longer holds a live isolate pointer.
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock();
    if self.0.isolate.is_null() {
      return false;
    }
    unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
    true
  }

  /// Reports whether the isolate is currently terminating execution.
  /// Returns `false` when the annex no longer holds a live isolate pointer.
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock();
    if self.0.isolate.is_null() {
      return false;
    }
    unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
  }

  /// Schedules `callback` to be invoked by V8 with `data`. Returns `false`
  /// when the annex no longer holds a live isolate pointer.
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn request_interrupt(
    &self,
    callback: InterruptCallback,
    data: *mut c_void,
  ) -> bool {
    let _lock = self.0.isolate_mutex.lock();
    if self.0.isolate.is_null() {
      return false;
    }
    unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
    true
  }
}
/// An isolate that is owned by Rust, entered on creation, and exited plus
/// disposed when dropped (see `Drop for OwnedIsolate`).
#[derive(Debug)]
pub struct OwnedIsolate {
  cxx_isolate: NonNull<RealIsolate>,
}

impl OwnedIsolate {
  /// Wraps the raw isolate pointer and enters the isolate on the current
  /// thread.
  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
    let isolate = Self::new_already_entered(cxx_isolate);
    unsafe {
      isolate.enter();
    }
    isolate
  }

  /// Wraps a raw isolate pointer that has already been entered.
  ///
  /// # Panics
  /// Panics if `cxx_isolate` is null.
  pub(crate) fn new_already_entered(cxx_isolate: *mut RealIsolate) -> Self {
    // `NonNull::new` rejects null up front; the redundant let-and-return
    // binding of the original is gone (clippy::let_and_return).
    Self {
      cxx_isolate: NonNull::new(cxx_isolate).unwrap(),
    }
  }
}
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      // A snapshot-creator isolate must have produced its blob (which
      // consumes the isolate via `create_blob`) before being dropped.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Isolates are entered on creation, so they must be dropped LIFO:
      // this isolate has to be the one currently entered.
      assert!(
        std::ptr::eq(self.cxx_isolate.as_mut(), v8__Isolate__GetCurrent()),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      // Teardown order matters: exit the isolate, free the Rust-side
      // annex, let the platform observe the shutdown, and only then
      // destroy the C++ isolate itself.
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
impl OwnedIsolate {
  /// Consumes the isolate and produces the startup snapshot blob.
  ///
  /// # Panics
  /// Panics if this isolate was not created with a snapshot creator.
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();
    // Free the annex now; the returned create-param allocations stay alive
    // until the end of this function, i.e. past `create_blob` below.
    let _create_param_allocations = unsafe {
      self.dispose_annex()
    };
    // Skip `Drop for OwnedIsolate`: NOTE(review): presumably the
    // `SnapshotCreator` owns and disposes the raw isolate as part of
    // `create_blob` — confirm in the snapshot module.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: reinterprets `&NonNull<RealIsolate>` as `&Isolate`, which
    // assumes `Isolate` has the same layout as a single isolate pointer.
    // NOTE(review): confirm `Isolate`'s representation where it is defined
    // (outside this chunk).
    unsafe {
      std::mem::transmute::<&NonNull<RealIsolate>, &Isolate>(&self.cxx_isolate)
    }
  }
}
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: same layout assumption as `Deref`; `&mut self` guarantees
    // unique access.
    unsafe {
      std::mem::transmute::<&mut NonNull<RealIsolate>, &mut Isolate>(
        &mut self.cxx_isolate,
      )
    }
  }
}
// Lets APIs taking `impl AsMut<Isolate>` accept an `OwnedIsolate` directly
// (the `self` here goes through `DerefMut`).
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
// Identity conversion so a bare `&mut Isolate` also satisfies
// `AsMut<Isolate>` bounds.
impl AsMut<Isolate> for Isolate {
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}
/// An isolate that has not been entered on any thread. Use [`Locker`] to
/// lock and enter it; dropping the value disposes the isolate (see the
/// `Drop` impl below).
#[derive(Debug)]
pub struct UnenteredIsolate {
  cxx_isolate: NonNull<RealIsolate>,
}

impl UnenteredIsolate {
  /// Wraps the raw isolate pointer.
  ///
  /// # Panics
  /// Panics if `cxx_isolate` is null.
  pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
    Self { cxx_isolate }
  }

  /// Returns the raw isolate pointer.
  #[inline]
  pub fn as_raw(&self) -> *mut RealIsolate {
    self.cxx_isolate.as_ptr()
  }
}
impl Drop for UnenteredIsolate {
  fn drop(&mut self) {
    // Disposing while a `Locker` holds the isolate would tear it down
    // under another thread's feet.
    debug_assert!(
      !crate::scope::raw::Locker::is_locked(self.cxx_isolate),
      "Cannot drop UnenteredIsolate while a Locker is held. \
      Drop the Locker first."
    );
    unsafe {
      let isolate = Isolate::from_raw_ref_mut(&mut self.cxx_isolate);
      // A snapshot-creator isolate must already have produced its blob.
      let snapshot_creator =
        isolate.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "v8::UnenteredIsolate::create_blob must be called before dropping"
      );
      // Same teardown order as `Drop for OwnedIsolate`, minus the exit
      // (this isolate was never entered): free the annex, notify the
      // platform, then destroy the C++ isolate.
      isolate.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), isolate);
      isolate.dispose();
    }
  }
}
// SAFETY: NOTE(review): an un-entered isolate carries no thread affinity,
// and cross-thread use is gated through `Locker` — confirm no thread-local
// V8 state is touched without the lock.
unsafe impl Send for UnenteredIsolate {}
/// Heap usage statistics for an isolate, wrapping V8's `HeapStatistics`.
/// All accessors simply read the corresponding field filled in by V8.
pub struct HeapStatistics(v8__HeapStatistics);
impl HeapStatistics {
  /// Total size of the V8 heap.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }
  /// Portion of the heap that is executable.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }
  /// Physical memory committed for the heap.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }
  /// Memory still available to the heap.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }
  /// Total size reserved for global handles.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }
  /// Size of global handles currently in use.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }
  /// Heap memory currently in use.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }
  /// Configured upper limit for the heap size.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }
  /// Memory obtained through malloc.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }
  /// Externally allocated memory attributed to the isolate.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }
  /// Peak of `malloced_memory` observed so far.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }
  /// Number of native contexts currently alive.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }
  /// Number of contexts detached but not yet collected.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }
  /// Whether V8 overwrites ("zaps") freed heap memory.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
/// Statistics for a single V8 heap space, wrapping `HeapSpaceStatistics`.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
impl HeapSpaceStatistics {
  /// Name of the heap space as a C string.
  /// NOTE(review): the `'static` lifetime assumes V8 backs this with a
  /// string of program lifetime — confirm in the C++ bindings.
  pub fn space_name(&self) -> &'static CStr {
    unsafe { CStr::from_ptr(self.0.space_name_) }
  }
  /// Total size of the space.
  pub fn space_size(&self) -> usize {
    self.0.space_size_
  }
  /// Memory currently used within the space.
  pub fn space_used_size(&self) -> usize {
    self.0.space_used_size_
  }
  /// Memory still available to the space.
  pub fn space_available_size(&self) -> usize {
    self.0.space_available_size_
  }
  /// Physical memory committed for the space.
  pub fn physical_space_size(&self) -> usize {
    self.0.physical_space_size_
  }
}
/// Code/bytecode memory statistics, wrapping V8's `HeapCodeStatistics`.
pub struct HeapCodeStatistics(v8__HeapCodeStatistics);
impl HeapCodeStatistics {
  /// Size of generated machine code plus its metadata.
  pub fn code_and_metadata_size(&self) -> usize {
    self.0.code_and_metadata_size_
  }
  /// Size of interpreter bytecode plus its metadata.
  pub fn bytecode_and_metadata_size(&self) -> usize {
    self.0.bytecode_and_metadata_size_
  }
  /// Size of external script sources.
  pub fn external_script_source_size(&self) -> usize {
    self.0.external_script_source_size_
  }
  /// Size of CPU-profiler metadata.
  pub fn cpu_profiler_metadata_size(&self) -> usize {
    self.0.cpu_profiler_metadata_size_
  }
}
// Adapts a Rust closure into the C-ABI callback V8 invokes to prepare
// `Error.stack`. `F: UnitType` carries no state, so only the monomorphized
// wrapper is handed to C and the closure is re-materialized via `F::get()`.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + for<'a> Fn(
      &mut PinScope<'s, 'a>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // On Windows the result is written through an out-pointer (`ret_ptr`)
  // and the pointer itself is returned — presumably to match the MSVC
  // return ABI for this signature; NOTE(review): confirm against the
  // binding's Windows calling convention.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      // Re-enter V8 from the callback's context, then run the closure.
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }
  // On other targets the result is returned by value, wrapped in
  // `PrepareStackTraceCallbackRet`.
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let scope = pin!(unsafe { CallbackScope::new(context) });
      let mut scope: crate::PinnedRef<CallbackScope> = scope.init();
      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
/// A trivial hasher for `TypeId` keys: `TypeId`'s `Hash` impl feeds the
/// hasher exactly one `u64`, which is already a high-quality hash, so it is
/// stored verbatim and returned from `finish`.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // The single u64 written so far; `None` until `write_u64` is called.
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  fn write(&mut self, _bytes: &[u8]) {
    // `TypeId` hashes via `write_u64` only; any byte-level write means the
    // hasher is being misused with some other key type.
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // Exactly one value may be written per hasher instance (debug-checked).
    debug_assert!(self.state.is_none());
    self.state = Some(value);
  }

  #[inline]
  fn finish(&self) -> u64 {
    // Panics if nothing was written, which would indicate misuse.
    self.state.unwrap()
  }
}

/// `BuildHasher` producing `TypeIdHasher` instances, for `HashMap`s keyed
/// on `TypeId` (see `IsolateAnnex::slots`).
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;

impl BuildHasher for BuildTypeIdHasher {
  type Hasher = TypeIdHasher;

  #[inline]
  fn build_hasher(&self) -> Self::Hasher {
    TypeIdHasher::default()
  }
}
// Compile-time guard on `TypeId`'s layout: it must match either `u64` or
// `u128` in size and alignment. This appears to back the assumptions of
// the `TypeId`-keyed slot map and `TypeIdHasher`, and will fail the build
// if a future compiler changes `TypeId`'s representation.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
/// Type-erased storage for a single value, used by the `TypeId`-keyed slot
/// map in `IsolateAnnex`. Values that fit in one machine word are stored
/// inline; larger (or over-aligned) values are boxed first (see
/// `RawSlot::needs_box`).
pub(crate) struct RawSlot {
  data: RawSlotData,
  // Monomorphized destructor for the stored type; `None` when dropping it
  // is a no-op.
  dtor: Option<RawSlotDtor>,
}
// One word of possibly-uninitialized inline storage (may hold a `Box` ptr).
type RawSlotData = MaybeUninit<usize>;
// Destructor invoked by `Drop for RawSlot` on the inline storage.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
impl RawSlot {
  /// Stores `value`, boxing it first when it does not fit the inline
  /// one-word storage.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  /// Borrows the stored value.
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with; the casts
  /// below reinterpret the raw storage unchecked.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Stored as `Box<T>`; `&Box<T>` deref-coerces to `&T` on return.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  /// Mutably borrows the stored value.
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  /// Moves the stored value out, consuming the slot without running its
  /// destructor (ownership transfers to the caller).
  ///
  /// # Safety
  /// `T` must be exactly the type this slot was created with.
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        // Read the `Box<T>` out of the inline storage, then unbox it.
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // Skip `Drop for RawSlot` — the bits were just moved out above.
      forget(self);
      value
    }
  }

  // A value needs boxing when it is larger, or more strictly aligned,
  // than the one-word inline storage.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // `B` is the representation actually written inline (either `T` or
  // `Box<T>`); by construction it always fits the storage.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      // SAFETY: asserted above that `B` fits (size and alignment) in the
      // inline storage.
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    // Register a destructor only when dropping `B` actually does work.
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // Monomorphized destructor stored in `dtor`; drops the `B` held inline
  // (for boxed values, dropping the `Box` frees the heap allocation).
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
impl Drop for RawSlot {
  fn drop(&mut self) {
    // Run the monomorphized destructor registered in `new_internal`, if
    // the stored type needed one.
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}
// Lets APIs taking `impl AsRef<Isolate>` accept an `OwnedIsolate` directly.
impl AsRef<Isolate> for OwnedIsolate {
  fn as_ref(&self) -> &Isolate {
    unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
  }
}
// Identity conversion so a bare `&Isolate` also satisfies the bound.
impl AsRef<Isolate> for Isolate {
  fn as_ref(&self) -> &Isolate {
    self
  }
}
/// RAII guard that locks an [`UnenteredIsolate`] and enters it, giving the
/// current thread use of the isolate for the guard's lifetime (access goes
/// through the `Deref`/`DerefMut` impls below).
pub struct Locker<'a> {
  // `ManuallyDrop` so `Drop for Locker` can control ordering: the isolate
  // is exited *before* the raw lock is released.
  raw: std::mem::ManuallyDrop<crate::scope::raw::Locker>,
  isolate: &'a mut UnenteredIsolate,
}
impl<'a> Locker<'a> {
  /// Acquires the lock for `isolate`, then enters it.
  pub fn new(isolate: &'a mut UnenteredIsolate) -> Self {
    let isolate_ptr = isolate.cxx_isolate;
    // Initialize the raw locker in place; the lock is held from `init`
    // onward, so the isolate is entered while locked.
    let mut raw = unsafe { crate::scope::raw::Locker::uninit() };
    unsafe { raw.init(isolate_ptr) };
    unsafe {
      v8__Isolate__Enter(isolate_ptr.as_ptr());
    }
    Self {
      raw: std::mem::ManuallyDrop::new(raw),
      isolate,
    }
  }
  /// Returns whether `isolate` is currently locked by a `Locker`.
  pub fn is_locked(isolate: &UnenteredIsolate) -> bool {
    crate::scope::raw::Locker::is_locked(isolate.cxx_isolate)
  }
}
impl Drop for Locker<'_> {
  fn drop(&mut self) {
    unsafe {
      // Reverse order of `Locker::new`: exit the isolate first, then
      // release the lock by dropping the raw locker.
      v8__Isolate__Exit(self.isolate.cxx_isolate.as_ptr());
      std::mem::ManuallyDrop::drop(&mut self.raw);
    }
  }
}
// While the lock is held, the guard exposes the isolate itself.
impl Deref for Locker<'_> {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: the lock is held for `self`'s entire lifetime. NOTE(review):
    // also relies on `Isolate::from_raw_ref`'s contract, defined outside
    // this chunk.
    unsafe { Isolate::from_raw_ref(&self.isolate.cxx_isolate) }
  }
}
impl DerefMut for Locker<'_> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: as in `deref`, plus `&mut self` guarantees exclusivity.
    unsafe { Isolate::from_raw_ref_mut(&mut self.isolate.cxx_isolate) }
  }
}