use crate::function::FunctionCallbackInfo;
use crate::gc::GCCallbackFlags;
use crate::gc::GCType;
use crate::handle::FinalizerCallback;
use crate::handle::FinalizerMap;
use crate::isolate_create_params::raw;
use crate::isolate_create_params::CreateParams;
use crate::promise::PromiseRejectMessage;
use crate::scope::data::ScopeData;
use crate::snapshot::SnapshotCreator;
use crate::support::char;
use crate::support::int;
use crate::support::Allocated;
use crate::support::MapFnFrom;
use crate::support::MapFnTo;
use crate::support::Opaque;
use crate::support::ToCFn;
use crate::support::UnitType;
use crate::wasm::trampoline;
use crate::wasm::WasmStreaming;
use crate::Array;
use crate::CallbackScope;
use crate::Context;
use crate::Data;
use crate::ExternalReferences;
use crate::FixedArray;
use crate::Function;
use crate::FunctionCodeHandling;
use crate::HandleScope;
use crate::Local;
use crate::Message;
use crate::Module;
use crate::Object;
use crate::Promise;
use crate::PromiseResolver;
use crate::StartupData;
use crate::String;
use crate::Value;
use std::any::Any;
use std::any::TypeId;
use std::collections::HashMap;
use std::ffi::c_void;
use std::fmt::{self, Debug, Formatter};
use std::hash::BuildHasher;
use std::hash::Hasher;
use std::mem::align_of;
use std::mem::forget;
use std::mem::needs_drop;
use std::mem::size_of;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ptr;
use std::ptr::drop_in_place;
use std::ptr::null_mut;
use std::ptr::NonNull;
use std::sync::Arc;
use std::sync::Mutex;
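/// Policy for running microtasks:
/// - `Explicit`: microtasks are run only when
///   [`Isolate::perform_microtask_checkpoint()`] is called;
/// - `Auto`: microtasks are run automatically when the script call depth
///   decrements to zero.
///
/// V8 also defines a scoped policy (discriminant 1), which is not exposed
/// here; that is why the values jump from 0 to 2.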
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
Explicit = 0,
Auto = 2,
}
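/// Memory pressure level passed to [`Isolate::memory_pressure_notification()`].
/// `None` signals no pressure, `Moderate` hints V8 to speed up incremental
/// garbage collection, and `Critical` hints V8 to free memory as soon as
/// possible, at the cost of longer GC pauses.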
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
None = 0,
Moderate = 1,
Critical = 2,
}
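/// Lifecycle event reported to a [`PromiseHook`]: promise creation (`Init`),
/// settlement (`Resolve`), and entering/leaving a reaction job
/// (`Before`/`After`).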
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
Init,
Resolve,
Before,
After,
}
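/// Kind of collection requested through
/// [`Isolate::request_garbage_collection_for_testing()`]: a full (major) GC
/// or a scavenge (minor) GC.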
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
Full,
Minor,
}
pub type MessageCallback = extern "C" fn(Local<Message>, Local<Value>);
pub type PromiseHook =
extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);
pub type PromiseRejectCallback = extern "C" fn(PromiseRejectMessage);
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
Success,
Fail,
}
pub type WasmAsyncResolvePromiseCallback = extern "C" fn(
*mut Isolate,
Local<Context>,
Local<PromiseResolver>,
Local<Value>,
WasmAsyncSuccess,
);
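/// Invoked the first time `import.meta` is accessed for a module, so the
/// embedder can populate properties on the freshly created `meta` object.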
pub type HostInitializeImportMetaObjectCallback =
extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
pub trait HostImportModuleDynamicallyCallback:
UnitType
+ for<'s> FnOnce(
&mut HandleScope<'s>,
Local<'s, Data>,
Local<'s, Value>,
Local<'s, String>,
Local<'s, FixedArray>,
) -> Option<Local<'s, Promise>>
{
fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
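/// Raw, ABI-level form of the dynamic import callback that is actually handed
/// to V8. On Windows x64 the C++ ABI returns `MaybeLocal<Promise>` through a
/// pointer to caller-allocated memory, hence the extra return-slot parameter
/// in that variant; on Unix-like targets the value is returned directly.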
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
for<'s> extern "C" fn(
Local<'s, Context>,
Local<'s, Data>,
Local<'s, Value>,
Local<'s, String>,
Local<'s, FixedArray>,
) -> *mut Promise;
#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub(crate) type RawHostImportModuleDynamicallyCallback =
for<'s> extern "C" fn(
*mut *mut Promise,
Local<'s, Context>,
Local<'s, Data>,
Local<'s, Value>,
Local<'s, String>,
Local<'s, FixedArray>,
) -> *mut *mut Promise;
impl<F> HostImportModuleDynamicallyCallback for F
where
F: UnitType
+ for<'s> FnOnce(
&mut HandleScope<'s>,
Local<'s, Data>,
Local<'s, Value>,
Local<'s, String>,
Local<'s, FixedArray>,
) -> Option<Local<'s, Promise>>,
{
#[inline(always)]
fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
#[inline(always)]
fn scope_adapter<'s, F: HostImportModuleDynamicallyCallback>(
context: Local<'s, Context>,
host_defined_options: Local<'s, Data>,
resource_name: Local<'s, Value>,
specifier: Local<'s, String>,
import_assertions: Local<'s, FixedArray>,
) -> Option<Local<'s, Promise>> {
let scope = &mut unsafe { CallbackScope::new(context) };
(F::get())(
scope,
host_defined_options,
resource_name,
specifier,
import_assertions,
)
}
#[cfg(target_family = "unix")]
#[inline(always)]
extern "C" fn abi_adapter<'s, F: HostImportModuleDynamicallyCallback>(
context: Local<'s, Context>,
host_defined_options: Local<'s, Data>,
resource_name: Local<'s, Value>,
specifier: Local<'s, String>,
import_assertions: Local<'s, FixedArray>,
) -> *mut Promise {
scope_adapter::<F>(
context,
host_defined_options,
resource_name,
specifier,
import_assertions,
)
.map(|return_value| return_value.as_non_null().as_ptr())
.unwrap_or_else(null_mut)
}
#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
#[inline(always)]
extern "C" fn abi_adapter<'s, F: HostImportModuleDynamicallyCallback>(
return_value: *mut *mut Promise,
context: Local<'s, Context>,
host_defined_options: Local<'s, Data>,
resource_name: Local<'s, Value>,
specifier: Local<'s, String>,
import_assertions: Local<'s, FixedArray>,
) -> *mut *mut Promise {
unsafe {
std::ptr::write(
return_value,
scope_adapter::<F>(
context,
host_defined_options,
resource_name,
specifier,
import_assertions,
)
.map(|return_value| return_value.as_non_null().as_ptr())
.unwrap_or_else(null_mut),
);
return_value
}
}
abi_adapter::<F>
}
}
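/// Invoked when a new `ShadowRealm` is being constructed; it should create and
/// return the context backing that realm, or `None` on failure.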
pub type HostCreateShadowRealmContextCallback =
for<'s> fn(scope: &mut HandleScope<'s>) -> Option<Local<'s, Context>>;
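/// GC prologue callback signature: receives the isolate, the GC type and
/// flags, plus the raw `data` pointer the callback was registered with.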
pub type GcCallbackWithData = extern "C" fn(
isolate: *mut Isolate,
r#type: GCType,
flags: GCCallbackFlags,
data: *mut c_void,
);
pub type InterruptCallback =
extern "C" fn(isolate: &mut Isolate, data: *mut c_void);
pub type NearHeapLimitCallback = extern "C" fn(
data: *mut c_void,
current_heap_limit: usize,
initial_heap_limit: usize,
) -> usize;
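/// Extra information handed to an [`OomErrorCallback`]: whether the OOM
/// occurred on the V8 heap, and a C string with further detail (may be null).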
#[repr(C)]
pub struct OomDetails {
pub is_heap_oom: bool,
pub detail: *const char,
}
pub type OomErrorCallback =
extern "C" fn(location: *const char, details: &OomDetails);
#[repr(C)]
#[derive(Debug)]
pub struct HeapStatistics([usize; 16]);
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> = extern "C" fn(
*mut *const Value,
Local<'s, Context>,
Local<'s, Value>,
Local<'s, Array>,
) -> *mut *const Value;
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);
#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
extern "C" fn(
Local<'s, Context>,
Local<'s, Value>,
Local<'s, Array>,
) -> PrepareStackTraceCallbackRet;
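// Raw bindings into the V8 C++ API. The corresponding symbols are provided by
// the C++ glue code this crate is linked against.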
extern "C" {
static v8__internal__Internals__kIsolateEmbedderDataOffset: int;
fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut Isolate;
fn v8__Isolate__Dispose(this: *mut Isolate);
fn v8__Isolate__GetNumberOfDataSlots(this: *const Isolate) -> u32;
fn v8__Isolate__Enter(this: *mut Isolate);
fn v8__Isolate__Exit(this: *mut Isolate);
fn v8__Isolate__MemoryPressureNotification(this: *mut Isolate, level: u8);
fn v8__Isolate__ClearKeptObjects(isolate: *mut Isolate);
fn v8__Isolate__LowMemoryNotification(isolate: *mut Isolate);
fn v8__Isolate__GetHeapStatistics(this: *mut Isolate, s: *mut HeapStatistics);
fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
this: *mut Isolate,
capture: bool,
frame_limit: i32,
);
fn v8__Isolate__AddMessageListener(
isolate: *mut Isolate,
callback: MessageCallback,
) -> bool;
fn v8__Isolate__AddGCPrologueCallback(
isolate: *mut Isolate,
callback: GcCallbackWithData,
data: *mut c_void,
gc_type_filter: GCType,
);
fn v8__Isolate__RemoveGCPrologueCallback(
isolate: *mut Isolate,
callback: GcCallbackWithData,
data: *mut c_void,
);
fn v8__Isolate__AddNearHeapLimitCallback(
isolate: *mut Isolate,
callback: NearHeapLimitCallback,
data: *mut c_void,
);
fn v8__Isolate__RemoveNearHeapLimitCallback(
isolate: *mut Isolate,
callback: NearHeapLimitCallback,
heap_limit: usize,
);
fn v8__Isolate__SetOOMErrorHandler(
isolate: *mut Isolate,
callback: OomErrorCallback,
);
fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
isolate: *mut Isolate,
change_in_bytes: i64,
) -> i64;
fn v8__Isolate__SetPrepareStackTraceCallback(
isolate: *mut Isolate,
callback: PrepareStackTraceCallback,
);
fn v8__Isolate__SetPromiseHook(isolate: *mut Isolate, hook: PromiseHook);
fn v8__Isolate__SetPromiseRejectCallback(
isolate: *mut Isolate,
callback: PromiseRejectCallback,
);
fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
isolate: *mut Isolate,
callback: WasmAsyncResolvePromiseCallback,
);
fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
isolate: *mut Isolate,
callback: HostInitializeImportMetaObjectCallback,
);
fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
isolate: *mut Isolate,
callback: RawHostImportModuleDynamicallyCallback,
);
#[cfg(not(target_os = "windows"))]
fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
isolate: *mut Isolate,
callback: extern "C" fn(initiator_context: Local<Context>) -> *mut Context,
);
#[cfg(target_os = "windows")]
fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
isolate: *mut Isolate,
callback: extern "C" fn(
rv: *mut *mut Context,
initiator_context: Local<Context>,
) -> *mut *mut Context,
);
fn v8__Isolate__RequestInterrupt(
isolate: *const Isolate,
callback: InterruptCallback,
data: *mut c_void,
);
fn v8__Isolate__TerminateExecution(isolate: *const Isolate);
fn v8__Isolate__IsExecutionTerminating(isolate: *const Isolate) -> bool;
fn v8__Isolate__CancelTerminateExecution(isolate: *const Isolate);
fn v8__Isolate__GetMicrotasksPolicy(
isolate: *const Isolate,
) -> MicrotasksPolicy;
fn v8__Isolate__SetMicrotasksPolicy(
isolate: *mut Isolate,
policy: MicrotasksPolicy,
);
fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut Isolate);
fn v8__Isolate__EnqueueMicrotask(
isolate: *mut Isolate,
function: *const Function,
);
fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut Isolate, allow: bool);
fn v8__Isolate__SetWasmStreamingCallback(
isolate: *mut Isolate,
callback: extern "C" fn(*const FunctionCallbackInfo),
);
fn v8__Isolate__HasPendingBackgroundTasks(isolate: *const Isolate) -> bool;
fn v8__Isolate__RequestGarbageCollectionForTesting(
isolate: *mut Isolate,
r#type: usize,
);
fn v8__HeapProfiler__TakeHeapSnapshot(
isolate: *mut Isolate,
callback: extern "C" fn(*mut c_void, *const u8, usize) -> bool,
arg: *mut c_void,
);
fn v8__HeapStatistics__CONSTRUCT(s: *mut MaybeUninit<HeapStatistics>);
fn v8__HeapStatistics__total_heap_size(s: *const HeapStatistics) -> usize;
fn v8__HeapStatistics__total_heap_size_executable(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__total_physical_size(s: *const HeapStatistics)
-> usize;
fn v8__HeapStatistics__total_available_size(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__total_global_handles_size(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__used_global_handles_size(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__used_heap_size(s: *const HeapStatistics) -> usize;
fn v8__HeapStatistics__heap_size_limit(s: *const HeapStatistics) -> usize;
fn v8__HeapStatistics__malloced_memory(s: *const HeapStatistics) -> usize;
fn v8__HeapStatistics__external_memory(s: *const HeapStatistics) -> usize;
fn v8__HeapStatistics__peak_malloced_memory(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__number_of_native_contexts(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__number_of_detached_contexts(
s: *const HeapStatistics,
) -> usize;
fn v8__HeapStatistics__does_zap_garbage(s: *const HeapStatistics) -> usize;
}
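/// An isolate is an independent instance of the V8 engine, with its own heap.
/// Only one thread may use a given isolate at a time, though different threads
/// may use different isolates concurrently. On the Rust side an `Isolate` is
/// only ever handled by reference or raw pointer; it is never constructed or
/// moved by value.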
#[repr(C)]
#[derive(Debug)]
pub struct Isolate(Opaque);
impl Isolate {
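// V8 gives embedders a handful of raw data slots per isolate. The first two
// are reserved for this crate's own bookkeeping (the annex pointer and the
// current scope data); the remainder are exposed through `get_data`/`set_data`.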
const EMBEDDER_DATA_SLOT_COUNT: u32 = 4;
const EMBEDDER_DATA_OFFSET: usize = size_of::<[*const (); 23]>();
const ANNEX_SLOT: u32 = 0;
const CURRENT_SCOPE_DATA_SLOT: u32 = 1;
const INTERNAL_DATA_SLOT_COUNT: u32 = 2;
#[inline(always)]
fn assert_embedder_data_slot_count_and_offset_correct(&self) {
assert_eq!(
unsafe { v8__Isolate__GetNumberOfDataSlots(self) },
Self::EMBEDDER_DATA_SLOT_COUNT
);
assert_eq!(
unsafe { v8__internal__Internals__kIsolateEmbedderDataOffset } as usize,
Self::EMBEDDER_DATA_OFFSET
);
}
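/// Creates a new isolate from the given [`CreateParams`], enters it, and
/// returns an [`OwnedIsolate`] that exits and disposes of it when dropped.
/// V8 must already have been initialized.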
#[allow(clippy::new_ret_no_self)]
pub fn new(params: CreateParams) -> OwnedIsolate {
crate::V8::assert_initialized();
let (raw_create_params, create_param_allocations) = params.finalize();
let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
let mut owned_isolate = OwnedIsolate::new(cxx_isolate);
owned_isolate.assert_embedder_data_slot_count_and_offset_correct();
ScopeData::new_root(&mut owned_isolate);
owned_isolate.create_annex(create_param_allocations);
unsafe {
owned_isolate.enter();
}
owned_isolate
}
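/// Creates an isolate set up for building a startup snapshot; finish by
/// calling [`OwnedIsolate::create_blob`] instead of dropping the isolate.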
#[allow(clippy::new_ret_no_self)]
pub fn snapshot_creator(
external_references: Option<&'static ExternalReferences>,
) -> OwnedIsolate {
SnapshotCreator::new(external_references)
}
#[allow(clippy::new_ret_no_self)]
pub fn snapshot_creator_from_existing_snapshot(
existing_snapshot_blob: impl Allocated<[u8]>,
external_references: Option<&'static ExternalReferences>,
) -> OwnedIsolate {
SnapshotCreator::from_existing_snapshot(
existing_snapshot_blob,
external_references,
)
}
#[inline(always)]
pub fn create_params() -> CreateParams {
CreateParams::default()
}
#[inline(always)]
pub fn thread_safe_handle(&self) -> IsolateHandle {
IsolateHandle::new(self)
}
#[inline(always)]
pub fn terminate_execution(&self) -> bool {
self.thread_safe_handle().terminate_execution()
}
#[inline(always)]
pub fn cancel_terminate_execution(&self) -> bool {
self.thread_safe_handle().cancel_terminate_execution()
}
#[inline(always)]
pub fn is_execution_terminating(&self) -> bool {
self.thread_safe_handle().is_execution_terminating()
}
pub(crate) fn create_annex(
&mut self,
create_param_allocations: Box<dyn Any>,
) {
let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
let annex_ptr = Arc::into_raw(annex_arc);
assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
}
#[inline(always)]
fn get_annex(&self) -> &IsolateAnnex {
let annex_ptr =
self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
assert!(!annex_ptr.is_null());
unsafe { &*annex_ptr }
}
#[inline(always)]
fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
let annex_ptr =
self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
assert!(!annex_ptr.is_null());
unsafe { &mut *annex_ptr }
}
pub(crate) fn set_snapshot_creator(
&mut self,
snapshot_creator: SnapshotCreator,
) {
let prev = self
.get_annex_mut()
.maybe_snapshot_creator
.replace(snapshot_creator);
assert!(prev.is_none());
}
pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
&self.get_annex().finalizer_map
}
pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
&mut self.get_annex_mut().finalizer_map
}
fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
let annex_ptr = self.get_annex();
let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
let _ = Arc::into_raw(annex_arc.clone());
annex_arc
}
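/// Embedder data slot accessors. Slot indices are relative to the slots
/// available to the embedder, i.e. the internally reserved slots are skipped;
/// [`Isolate::get_number_of_data_slots`] reports how many are usable.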
pub fn get_data(&self, slot: u32) -> *mut c_void {
self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
}
#[inline(always)]
pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data)
}
pub fn get_number_of_data_slots(&self) -> u32 {
Self::EMBEDDER_DATA_SLOT_COUNT - Self::INTERNAL_DATA_SLOT_COUNT
}
#[inline(always)]
pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
let slots = unsafe {
let p = self as *const Self as *const u8;
let p = p.add(Self::EMBEDDER_DATA_OFFSET);
let p = p as *const [*mut c_void; Self::EMBEDDER_DATA_SLOT_COUNT as _];
&*p
};
slots[slot as usize]
}
#[inline(always)]
pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
let slots = unsafe {
let p = self as *mut Self as *mut u8;
let p = p.add(Self::EMBEDDER_DATA_OFFSET);
let p = p as *mut [*mut c_void; Self::EMBEDDER_DATA_SLOT_COUNT as _];
&mut *p
};
slots[slot as usize] = data;
}
#[inline(always)]
pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
NonNull::new(scope_data_ptr).map(NonNull::cast)
}
#[inline(always)]
pub(crate) fn set_current_scope_data(
&mut self,
scope_data: Option<NonNull<ScopeData>>,
) {
let scope_data_ptr = scope_data
.map(NonNull::cast)
.map(NonNull::as_ptr)
.unwrap_or_else(null_mut);
self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
}
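/// Typed slot storage: at most one value per `T: 'static` is kept in the
/// isolate's annex, keyed by `TypeId`. `set_slot` returns `true` if no value
/// of that type was stored before; `remove_slot` moves the value out again.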
#[inline(always)]
pub fn get_slot<T: 'static>(&self) -> Option<&T> {
self
.get_annex()
.slots
.get(&TypeId::of::<T>())
.map(|slot| unsafe { slot.borrow::<T>() })
}
#[inline(always)]
pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
self
.get_annex_mut()
.slots
.get_mut(&TypeId::of::<T>())
.map(|slot| unsafe { slot.borrow_mut::<T>() })
}
#[inline(always)]
pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
self
.get_annex_mut()
.slots
.insert(TypeId::of::<T>(), RawSlot::new(value))
.is_none()
}
#[inline(always)]
pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
self
.get_annex_mut()
.slots
.remove(&TypeId::of::<T>())
.map(|slot| unsafe { slot.into_inner::<T>() })
}
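/// Enters this isolate on the current thread.
///
/// # Safety
/// Calls must be balanced with [`Isolate::exit`], and an isolate must not be
/// entered on more than one thread at a time. [`OwnedIsolate`] enters on
/// creation and exits on drop, so this is rarely called directly.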
#[inline(always)]
pub unsafe fn enter(&mut self) {
v8__Isolate__Enter(self)
}
#[inline(always)]
pub unsafe fn exit(&mut self) {
v8__Isolate__Exit(self)
}
#[inline(always)]
pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
unsafe { v8__Isolate__MemoryPressureNotification(self, level as u8) }
}
#[inline(always)]
pub fn clear_kept_objects(&mut self) {
unsafe { v8__Isolate__ClearKeptObjects(self) }
}
#[inline(always)]
pub fn low_memory_notification(&mut self) {
unsafe { v8__Isolate__LowMemoryNotification(self) }
}
#[inline(always)]
pub fn get_heap_statistics(&mut self, s: &mut HeapStatistics) {
unsafe { v8__Isolate__GetHeapStatistics(self, s) }
}
#[inline(always)]
pub fn set_capture_stack_trace_for_uncaught_exceptions(
&mut self,
capture: bool,
frame_limit: i32,
) {
unsafe {
v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
self,
capture,
frame_limit,
)
}
}
#[inline(always)]
pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
unsafe { v8__Isolate__AddMessageListener(self, callback) }
}
#[inline(always)]
pub fn set_prepare_stack_trace_callback<'s>(
&mut self,
callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
) {
unsafe {
v8__Isolate__SetPrepareStackTraceCallback(self, callback.map_fn_to())
};
}
#[inline(always)]
pub fn set_promise_hook(&mut self, hook: PromiseHook) {
unsafe { v8__Isolate__SetPromiseHook(self, hook) }
}
#[inline(always)]
pub fn set_promise_reject_callback(
&mut self,
callback: PromiseRejectCallback,
) {
unsafe { v8__Isolate__SetPromiseRejectCallback(self, callback) }
}
#[inline(always)]
pub fn set_wasm_async_resolve_promise_callback(
&mut self,
callback: WasmAsyncResolvePromiseCallback,
) {
unsafe { v8__Isolate__SetWasmAsyncResolvePromiseCallback(self, callback) }
}
#[inline(always)]
pub fn set_host_initialize_import_meta_object_callback(
&mut self,
callback: HostInitializeImportMetaObjectCallback,
) {
unsafe {
v8__Isolate__SetHostInitializeImportMetaObjectCallback(self, callback)
}
}
#[inline(always)]
pub fn set_host_import_module_dynamically_callback(
&mut self,
callback: impl HostImportModuleDynamicallyCallback,
) {
unsafe {
v8__Isolate__SetHostImportModuleDynamicallyCallback(
self,
callback.to_c_fn(),
)
}
}
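/// Installs the ShadowRealm context callback. The Rust callback is stored in a
/// typed slot and a fixed `extern "C"` trampoline with the appropriate
/// platform ABI is registered with V8 the first time this is called.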
pub fn set_host_create_shadow_realm_context_callback(
&mut self,
callback: HostCreateShadowRealmContextCallback,
) {
#[inline]
extern "C" fn rust_shadow_realm_callback(
initiator_context: Local<Context>,
) -> *mut Context {
let mut scope = unsafe { CallbackScope::new(initiator_context) };
let callback = scope
.get_slot::<HostCreateShadowRealmContextCallback>()
.unwrap();
let context = callback(&mut scope);
context
.map(|l| l.as_non_null().as_ptr())
.unwrap_or_else(null_mut)
}
#[cfg(target_os = "windows")]
extern "C" fn rust_shadow_realm_callback_windows(
rv: *mut *mut Context,
initiator_context: Local<Context>,
) -> *mut *mut Context {
let ret = rust_shadow_realm_callback(initiator_context);
unsafe {
rv.write(ret);
}
rv
}
let slot_didnt_exist_before = self.set_slot(callback);
if slot_didnt_exist_before {
unsafe {
#[cfg(target_os = "windows")]
v8__Isolate__SetHostCreateShadowRealmContextCallback(
self,
rust_shadow_realm_callback_windows,
);
#[cfg(not(target_os = "windows"))]
v8__Isolate__SetHostCreateShadowRealmContextCallback(
self,
rust_shadow_realm_callback,
);
}
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[inline(always)]
pub fn add_gc_prologue_callback(
&mut self,
callback: GcCallbackWithData,
data: *mut c_void,
gc_type_filter: GCType,
) {
unsafe {
v8__Isolate__AddGCPrologueCallback(self, callback, data, gc_type_filter)
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[inline(always)]
pub fn remove_gc_prologue_callback(
&mut self,
callback: GcCallbackWithData,
data: *mut c_void,
) {
unsafe { v8__Isolate__RemoveGCPrologueCallback(self, callback, data) }
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[inline(always)]
pub fn add_near_heap_limit_callback(
&mut self,
callback: NearHeapLimitCallback,
data: *mut c_void,
) {
unsafe { v8__Isolate__AddNearHeapLimitCallback(self, callback, data) };
}
#[inline(always)]
pub fn remove_near_heap_limit_callback(
&mut self,
callback: NearHeapLimitCallback,
heap_limit: usize,
) {
unsafe {
v8__Isolate__RemoveNearHeapLimitCallback(self, callback, heap_limit)
};
}
#[inline(always)]
pub fn adjust_amount_of_external_allocated_memory(
&mut self,
change_in_bytes: i64,
) -> i64 {
unsafe {
v8__Isolate__AdjustAmountOfExternalAllocatedMemory(self, change_in_bytes)
}
}
#[inline(always)]
pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
unsafe { v8__Isolate__SetOOMErrorHandler(self, callback) };
}
#[inline(always)]
pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
unsafe { v8__Isolate__GetMicrotasksPolicy(self) }
}
#[inline(always)]
pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
unsafe { v8__Isolate__SetMicrotasksPolicy(self, policy) }
}
#[inline(always)]
pub fn perform_microtask_checkpoint(&mut self) {
unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self) }
}
#[deprecated(note = "Use Isolate::perform_microtask_checkpoint() instead")]
pub fn run_microtasks(&mut self) {
self.perform_microtask_checkpoint()
}
#[inline(always)]
pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
unsafe { v8__Isolate__EnqueueMicrotask(self, &*microtask) }
}
#[inline(always)]
pub fn set_allow_atomics_wait(&mut self, allow: bool) {
unsafe { v8__Isolate__SetAllowAtomicsWait(self, allow) }
}
#[inline(always)]
pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
where
F: UnitType + Fn(&mut HandleScope, Local<Value>, WasmStreaming),
{
unsafe { v8__Isolate__SetWasmStreamingCallback(self, trampoline::<F>()) }
}
#[inline(always)]
pub fn has_pending_background_tasks(&self) -> bool {
unsafe { v8__Isolate__HasPendingBackgroundTasks(self) }
}
#[inline(always)]
pub fn request_garbage_collection_for_testing(
&mut self,
r#type: GarbageCollectionType,
) {
unsafe {
v8__Isolate__RequestGarbageCollectionForTesting(
self,
match r#type {
GarbageCollectionType::Full => 0,
GarbageCollectionType::Minor => 1,
},
)
}
}
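// Tears down the Rust-side state attached to this isolate: drops the scope
// stack, triggers a GC if guaranteed finalizers are still pending, detaches
// outstanding `IsolateHandle`s by nulling the isolate pointer under the mutex,
// runs the remaining guaranteed finalizers, and releases the annex reference
// that was stashed in the internal data slot.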
unsafe fn clear_scope_and_annex(&mut self) {
ScopeData::drop_root(self);
if !self.get_annex().finalizer_map.is_empty() {
self.low_memory_notification();
}
let annex = self.get_annex_mut();
{
let _lock = annex.isolate_mutex.lock().unwrap();
annex.isolate = null_mut();
}
annex.create_param_allocations = Box::new(());
annex.slots.clear();
for finalizer in annex.finalizer_map.drain() {
if let FinalizerCallback::Guaranteed(callback) = finalizer {
callback();
}
}
Arc::from_raw(annex);
self.set_data_internal(Self::ANNEX_SLOT, null_mut());
}
unsafe fn dispose(&mut self) {
v8__Isolate__Dispose(self)
}
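/// Takes a heap snapshot and streams it to `callback` in chunks; the snapshot
/// is serialized as JSON. The callback's return value indicates whether
/// streaming should continue.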
pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
where
F: FnMut(&[u8]) -> bool,
{
extern "C" fn trampoline<F>(
arg: *mut c_void,
data: *const u8,
size: usize,
) -> bool
where
F: FnMut(&[u8]) -> bool,
{
let p = arg as *mut F;
let callback = unsafe { &mut *p };
let slice = unsafe { std::slice::from_raw_parts(data, size) };
callback(slice)
}
let arg = &mut callback as *mut F as *mut c_void;
unsafe { v8__HeapProfiler__TakeHeapSnapshot(self, trampoline::<F>, arg) }
}
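// The methods below delegate to the `SnapshotCreator` stored in the annex and
// therefore panic if this isolate was not created through
// `Isolate::snapshot_creator` or
// `Isolate::snapshot_creator_from_existing_snapshot`.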
#[inline(always)]
pub fn set_default_context(&mut self, context: Local<Context>) {
let snapshot_creator = self
.get_annex_mut()
.maybe_snapshot_creator
.as_mut()
.unwrap();
snapshot_creator.set_default_context(context);
}
#[inline(always)]
pub fn add_context(&mut self, context: Local<Context>) -> usize {
let snapshot_creator = self
.get_annex_mut()
.maybe_snapshot_creator
.as_mut()
.unwrap();
snapshot_creator.add_context(context)
}
#[inline(always)]
pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
where
for<'l> Local<'l, T>: Into<Local<'l, Data>>,
{
let snapshot_creator = self
.get_annex_mut()
.maybe_snapshot_creator
.as_mut()
.unwrap();
snapshot_creator.add_isolate_data(data)
}
#[inline(always)]
pub fn add_context_data<T>(
&mut self,
context: Local<Context>,
data: Local<T>,
) -> usize
where
for<'l> Local<'l, T>: Into<Local<'l, Data>>,
{
let snapshot_creator = self
.get_annex_mut()
.maybe_snapshot_creator
.as_mut()
.unwrap();
snapshot_creator.add_context_data(context, data)
}
}
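/// Rust-side state allocated alongside each isolate and shared with any
/// [`IsolateHandle`]s: typed slots, the finalizer map, an optional snapshot
/// creator, allocations kept alive from `CreateParams`, and a back-pointer to
/// the isolate that is guarded by a mutex so handles can tell when the isolate
/// has been disposed.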
pub(crate) struct IsolateAnnex {
create_param_allocations: Box<dyn Any>,
slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
finalizer_map: FinalizerMap,
maybe_snapshot_creator: Option<SnapshotCreator>,
isolate: *mut Isolate,
isolate_mutex: Mutex<()>,
}
impl IsolateAnnex {
fn new(
isolate: &mut Isolate,
create_param_allocations: Box<dyn Any>,
) -> Self {
Self {
create_param_allocations,
slots: HashMap::default(),
finalizer_map: FinalizerMap::default(),
maybe_snapshot_creator: None,
isolate,
isolate_mutex: Mutex::new(()),
}
}
}
impl Debug for IsolateAnnex {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("IsolateAnnex")
.field("isolate", &self.isolate)
.field("isolate_mutex", &self.isolate_mutex)
.finish()
}
}
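/// A thread-safe handle to an isolate, backed by the shared [`IsolateAnnex`].
/// It can be used from any thread to request termination or interrupts; once
/// the owning isolate has been dropped, these methods become no-ops that
/// return `false`.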
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
unsafe impl Send for IsolateHandle {}
unsafe impl Sync for IsolateHandle {}
impl IsolateHandle {
pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut Isolate {
self.0.isolate
}
#[inline(always)]
fn new(isolate: &Isolate) -> Self {
Self(isolate.get_annex_arc())
}
#[inline(always)]
pub fn terminate_execution(&self) -> bool {
let _lock = self.0.isolate_mutex.lock().unwrap();
if self.0.isolate.is_null() {
false
} else {
unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
true
}
}
#[inline(always)]
pub fn cancel_terminate_execution(&self) -> bool {
let _lock = self.0.isolate_mutex.lock().unwrap();
if self.0.isolate.is_null() {
false
} else {
unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
true
}
}
#[inline(always)]
pub fn is_execution_terminating(&self) -> bool {
let _lock = self.0.isolate_mutex.lock().unwrap();
if self.0.isolate.is_null() {
false
} else {
unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[inline(always)]
pub fn request_interrupt(
&self,
callback: InterruptCallback,
data: *mut c_void,
) -> bool {
let _lock = self.0.isolate_mutex.lock().unwrap();
if self.0.isolate.is_null() {
false
} else {
unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
true
}
}
}
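/// Owning wrapper around a raw isolate pointer. It dereferences to
/// [`Isolate`]; on drop it exits the isolate, tears down the Rust-side state
/// and disposes of the underlying V8 isolate.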
#[derive(Debug)]
pub struct OwnedIsolate {
cxx_isolate: NonNull<Isolate>,
}
impl OwnedIsolate {
pub(crate) fn new(cxx_isolate: *mut Isolate) -> Self {
let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
Self { cxx_isolate }
}
}
impl Drop for OwnedIsolate {
fn drop(&mut self) {
unsafe {
let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
assert!(
snapshot_creator.is_none(),
"If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
);
self.exit();
self.cxx_isolate.as_mut().clear_scope_and_annex();
self.cxx_isolate.as_mut().dispose();
}
}
}
impl Deref for OwnedIsolate {
type Target = Isolate;
fn deref(&self) -> &Self::Target {
unsafe { self.cxx_isolate.as_ref() }
}
}
impl DerefMut for OwnedIsolate {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { self.cxx_isolate.as_mut() }
}
}
impl AsMut<Isolate> for OwnedIsolate {
fn as_mut(&mut self) -> &mut Isolate {
self
}
}
impl AsMut<Isolate> for Isolate {
fn as_mut(&mut self) -> &mut Isolate {
self
}
}
impl OwnedIsolate {
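/// Consumes the isolate and produces the startup snapshot blob. This must be
/// called, rather than simply dropping the isolate, when the isolate was
/// created through [`Isolate::snapshot_creator`]; the `Drop` impl asserts as
/// much.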
#[inline(always)]
pub fn create_blob(
mut self,
function_code_handling: FunctionCodeHandling,
) -> Option<StartupData> {
let mut snapshot_creator =
self.get_annex_mut().maybe_snapshot_creator.take().unwrap();
unsafe { self.cxx_isolate.as_mut().clear_scope_and_annex() };
std::mem::forget(self);
snapshot_creator.create_blob(function_code_handling)
}
}
impl HeapStatistics {
#[inline(always)]
pub fn total_heap_size(&self) -> usize {
unsafe { v8__HeapStatistics__total_heap_size(self) }
}
#[inline(always)]
pub fn total_heap_size_executable(&self) -> usize {
unsafe { v8__HeapStatistics__total_heap_size_executable(self) }
}
#[inline(always)]
pub fn total_physical_size(&self) -> usize {
unsafe { v8__HeapStatistics__total_physical_size(self) }
}
#[inline(always)]
pub fn total_available_size(&self) -> usize {
unsafe { v8__HeapStatistics__total_available_size(self) }
}
#[inline(always)]
pub fn total_global_handles_size(&self) -> usize {
unsafe { v8__HeapStatistics__total_global_handles_size(self) }
}
#[inline(always)]
pub fn used_global_handles_size(&self) -> usize {
unsafe { v8__HeapStatistics__used_global_handles_size(self) }
}
#[inline(always)]
pub fn used_heap_size(&self) -> usize {
unsafe { v8__HeapStatistics__used_heap_size(self) }
}
#[inline(always)]
pub fn heap_size_limit(&self) -> usize {
unsafe { v8__HeapStatistics__heap_size_limit(self) }
}
#[inline(always)]
pub fn malloced_memory(&self) -> usize {
unsafe { v8__HeapStatistics__malloced_memory(self) }
}
#[inline(always)]
pub fn external_memory(&self) -> usize {
unsafe { v8__HeapStatistics__external_memory(self) }
}
#[inline(always)]
pub fn peak_malloced_memory(&self) -> usize {
unsafe { v8__HeapStatistics__peak_malloced_memory(self) }
}
#[inline(always)]
pub fn number_of_native_contexts(&self) -> usize {
unsafe { v8__HeapStatistics__number_of_native_contexts(self) }
}
#[inline(always)]
pub fn number_of_detached_contexts(&self) -> usize {
unsafe { v8__HeapStatistics__number_of_detached_contexts(self) }
}
#[inline(always)]
pub fn does_zap_garbage(&self) -> usize {
unsafe { v8__HeapStatistics__does_zap_garbage(self) }
}
}
impl Default for HeapStatistics {
fn default() -> Self {
let mut s = MaybeUninit::<Self>::uninit();
unsafe {
v8__HeapStatistics__CONSTRUCT(&mut s);
s.assume_init()
}
}
}
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
F: UnitType
+ Fn(
&mut HandleScope<'s>,
Local<'s, Value>,
Local<'s, Array>,
) -> Local<'s, Value>,
{
#[cfg(target_os = "windows")]
fn mapping() -> Self {
let f = |ret_ptr, context, error, sites| {
let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
let r = (F::get())(&mut scope, error, sites);
unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
ret_ptr
};
f.to_c_fn()
}
#[cfg(not(target_os = "windows"))]
fn mapping() -> Self {
let f = |context, error, sites| {
let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
let r = (F::get())(&mut scope, error, sites);
PrepareStackTraceCallbackRet(&*r as *const _)
};
f.to_c_fn()
}
}
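/// Hasher specialized for `TypeId` keys: a `TypeId` is already a high-quality
/// hash, so rehashing it would be wasted work. The hasher just records the
/// single `u64` that hashing a `TypeId` writes; the `const` assertions below
/// verify that `TypeId` still has the size and alignment of a `u64`.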
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
state: Option<u64>,
}
impl Hasher for TypeIdHasher {
fn write(&mut self, _bytes: &[u8]) {
panic!("TypeIdHasher::write() called unexpectedly");
}
#[inline]
fn write_u64(&mut self, value: u64) {
let prev_state = self.state.replace(value);
debug_assert_eq!(prev_state, None);
}
#[inline]
fn finish(&self) -> u64 {
self.state.unwrap()
}
}
#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;
impl BuildHasher for BuildTypeIdHasher {
type Hasher = TypeIdHasher;
#[inline]
fn build_hasher(&self) -> Self::Hasher {
Default::default()
}
}
const _: () = {
assert!(size_of::<TypeId>() == size_of::<u64>());
assert!(align_of::<TypeId>() == align_of::<u64>());
};
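/// Type-erased storage for one slot value. Values that fit within (and are no
/// more aligned than) a `usize` are stored inline; anything larger or more
/// aligned is boxed first. `dtor` holds the drop glue for whatever is stored,
/// or `None` when no drop is needed.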
pub(crate) struct RawSlot {
data: RawSlotData,
dtor: Option<RawSlotDtor>,
}
type RawSlotData = MaybeUninit<usize>;
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
impl RawSlot {
#[inline]
pub fn new<T: 'static>(value: T) -> Self {
if Self::needs_box::<T>() {
Self::new_internal(Box::new(value))
} else {
Self::new_internal(value)
}
}
#[inline]
pub unsafe fn borrow<T: 'static>(&self) -> &T {
if Self::needs_box::<T>() {
&*(self.data.as_ptr() as *const Box<T>)
} else {
&*(self.data.as_ptr() as *const T)
}
}
#[inline]
pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
if Self::needs_box::<T>() {
&mut *(self.data.as_mut_ptr() as *mut Box<T>)
} else {
&mut *(self.data.as_mut_ptr() as *mut T)
}
}
#[inline]
pub unsafe fn into_inner<T: 'static>(self) -> T {
let value = if Self::needs_box::<T>() {
*std::ptr::read(self.data.as_ptr() as *mut Box<T>)
} else {
std::ptr::read(self.data.as_ptr() as *mut T)
};
forget(self);
value
}
const fn needs_box<T: 'static>() -> bool {
size_of::<T>() > size_of::<RawSlotData>()
|| align_of::<T>() > align_of::<RawSlotData>()
}
#[inline]
fn new_internal<B: 'static>(value: B) -> Self {
assert!(!Self::needs_box::<B>());
let mut self_ = Self {
data: RawSlotData::zeroed(),
dtor: None,
};
unsafe {
ptr::write(self_.data.as_mut_ptr() as *mut B, value);
}
if needs_drop::<B>() {
self_.dtor.replace(Self::drop_internal::<B>);
};
self_
}
unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
assert!(!Self::needs_box::<B>());
drop_in_place(data.as_mut_ptr() as *mut B);
}
}
impl Drop for RawSlot {
fn drop(&mut self) {
if let Some(dtor) = self.dtor {
unsafe { dtor(&mut self.data) };
}
}
}