use crate::function::FunctionCallbackInfo;
use crate::isolate_create_params::raw;
use crate::isolate_create_params::CreateParams;
use crate::promise::PromiseRejectMessage;
use crate::scope::data::ScopeData;
use crate::support::BuildTypeIdHasher;
use crate::support::MapFnFrom;
use crate::support::MapFnTo;
use crate::support::Opaque;
use crate::support::ToCFn;
use crate::support::UnitType;
use crate::wasm::trampoline;
use crate::wasm::WasmStreaming;
use crate::Array;
use crate::CallbackScope;
use crate::Context;
use crate::FixedArray;
use crate::Function;
use crate::HandleScope;
use crate::Local;
use crate::Message;
use crate::Module;
use crate::Object;
use crate::Promise;
use crate::ScriptOrModule;
use crate::String;
use crate::Value;

use std::any::Any;
use std::any::TypeId;

use std::collections::HashMap;
use std::ffi::c_void;
use std::fmt::{self, Debug, Formatter};
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ops::DerefMut;
use std::os::raw::c_char;
use std::ptr::null_mut;
use std::ptr::NonNull;
use std::sync::Arc;
use std::sync::Mutex;

#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // V8 also defines a scoped policy (`kScoped` = 1), which is not exposed
  // here; the explicit discriminant keeps `Auto` ABI-compatible with C++.
  Auto = 2,
}

#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}

pub type MessageCallback = extern "C" fn(Local<Message>, Local<Value>);

pub type PromiseHook =
  extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

pub type PromiseRejectCallback = extern "C" fn(PromiseRejectMessage);

pub type HostInitializeImportMetaObjectCallback =
  extern "C" fn(Local<Context>, Local<Module>, Local<Object>);

pub type HostImportModuleDynamicallyWithImportAssertionsCallback =
  extern "C" fn(
    Local<Context>,
    Local<ScriptOrModule>,
    Local<String>,
    Local<FixedArray>,
  ) -> *mut Promise;

pub type InterruptCallback =
  extern "C" fn(isolate: &mut Isolate, data: *mut c_void);

pub type NearHeapLimitCallback = extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

pub type OomErrorCallback =
  extern "C" fn(location: *const c_char, is_heap_oom: bool);

#[repr(C)]
#[derive(Debug)]
pub struct HeapStatistics([usize; 16]);

#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> = extern "C" fn(
  *mut *const Value,
  Local<'s, Context>,
  Local<'s, Value>,
  Local<'s, Array>,
) -> *mut *const Value;

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> = extern "C" fn(
  Local<'s, Context>,
  Local<'s, Value>,
  Local<'s, Array>,
) -> *const Value;

extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut Isolate;
  fn v8__Isolate__Dispose(this: *mut Isolate);
  fn v8__Isolate__SetData(this: *mut Isolate, slot: u32, data: *mut c_void);
  fn v8__Isolate__GetData(this: *const Isolate, slot: u32) -> *mut c_void;
  fn v8__Isolate__GetNumberOfDataSlots(this: *const Isolate) -> u32;
  fn v8__Isolate__Enter(this: *mut Isolate);
  fn v8__Isolate__Exit(this: *mut Isolate);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut Isolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut Isolate);
  fn v8__Isolate__GetHeapStatistics(this: *mut Isolate, s: *mut HeapStatistics);
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut Isolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut Isolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut Isolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut Isolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut Isolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut Isolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut Isolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut Isolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut Isolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut Isolate,
    callback: HostImportModuleDynamicallyWithImportAssertionsCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const Isolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const Isolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const Isolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const Isolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const Isolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut Isolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut Isolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut Isolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut Isolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut Isolate,
    callback: extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__HasPendingBackgroundTasks(isolate: *const Isolate) -> bool;

  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut Isolate,
    callback: extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );

  fn v8__HeapStatistics__CONSTRUCT(s: *mut MaybeUninit<HeapStatistics>);
  fn v8__HeapStatistics__total_heap_size(s: *const HeapStatistics) -> usize;
  fn v8__HeapStatistics__total_heap_size_executable(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__total_physical_size(s: *const HeapStatistics)
    -> usize;
  fn v8__HeapStatistics__total_available_size(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__total_global_handles_size(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__used_global_handles_size(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__used_heap_size(s: *const HeapStatistics) -> usize;
  fn v8__HeapStatistics__heap_size_limit(s: *const HeapStatistics) -> usize;
  fn v8__HeapStatistics__malloced_memory(s: *const HeapStatistics) -> usize;
  fn v8__HeapStatistics__external_memory(s: *const HeapStatistics) -> usize;
  fn v8__HeapStatistics__peak_malloced_memory(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__number_of_native_contexts(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__number_of_detached_contexts(
    s: *const HeapStatistics,
  ) -> usize;
  fn v8__HeapStatistics__does_zap_garbage(s: *const HeapStatistics) -> usize;
}

#[repr(C)]
#[derive(Debug)]
pub struct Isolate(Opaque);

impl Isolate {
  const ANNEX_SLOT: u32 = 0;
  const CURRENT_SCOPE_DATA_SLOT: u32 = 1;
  const INTERNAL_SLOT_COUNT: u32 = 2;

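  /// Creates a new isolate. The returned [`OwnedIsolate`] takes ownership and
  /// disposes the isolate when dropped.
  ///
  /// A minimal usage sketch, assuming V8's one-time platform initialization
  /// has already run (marked `ignore` because it needs a live V8):
  ///
  /// ```ignore
  /// let mut isolate = Isolate::new(Default::default());
  /// let scope = &mut HandleScope::new(&mut isolate);
  /// let context = Context::new(scope);
  /// ```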
297 #[allow(clippy::new_ret_no_self)]
305 pub fn new(params: CreateParams) -> OwnedIsolate {
306 crate::V8::assert_initialized();
307 let (raw_create_params, create_param_allocations) = params.finalize();
308 let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
309 let mut owned_isolate = OwnedIsolate::new(cxx_isolate);
310 ScopeData::new_root(&mut owned_isolate);
311 owned_isolate.create_annex(create_param_allocations);
312 unsafe {
313 owned_isolate.enter();
314 }
315 owned_isolate
316 }
317
318 pub fn create_params() -> CreateParams {
320 CreateParams::default()
321 }
322
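  /// Returns a handle to this isolate that is `Send` and `Sync`, and that
  /// remains safe to use even after the isolate has been disposed.
  ///
  /// A hedged sketch of a watchdog thread (the thread setup is illustrative,
  /// not from the original docs):
  ///
  /// ```ignore
  /// let handle = isolate.thread_safe_handle();
  /// std::thread::spawn(move || {
  ///   std::thread::sleep(std::time::Duration::from_secs(5));
  ///   // Forcefully stop whatever JavaScript is still running.
  ///   handle.terminate_execution();
  /// });
  /// ```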
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }

  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }

  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }

  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    unsafe {
      assert!(v8__Isolate__GetData(self, Self::ANNEX_SLOT).is_null());
      v8__Isolate__SetData(self, Self::ANNEX_SLOT, annex_ptr as *mut c_void);
    };
  }

  fn get_annex(&self) -> &IsolateAnnex {
    unsafe {
      &*(v8__Isolate__GetData(self, Self::ANNEX_SLOT) as *const _
        as *const IsolateAnnex)
    }
  }

  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    unsafe {
      &mut *(v8__Isolate__GetData(self, Self::ANNEX_SLOT) as *mut IsolateAnnex)
    }
  }

  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    // Clone the `Arc` and immediately leak the clone again, so the raw
    // pointer stored in the isolate's data slot keeps holding its own
    // strong reference and the count stays balanced.
    Arc::into_raw(annex_arc.clone());
    annex_arc
  }

  unsafe fn set_data(&mut self, slot: u32, ptr: *mut c_void) {
    v8__Isolate__SetData(self, slot + Self::INTERNAL_SLOT_COUNT, ptr)
  }

  fn get_data(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self, slot + Self::INTERNAL_SLOT_COUNT) }
  }

  fn get_number_of_data_slots(&self) -> u32 {
    unsafe {
      v8__Isolate__GetNumberOfDataSlots(self) - Self::INTERNAL_SLOT_COUNT
    }
  }

  pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
    let scope_data_ptr =
      unsafe { v8__Isolate__GetData(self, Self::CURRENT_SCOPE_DATA_SLOT) };
    NonNull::new(scope_data_ptr).map(NonNull::cast)
  }

  pub(crate) fn set_current_scope_data(
    &mut self,
    scope_data: Option<NonNull<ScopeData>>,
  ) {
    let scope_data_ptr = scope_data
      .map(NonNull::cast)
      .map(NonNull::as_ptr)
      .unwrap_or_else(null_mut);
    unsafe {
      v8__Isolate__SetData(self, Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr)
    };
  }

  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    let b = self.get_annex().slots.get(&TypeId::of::<T>())?;
    let r = <dyn Any>::downcast_ref::<T>(&**b).unwrap();
    Some(r)
  }

  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    let b = self.get_annex_mut().slots.get_mut(&TypeId::of::<T>())?;
    let r = <dyn Any>::downcast_mut::<T>(&mut **b).unwrap();
    Some(r)
  }

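  /// Stores `value` in the isolate, keyed by its type; at most one value per
  /// type can be stored. Returns `true` if no slot of this type existed yet.
  ///
  /// A minimal sketch; the `Counter` type is hypothetical:
  ///
  /// ```ignore
  /// struct Counter(u32);
  /// assert!(isolate.set_slot(Counter(0)));
  /// isolate.get_slot_mut::<Counter>().unwrap().0 += 1;
  /// assert_eq!(isolate.get_slot::<Counter>().unwrap().0, 1);
  /// ```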
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(Any::type_id(&value), Box::new(value))
      .is_none()
  }

  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    let b = self.get_annex_mut().slots.remove(&TypeId::of::<T>())?;
    let v: T = *b.downcast::<T>().unwrap();
    Some(v)
  }

  pub unsafe fn enter(&mut self) {
    v8__Isolate__Enter(self)
  }

  pub unsafe fn exit(&mut self) {
    v8__Isolate__Exit(self)
  }

  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self) }
  }

  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self) }
  }

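  /// Fills `s` with statistics about the isolate's heap.
  ///
  /// A hedged sketch, relying only on `HeapStatistics::default()` from this
  /// module:
  ///
  /// ```ignore
  /// let mut stats = HeapStatistics::default();
  /// isolate.get_heap_statistics(&mut stats);
  /// println!(
  ///   "heap: {} used of {} limit",
  ///   stats.used_heap_size(),
  ///   stats.heap_size_limit()
  /// );
  /// ```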
  pub fn get_heap_statistics(&mut self, s: &mut HeapStatistics) {
    unsafe { v8__Isolate__GetHeapStatistics(self, s) }
  }

  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self,
        capture,
        frame_limit,
      )
    }
  }

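  /// Registers `callback` to be notified of uncaught exceptions and other
  /// error messages. Returns `true` if the listener was added.
  ///
  /// A hedged sketch of a listener (the printing logic is illustrative):
  ///
  /// ```ignore
  /// extern "C" fn on_message(message: Local<Message>, _exception: Local<Value>) {
  ///   let scope = &mut unsafe { CallbackScope::new(message) };
  ///   eprintln!("{}", message.get(scope).to_rust_string_lossy(scope));
  /// }
  /// isolate.add_message_listener(on_message);
  /// ```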
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self, callback) }
  }

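  /// Overrides how stack traces are prepared: the callback receives the error
  /// value and the array of call sites, and returns the value exposed as
  /// `error.stack`.
  ///
  /// A hedged sketch (the returned string is illustrative):
  ///
  /// ```ignore
  /// isolate.set_prepare_stack_trace_callback(|scope, _error, _sites| {
  ///   String::new(scope, "custom stack").unwrap().into()
  /// });
  /// ```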
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(self, callback.map_fn_to())
    };
  }

  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self, hook) }
  }

  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe { v8__Isolate__SetPromiseRejectCallback(self, callback) }
  }

  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(self, callback)
    }
  }

  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: HostImportModuleDynamicallyWithImportAssertionsCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(self, callback)
    }
  }

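  /// Adds `callback` to be invoked when the heap size approaches the heap
  /// limit; the value it returns becomes the new limit.
  ///
  /// A hedged sketch that grants 50% more headroom each time:
  ///
  /// ```ignore
  /// extern "C" fn on_near_limit(
  ///   _data: *mut std::ffi::c_void,
  ///   current_heap_limit: usize,
  ///   _initial_heap_limit: usize,
  /// ) -> usize {
  ///   current_heap_limit + current_heap_limit / 2
  /// }
  /// isolate.add_near_heap_limit_callback(on_near_limit, std::ptr::null_mut());
  /// ```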
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__AddNearHeapLimitCallback(self, callback, data) };
  }

  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(self, callback, heap_limit)
    };
  }

  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self, callback) };
  }

  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self) }
  }

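  /// Sets when microtasks run: under `MicrotasksPolicy::Auto` V8 runs them
  /// when the call depth reaches zero, while `MicrotasksPolicy::Explicit`
  /// leaves draining to [`Isolate::perform_microtask_checkpoint`].
  ///
  /// A hedged sketch:
  ///
  /// ```ignore
  /// isolate.set_microtasks_policy(MicrotasksPolicy::Explicit);
  /// // ... run a script that queues promise reactions ...
  /// isolate.perform_microtask_checkpoint(); // drain them explicitly
  /// ```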
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self, policy) }
  }

  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self) }
  }

  #[deprecated(note = "Use Isolate::perform_microtask_checkpoint() instead")]
  pub fn run_microtasks(&mut self) {
    self.perform_microtask_checkpoint()
  }

  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self, &*microtask) }
  }

  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self, allow) }
  }

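  /// Installs the embedder's handler for `WebAssembly.compileStreaming()`.
  /// The closure is used only at the type level (it must be zero-sized);
  /// module bytes are delivered through [`WasmStreaming`].
  ///
  /// A hedged sketch that finishes immediately without feeding any bytes
  /// (a real embedder would stream them in first):
  ///
  /// ```ignore
  /// isolate.set_wasm_streaming_callback(|_scope, _source_arg, ws| {
  ///   ws.finish();
  /// });
  /// ```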
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType + Fn(&mut HandleScope, Local<Value>, WasmStreaming),
  {
    unsafe { v8__Isolate__SetWasmStreamingCallback(self, trampoline::<F>()) }
  }

  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self) }
  }

  unsafe fn dispose(&mut self) {
    // Drop the root scope data; this tears down the scope stack.
    ScopeData::drop_root(self);

    // Null out the annex's back-pointer to the isolate, so that any
    // `IsolateHandle` that outlives it can tell the isolate is gone.
    let annex = self.get_annex_mut();
    {
      let _lock = annex.isolate_mutex.lock().unwrap();
      annex.isolate = null_mut();
    }

    // Clear the slots and drop the owned objects that were taken out of
    // `CreateParams`.
    annex.create_param_allocations = Box::new(());
    annex.slots.clear();

    // Give up the strong `Arc` reference held through the raw annex pointer.
    Arc::from_raw(annex);
    self.set_data(0, null_mut());

    v8__Isolate__Dispose(self)
  }

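  /// Takes a heap snapshot and streams its JSON serialization to `callback`,
  /// chunk by chunk. The callback returns `true` to keep receiving data.
  ///
  /// A hedged sketch that buffers the whole snapshot:
  ///
  /// ```ignore
  /// let mut snapshot = Vec::new();
  /// isolate.take_heap_snapshot(|chunk| {
  ///   snapshot.extend_from_slice(chunk);
  ///   true // keep streaming
  /// });
  /// ```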
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      // Recover the closure from the opaque pointer that was passed through
      // the C++ side, then hand it the current chunk as a byte slice.
      let p = arg as *mut F;
      let callback = unsafe { &mut *p };
      let slice = unsafe { std::slice::from_raw_parts(data, size) };
      callback(slice)
    }

    let arg = &mut callback as *mut F as *mut c_void;
    unsafe { v8__HeapProfiler__TakeHeapSnapshot(self, trampoline::<F>, arg) }
  }
}

pub(crate) struct IsolateAnnex {
  create_param_allocations: Box<dyn Any>,
  slots: HashMap<TypeId, Box<dyn Any>, BuildTypeIdHasher>,
  // The `isolate` pointer is set to null when the isolate is disposed; the
  // mutex guards reads and writes of the pointer from other threads.
  isolate: *mut Isolate,
  isolate_mutex: Mutex<()>,
}

impl IsolateAnnex {
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    Self {
      create_param_allocations,
      slots: HashMap::default(),
      isolate,
      isolate_mutex: Mutex::new(()),
    }
  }
}

impl Debug for IsolateAnnex {
  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
    f.debug_struct("IsolateAnnex")
      .field("isolate", &self.isolate)
      .field("isolate_mutex", &self.isolate_mutex)
      .finish()
  }
}

#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);

unsafe impl Send for IsolateHandle {}
unsafe impl Sync for IsolateHandle {}

impl IsolateHandle {
  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut Isolate {
    self.0.isolate
  }

  fn new(isolate: &Isolate) -> Self {
    Self(isolate.get_annex_arc())
  }

  pub fn terminate_execution(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
      true
    }
  }

  pub fn cancel_terminate_execution(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
      true
    }
  }

  pub fn is_execution_terminating(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
    }
  }

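  /// Schedules `callback` to run on the isolate's thread as soon as possible
  /// while JavaScript is executing; may be called from any thread. Returns
  /// `false` if the isolate was already disposed.
  ///
  /// A hedged sketch passing no data pointer:
  ///
  /// ```ignore
  /// extern "C" fn on_interrupt(
  ///   _isolate: &mut Isolate,
  ///   _data: *mut std::ffi::c_void,
  /// ) {
  ///   // Runs on the isolate thread, between JavaScript operations.
  /// }
  /// handle.request_interrupt(on_interrupt, std::ptr::null_mut());
  /// ```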
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  pub fn request_interrupt(
    &self,
    callback: InterruptCallback,
    data: *mut c_void,
  ) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
      true
    }
  }
}

#[derive(Debug)]
pub struct OwnedIsolate {
  cxx_isolate: NonNull<Isolate>,
}

impl OwnedIsolate {
  pub(crate) fn new(cxx_isolate: *mut Isolate) -> Self {
    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
    Self { cxx_isolate }
  }
}

impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      self.exit();
      self.cxx_isolate.as_mut().dispose()
    }
  }
}

impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    unsafe { self.cxx_isolate.as_ref() }
  }
}

impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    unsafe { self.cxx_isolate.as_mut() }
  }
}

impl HeapStatistics {
  pub fn total_heap_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_heap_size(self) }
  }

  pub fn total_heap_size_executable(&self) -> usize {
    unsafe { v8__HeapStatistics__total_heap_size_executable(self) }
  }

  pub fn total_physical_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_physical_size(self) }
  }

  pub fn total_available_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_available_size(self) }
  }

  pub fn total_global_handles_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_global_handles_size(self) }
  }

  pub fn used_global_handles_size(&self) -> usize {
    unsafe { v8__HeapStatistics__used_global_handles_size(self) }
  }

  pub fn used_heap_size(&self) -> usize {
    unsafe { v8__HeapStatistics__used_heap_size(self) }
  }

  pub fn heap_size_limit(&self) -> usize {
    unsafe { v8__HeapStatistics__heap_size_limit(self) }
  }

  pub fn malloced_memory(&self) -> usize {
    unsafe { v8__HeapStatistics__malloced_memory(self) }
  }

  pub fn external_memory(&self) -> usize {
    unsafe { v8__HeapStatistics__external_memory(self) }
  }

  pub fn peak_malloced_memory(&self) -> usize {
    unsafe { v8__HeapStatistics__peak_malloced_memory(self) }
  }

  pub fn number_of_native_contexts(&self) -> usize {
    unsafe { v8__HeapStatistics__number_of_native_contexts(self) }
  }

  pub fn number_of_detached_contexts(&self) -> usize {
    unsafe { v8__HeapStatistics__number_of_detached_contexts(self) }
  }

  pub fn does_zap_garbage(&self) -> usize {
    unsafe { v8__HeapStatistics__does_zap_garbage(self) }
  }
}

impl Default for HeapStatistics {
  fn default() -> Self {
    let mut s = MaybeUninit::<Self>::uninit();
    unsafe {
      v8__HeapStatistics__CONSTRUCT(&mut s);
      s.assume_init()
    }
  }
}

impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + Fn(
      &mut HandleScope<'s>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // On Windows the underlying C++ function returns its value on the stack,
  // through a pointer passed as the first argument, hence the extra
  // parameter and the returned pointer.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      &*r as *const _
    };
    f.to_c_fn()
  }
}