1pub use crate::backing::{ImportBacking, LocalBacking, INTERNALS_SIZE};
4use crate::{
5 error::CallResult,
6 instance::call_func_with_index_inner,
7 memory::{Memory, MemoryType},
8 module::{ModuleInfo, ModuleInner},
9 sig_registry::SigRegistry,
10 structures::TypedIndex,
11 types::{LocalOrImport, MemoryIndex, TableIndex, Value},
12 vmcalls,
13};
14use std::{
15 cell::UnsafeCell,
16 ffi::c_void,
17 mem,
18 ptr::{self, NonNull},
19 sync::atomic::{AtomicUsize, Ordering},
20 sync::Once,
21};
22
23use std::collections::HashMap;
24
/// The context of the currently running WebAssembly instance.
///
/// A pointer to this is passed as the hidden first argument to every
/// compiled function; generated code reads it at fixed offsets (see the
/// `offset_*` constants and the `vm_offset_tests` module below).
#[derive(Debug)]
#[repr(C)]
pub struct Ctx {
    // Must remain the first field: generated code addresses `InternalCtx`
    // at offset 0 of `Ctx`.
    pub internal: InternalCtx,

    // Pointers to this instance's compiled local functions.
    pub(crate) local_functions: *const *const Func,

    // Raw back-pointers to the backings and module that own the data the
    // fields above point into. NOTE(review): presumably kept alive by the
    // owning instance — confirm against the caller of `Ctx::new`.
    pub local_backing: *mut LocalBacking,
    pub import_backing: *mut ImportBacking,
    pub module: *const ModuleInner,

    // Opaque user data attached via `new_with_data`; null by default.
    pub data: *mut c_void,

    // If set, called with `data` when this context is dropped (see the
    // `Drop` impl below).
    pub data_finalizer: Option<fn(data: *mut c_void)>,
}
72
73impl Drop for Ctx {
80 fn drop(&mut self) {
81 if let Some(ref finalizer) = self.data_finalizer {
82 finalizer(self.data);
83 }
84 }
85}
86
#[doc(hidden)]
#[derive(Debug)]
#[repr(C)]
pub struct InternalCtx {
    // NOTE: field order here is ABI. The `Ctx::offset_*` constants below
    // assume each field occupies exactly one pointer-sized slot in this
    // exact order; `vm_offset_tests` asserts this.
    pub memories: *mut *mut LocalMemory,

    pub tables: *mut *mut LocalTable,

    pub globals: *mut *mut LocalGlobal,

    pub imported_memories: *mut *mut LocalMemory,

    pub imported_tables: *mut *mut LocalTable,

    pub imported_globals: *mut *mut LocalGlobal,

    pub imported_funcs: *mut ImportedFunc,

    // Signature ids used for dynamic (indirect-call) signature checks.
    pub dynamic_sigindices: *const SigId,

    // Runtime intrinsics for the module's memory flavor; null when the
    // module has no memories (see `get_intrinsics_for_module`).
    pub intrinsics: *const Intrinsics,

    pub stack_lower_bound: *mut u8,

    // Cached base/bound of memory 0, filled in by the `Ctx` constructors.
    pub memory_base: *mut u8,
    pub memory_bound: usize,

    // Per-instance scratch slots addressed by `InternalField` indices.
    pub internals: *mut [u64; INTERNALS_SIZE],

    pub interrupt_signal_mem: *mut u8,
}
138
// Global counter that hands out `InternalField` slot indices.
static INTERNAL_FIELDS: AtomicUsize = AtomicUsize::new(0);

/// A lazily-assigned slot in the per-instance `internals` array.
pub struct InternalField {
    // Guards the one-time index assignment in `index`.
    init: Once,
    // The assigned slot index; `usize::MAX` until initialized.
    inner: UnsafeCell<usize>,
}

// NOTE(review): `inner` is only written once, inside `init.call_once`
// (see `index` below), so cross-thread sharing is sound.
unsafe impl Send for InternalField {}
unsafe impl Sync for InternalField {}
151
152impl InternalField {
153 pub const fn allocate() -> InternalField {
155 InternalField {
156 init: Once::new(),
157 inner: UnsafeCell::new(::std::usize::MAX),
158 }
159 }
160
161 pub fn index(&self) -> usize {
163 let inner: *mut usize = self.inner.get();
164 self.init.call_once(|| {
165 let idx = INTERNAL_FIELDS.fetch_add(1, Ordering::SeqCst);
166 if idx >= INTERNALS_SIZE {
167 INTERNAL_FIELDS.fetch_sub(1, Ordering::SeqCst);
168 panic!("at most {} internal fields are supported", INTERNALS_SIZE);
169 } else {
170 unsafe {
171 *inner = idx;
172 }
173 }
174 });
175 unsafe { *inner }
176 }
177}
178
/// Function pointers to runtime intrinsics invoked by generated code
/// (currently: memory grow/size for the module's memory flavor).
#[repr(C)]
pub struct Intrinsics {
    pub memory_grow: *const Func,
    pub memory_size: *const Func,
}

// NOTE(review): the only instances are the `INTRINSICS_*` statics below,
// whose fields point at `vmcalls` functions and are never mutated.
unsafe impl Send for Intrinsics {}
unsafe impl Sync for Intrinsics {}
199
200impl Intrinsics {
201 #[allow(clippy::erasing_op)]
203 pub const fn offset_memory_grow() -> u8 {
204 (0 * ::std::mem::size_of::<usize>()) as u8
205 }
206 pub const fn offset_memory_size() -> u8 {
208 (1 * ::std::mem::size_of::<usize>()) as u8
209 }
210}
211
// One intrinsics table per (local/imported) × (static/dynamic) memory
// flavor; `get_intrinsics_for_module` selects the matching one.
pub static INTRINSICS_LOCAL_STATIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::local_static_memory_grow as _,
    memory_size: vmcalls::local_static_memory_size as _,
};
pub static INTRINSICS_LOCAL_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::local_dynamic_memory_grow as _,
    memory_size: vmcalls::local_dynamic_memory_size as _,
};
pub static INTRINSICS_IMPORTED_STATIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::imported_static_memory_grow as _,
    memory_size: vmcalls::imported_static_memory_size as _,
};
pub static INTRINSICS_IMPORTED_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::imported_dynamic_memory_grow as _,
    memory_size: vmcalls::imported_dynamic_memory_size as _,
};
232
233fn get_intrinsics_for_module(m: &ModuleInfo) -> *const Intrinsics {
234 if m.memories.is_empty() && m.imported_memories.is_empty() {
235 ptr::null()
236 } else {
237 match MemoryIndex::new(0).local_or_import(m) {
238 LocalOrImport::Local(local_mem_index) => {
239 let mem_desc = &m.memories[local_mem_index];
240 match mem_desc.memory_type() {
241 MemoryType::Dynamic => &INTRINSICS_LOCAL_DYNAMIC_MEMORY,
242 MemoryType::Static => &INTRINSICS_LOCAL_STATIC_MEMORY,
243 MemoryType::SharedStatic => &INTRINSICS_LOCAL_STATIC_MEMORY,
244 }
245 }
246 LocalOrImport::Import(import_mem_index) => {
247 let mem_desc = &m.imported_memories[import_mem_index].1;
248 match mem_desc.memory_type() {
249 MemoryType::Dynamic => &INTRINSICS_IMPORTED_DYNAMIC_MEMORY,
250 MemoryType::Static => &INTRINSICS_IMPORTED_STATIC_MEMORY,
251 MemoryType::SharedStatic => &INTRINSICS_IMPORTED_STATIC_MEMORY,
252 }
253 }
254 }
255 }
256}
257
/// Returns the pointer generated code polls to detect interrupts
/// (signal-based implementation, unix/x86_64 only).
#[cfg(all(unix, target_arch = "x86_64"))]
fn get_interrupt_signal_mem() -> *mut u8 {
    unsafe { crate::fault::get_wasm_interrupt_signal_mem() }
}

/// Fallback for targets without signal-based interrupt support: points
/// at a process-wide dummy word.
#[cfg(not(all(unix, target_arch = "x86_64")))]
fn get_interrupt_signal_mem() -> *mut u8 {
    static mut REGION: u64 = 0;
    // NOTE(review): taking `&mut` to a `static mut` creates aliasing if
    // called concurrently; consumers only touch it via the raw pointer,
    // but this would be cleaner with `ptr::addr_of_mut!` — confirm MSRV.
    unsafe { &mut REGION as *mut u64 as *mut u8 }
}
268
269impl Ctx {
270 #[doc(hidden)]
271 pub unsafe fn new(
272 local_backing: &mut LocalBacking,
273 import_backing: &mut ImportBacking,
274 module: &ModuleInner,
275 ) -> Self {
276 let (mem_base, mem_bound): (*mut u8, usize) =
277 if module.info.memories.is_empty() && module.info.imported_memories.is_empty() {
278 (::std::ptr::null_mut(), 0)
279 } else {
280 let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
281 LocalOrImport::Local(index) => local_backing.vm_memories[index],
282 LocalOrImport::Import(index) => import_backing.vm_memories[index],
283 };
284 ((*mem).base, (*mem).bound)
285 };
286 Self {
287 internal: InternalCtx {
288 memories: local_backing.vm_memories.as_mut_ptr(),
289 tables: local_backing.vm_tables.as_mut_ptr(),
290 globals: local_backing.vm_globals.as_mut_ptr(),
291
292 imported_memories: import_backing.vm_memories.as_mut_ptr(),
293 imported_tables: import_backing.vm_tables.as_mut_ptr(),
294 imported_globals: import_backing.vm_globals.as_mut_ptr(),
295 imported_funcs: import_backing.vm_functions.as_mut_ptr(),
296
297 dynamic_sigindices: local_backing.dynamic_sigindices.as_ptr(),
298
299 intrinsics: get_intrinsics_for_module(&module.info),
300
301 stack_lower_bound: ::std::ptr::null_mut(),
302
303 memory_base: mem_base,
304 memory_bound: mem_bound,
305
306 internals: &mut local_backing.internals.0,
307
308 interrupt_signal_mem: get_interrupt_signal_mem(),
309 },
310 local_functions: local_backing.local_functions.as_ptr(),
311
312 local_backing,
313 import_backing,
314 module,
315
316 data: ptr::null_mut(),
317 data_finalizer: None,
318 }
319 }
320
321 #[doc(hidden)]
322 pub unsafe fn new_with_data(
323 local_backing: &mut LocalBacking,
324 import_backing: &mut ImportBacking,
325 module: &ModuleInner,
326 data: *mut c_void,
327 data_finalizer: fn(*mut c_void),
328 ) -> Self {
329 let (mem_base, mem_bound): (*mut u8, usize) =
330 if module.info.memories.is_empty() && module.info.imported_memories.is_empty() {
331 (::std::ptr::null_mut(), 0)
332 } else {
333 let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
334 LocalOrImport::Local(index) => local_backing.vm_memories[index],
335 LocalOrImport::Import(index) => import_backing.vm_memories[index],
336 };
337 ((*mem).base, (*mem).bound)
338 };
339 Self {
340 internal: InternalCtx {
341 memories: local_backing.vm_memories.as_mut_ptr(),
342 tables: local_backing.vm_tables.as_mut_ptr(),
343 globals: local_backing.vm_globals.as_mut_ptr(),
344
345 imported_memories: import_backing.vm_memories.as_mut_ptr(),
346 imported_tables: import_backing.vm_tables.as_mut_ptr(),
347 imported_globals: import_backing.vm_globals.as_mut_ptr(),
348 imported_funcs: import_backing.vm_functions.as_mut_ptr(),
349
350 dynamic_sigindices: local_backing.dynamic_sigindices.as_ptr(),
351
352 intrinsics: get_intrinsics_for_module(&module.info),
353
354 stack_lower_bound: ptr::null_mut(),
355
356 memory_base: mem_base,
357 memory_bound: mem_bound,
358
359 internals: &mut local_backing.internals.0,
360
361 interrupt_signal_mem: get_interrupt_signal_mem(),
362 },
363 local_functions: local_backing.local_functions.as_ptr(),
364
365 local_backing,
366 import_backing,
367 module,
368
369 data,
370 data_finalizer: Some(data_finalizer),
371 }
372 }
373
374 pub fn memory(&self, mem_index: u32) -> &Memory {
393 let module = unsafe { &*self.module };
394 let mem_index = MemoryIndex::new(mem_index as usize);
395 match mem_index.local_or_import(&module.info) {
396 LocalOrImport::Local(local_mem_index) => unsafe {
397 let local_backing = &*self.local_backing;
398 &local_backing.memories[local_mem_index]
399 },
400 LocalOrImport::Import(import_mem_index) => unsafe {
401 let import_backing = &*self.import_backing;
402 &import_backing.memories[import_mem_index]
403 },
404 }
405 }
406
407 pub unsafe fn memory_and_data_mut<T>(&mut self, mem_index: u32) -> (&Memory, &mut T) {
419 (self.memory(mem_index), &mut *(self.data as *mut T))
420 }
421
422 pub unsafe fn borrow_symbol_map(&self) -> &Option<HashMap<u32, String>> {
424 &(*self.module).info.em_symbol_map
425 }
426
427 pub fn dynamic_sigindice_count(&self) -> usize {
429 unsafe { (*self.local_backing).dynamic_sigindices.len() }
430 }
431
432 pub fn get_internal(&self, field: &InternalField) -> u64 {
434 unsafe { (*self.internal.internals)[field.index()] }
435 }
436
437 pub fn set_internal(&mut self, field: &InternalField, value: u64) {
439 unsafe {
440 (*self.internal.internals)[field.index()] = value;
441 }
442 }
443
444 pub fn call_with_table_index(
446 &mut self,
447 index: TableIndex,
448 args: &[Value],
449 ) -> CallResult<Vec<Value>> {
450 let anyfunc_table =
451 unsafe { &*((**self.internal.tables).table as *mut crate::table::AnyfuncTable) };
452 let Anyfunc { func, ctx, sig_id } = anyfunc_table.backing[index.index()];
453
454 let signature = SigRegistry.lookup_signature(unsafe { std::mem::transmute(sig_id.0) });
455 let mut rets = vec![];
456
457 let wasm = {
458 let module = unsafe { &*self.module };
459 let runnable = &module.runnable_module;
460
461 let sig_index = SigRegistry.lookup_sig_index(signature.clone());
462 runnable
463 .get_trampoline(&module.info, sig_index)
464 .expect("wasm trampoline")
465 };
466
467 call_func_with_index_inner(
468 ctx,
469 NonNull::new(func as *mut _).unwrap(),
470 &signature,
471 wasm,
472 args,
473 &mut rets,
474 )?;
475
476 Ok(rets)
477 }
478}
479
480#[doc(hidden)]
481impl Ctx {
482 #[allow(clippy::erasing_op)] pub const fn offset_memories() -> u8 {
484 0 * (mem::size_of::<usize>() as u8)
485 }
486
487 pub const fn offset_tables() -> u8 {
488 1 * (mem::size_of::<usize>() as u8)
489 }
490
491 pub const fn offset_globals() -> u8 {
492 2 * (mem::size_of::<usize>() as u8)
493 }
494
495 pub const fn offset_imported_memories() -> u8 {
496 3 * (mem::size_of::<usize>() as u8)
497 }
498
499 pub const fn offset_imported_tables() -> u8 {
500 4 * (mem::size_of::<usize>() as u8)
501 }
502
503 pub const fn offset_imported_globals() -> u8 {
504 5 * (mem::size_of::<usize>() as u8)
505 }
506
507 pub const fn offset_imported_funcs() -> u8 {
508 6 * (mem::size_of::<usize>() as u8)
509 }
510
511 pub const fn offset_signatures() -> u8 {
512 7 * (mem::size_of::<usize>() as u8)
513 }
514
515 pub const fn offset_intrinsics() -> u8 {
516 8 * (mem::size_of::<usize>() as u8)
517 }
518
519 pub const fn offset_stack_lower_bound() -> u8 {
520 9 * (mem::size_of::<usize>() as u8)
521 }
522
523 pub const fn offset_memory_base() -> u8 {
524 10 * (mem::size_of::<usize>() as u8)
525 }
526
527 pub const fn offset_memory_bound() -> u8 {
528 11 * (mem::size_of::<usize>() as u8)
529 }
530
531 pub const fn offset_internals() -> u8 {
532 12 * (mem::size_of::<usize>() as u8)
533 }
534
535 pub const fn offset_interrupt_signal_mem() -> u8 {
536 13 * (mem::size_of::<usize>() as u8)
537 }
538
539 pub const fn offset_local_functions() -> u8 {
540 14 * (mem::size_of::<usize>() as u8)
541 }
542}
543
/// An opaque pointer to a compiled function body.
#[repr(transparent)]
pub struct Func(*mut c_void);

/// An opaque pointer to a host function's environment data.
#[repr(transparent)]
pub struct FuncEnv(*mut c_void);
555
/// The context attached to an imported (host) function: the owning
/// instance's `Ctx` plus an optional host environment pointer.
#[derive(Debug)]
#[repr(C)]
pub struct FuncCtx {
    // Always points to the owning instance's `Ctx`.
    pub(crate) vmctx: NonNull<Ctx>,

    // Host-defined environment; `None` for env-less host functions.
    pub(crate) func_env: Option<NonNull<FuncEnv>>,
}
570
571impl FuncCtx {
572 #[allow(clippy::erasing_op)]
574 pub const fn offset_vmctx() -> u8 {
575 0 * (mem::size_of::<usize>() as u8)
576 }
577
578 pub const fn offset_func_env() -> u8 {
580 1 * (mem::size_of::<usize>() as u8)
581 }
582
583 pub const fn size() -> u8 {
585 mem::size_of::<Self>() as u8
586 }
587}
588
/// An imported function entry: the code pointer and its `FuncCtx`.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedFunc {
    pub(crate) func: *const Func,

    pub(crate) func_ctx: NonNull<FuncCtx>,
}

// NOTE(review): `Sync` is deliberately not implemented; sharing across
// threads is presumably coordinated by the owning instance — confirm.
unsafe impl Send for ImportedFunc {}
605
606impl ImportedFunc {
607 #[allow(clippy::erasing_op)] pub const fn offset_func() -> u8 {
610 0 * (mem::size_of::<usize>() as u8)
611 }
612
613 pub const fn offset_func_ctx() -> u8 {
615 1 * (mem::size_of::<usize>() as u8)
616 }
617
618 pub const fn size() -> u8 {
620 mem::size_of::<Self>() as u8
621 }
622}
623
/// The runtime representation of a table, read directly by generated
/// code.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalTable {
    // Pointer to the table's element storage.
    pub base: *mut u8,
    // Current number of elements.
    pub count: usize,
    // Type-erased pointer back to the owning table object (cast to
    // `AnyfuncTable` in `Ctx::call_with_table_index`).
    pub table: *mut (),
}

unsafe impl Send for LocalTable {}
638
639impl LocalTable {
640 #[allow(clippy::erasing_op)] pub const fn offset_base() -> u8 {
643 0 * (mem::size_of::<usize>() as u8)
644 }
645
646 pub const fn offset_count() -> u8 {
648 1 * (mem::size_of::<usize>() as u8)
649 }
650
651 pub const fn size() -> u8 {
653 mem::size_of::<Self>() as u8
654 }
655}
656
/// The runtime representation of a linear memory, read directly by
/// generated code for bounds checks and accesses.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalMemory {
    // Pointer to the start of the linear memory.
    pub base: *mut u8,
    // Current size of the memory (in bytes, per its use as
    // `Ctx.internal.memory_bound`).
    pub bound: usize,
    // Type-erased pointer back to the owning memory object.
    pub memory: *mut (),
}

unsafe impl Send for LocalMemory {}
673
674impl LocalMemory {
675 #[allow(clippy::erasing_op)] pub const fn offset_base() -> u8 {
678 0 * (mem::size_of::<usize>() as u8)
679 }
680
681 pub const fn offset_bound() -> u8 {
683 1 * (mem::size_of::<usize>() as u8)
684 }
685
686 pub const fn size() -> u8 {
688 mem::size_of::<Self>() as u8
689 }
690}
691
/// The runtime representation of a global: 128 bits of raw storage,
/// wide enough for any Wasm value type.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalGlobal {
    pub data: u128,
}
699
700impl LocalGlobal {
701 #[allow(clippy::erasing_op)] pub const fn offset_data() -> u8 {
704 0 * (mem::size_of::<usize>() as u8)
705 }
706
707 pub const fn null() -> Self {
709 Self { data: 0 }
710 }
711
712 pub const fn size() -> u8 {
714 mem::size_of::<Self>() as u8
715 }
716}
717
/// A process-unique id for a function signature, used for fast
/// signature-equality checks on indirect calls.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct SigId(pub u32);

use crate::types::SigIndex;
impl From<SigId> for SigIndex {
    fn from(other: SigId) -> SigIndex {
        SigIndex::new(other.0 as _)
    }
}
731
/// An entry of an anyfunc table: the code pointer, its owning context,
/// and the signature id checked on indirect calls.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Anyfunc {
    pub func: *const Func,
    pub ctx: *mut Ctx,
    pub sig_id: SigId,
}

unsafe impl Send for Anyfunc {}
746
747impl Anyfunc {
748 pub const fn null() -> Self {
750 Self {
751 func: ptr::null(),
752 ctx: ptr::null_mut(),
753 sig_id: SigId(u32::max_value()),
754 }
755 }
756
757 #[allow(clippy::erasing_op)] pub const fn offset_func() -> u8 {
760 0 * (mem::size_of::<usize>() as u8)
761 }
762
763 pub const fn offset_vmctx() -> u8 {
765 1 * (mem::size_of::<usize>() as u8)
766 }
767
768 pub const fn offset_sig_id() -> u8 {
770 2 * (mem::size_of::<usize>() as u8)
771 }
772
773 pub const fn size() -> u8 {
775 mem::size_of::<Self>() as u8
776 }
777}
778
/// Verifies that the hand-maintained `offset_*` constants match the real
/// field offsets of the `#[repr(C)]` VM structures. Generated code reads
/// fields at these offsets, so any drift here is a miscompile.
#[cfg(test)]
mod vm_offset_tests {
    use super::{
        Anyfunc, Ctx, FuncCtx, ImportedFunc, InternalCtx, LocalGlobal, LocalMemory, LocalTable,
    };

    /// Computes the byte offset of `$field` within `$struct` by comparing
    /// the field's address against the base address of an uninitialized
    /// (never-read) instance.
    macro_rules! offset_of {
        ($struct:path, $field:ident) => {{
            fn offset() -> usize {
                use std::mem;

                // Only addresses are taken below; the uninitialized
                // contents are never read.
                let structure = mem::MaybeUninit::<$struct>::uninit();

                let &$struct {
                    $field: ref field, ..
                } = unsafe { &*structure.as_ptr() };

                let offset =
                    (field as *const _ as usize).wrapping_sub(&structure as *const _ as usize);

                // Sanity check: the field must lie inside the struct.
                assert!((0..=mem::size_of_val(&structure)).contains(&offset));

                offset
            }

            offset()
        }};
    }

    /// Sanity-checks the `offset_of!` macro itself against a struct with
    /// fields of many sizes and alignments.
    #[test]
    fn offset_of() {
        use std::{mem, ptr::NonNull};

        struct S0;

        #[repr(C)]
        struct S1 {
            f1: u8,
            f2: u16,
            f3: u32,
            f4: u64,
            f5: u128,
            f6: f32,
            f7: f64,
            f8: NonNull<S0>,
            f9: Option<NonNull<S0>>,
            f10: *mut S0,
            z: u8,
        }

        assert_eq!(offset_of!(S1, f1), 0);
        assert_eq!(offset_of!(S1, f2), 2);
        assert_eq!(offset_of!(S1, f3), 4);
        assert_eq!(offset_of!(S1, f4), 8);
        assert_eq!(offset_of!(S1, f5), 16);
        assert_eq!(offset_of!(S1, f6), 32);
        assert_eq!(offset_of!(S1, f7), 40);
        assert_eq!(offset_of!(S1, f8), 40 + mem::size_of::<usize>());
        assert_eq!(offset_of!(S1, f9), 48 + mem::size_of::<usize>());
        assert_eq!(offset_of!(S1, f10), 56 + mem::size_of::<usize>());
        assert_eq!(offset_of!(S1, z), 64 + mem::size_of::<usize>());
    }

    /// `Ctx::offset_*` constants must match the `InternalCtx`/`Ctx`
    /// field layout exactly.
    #[test]
    fn vmctx() {
        // `internal` must be the very first thing in `Ctx`.
        assert_eq!(0usize, offset_of!(Ctx, internal));

        assert_eq!(
            Ctx::offset_memories() as usize,
            offset_of!(InternalCtx, memories),
        );

        assert_eq!(
            Ctx::offset_tables() as usize,
            offset_of!(InternalCtx, tables),
        );

        assert_eq!(
            Ctx::offset_globals() as usize,
            offset_of!(InternalCtx, globals),
        );

        assert_eq!(
            Ctx::offset_imported_memories() as usize,
            offset_of!(InternalCtx, imported_memories),
        );

        assert_eq!(
            Ctx::offset_imported_tables() as usize,
            offset_of!(InternalCtx, imported_tables),
        );

        assert_eq!(
            Ctx::offset_imported_globals() as usize,
            offset_of!(InternalCtx, imported_globals),
        );

        assert_eq!(
            Ctx::offset_imported_funcs() as usize,
            offset_of!(InternalCtx, imported_funcs),
        );

        assert_eq!(
            Ctx::offset_intrinsics() as usize,
            offset_of!(InternalCtx, intrinsics),
        );

        assert_eq!(
            Ctx::offset_stack_lower_bound() as usize,
            offset_of!(InternalCtx, stack_lower_bound),
        );

        assert_eq!(
            Ctx::offset_memory_base() as usize,
            offset_of!(InternalCtx, memory_base),
        );

        assert_eq!(
            Ctx::offset_memory_bound() as usize,
            offset_of!(InternalCtx, memory_bound),
        );

        assert_eq!(
            Ctx::offset_internals() as usize,
            offset_of!(InternalCtx, internals),
        );

        assert_eq!(
            Ctx::offset_interrupt_signal_mem() as usize,
            offset_of!(InternalCtx, interrupt_signal_mem),
        );

        assert_eq!(
            Ctx::offset_local_functions() as usize,
            offset_of!(Ctx, local_functions),
        );
    }

    /// `FuncCtx` offsets.
    /// NOTE(review): the hard-coded `8` assumes 64-bit pointers.
    #[test]
    fn func_ctx() {
        assert_eq!(FuncCtx::offset_vmctx() as usize, 0,);

        assert_eq!(FuncCtx::offset_func_env() as usize, 8,);
    }

    /// `ImportedFunc` offsets.
    #[test]
    fn imported_func() {
        assert_eq!(
            ImportedFunc::offset_func() as usize,
            offset_of!(ImportedFunc, func),
        );

        assert_eq!(
            ImportedFunc::offset_func_ctx() as usize,
            offset_of!(ImportedFunc, func_ctx),
        );
    }

    /// `LocalTable` offsets.
    #[test]
    fn local_table() {
        assert_eq!(
            LocalTable::offset_base() as usize,
            offset_of!(LocalTable, base),
        );

        assert_eq!(
            LocalTable::offset_count() as usize,
            offset_of!(LocalTable, count),
        );
    }

    /// `LocalMemory` offsets.
    #[test]
    fn local_memory() {
        assert_eq!(
            LocalMemory::offset_base() as usize,
            offset_of!(LocalMemory, base),
        );

        assert_eq!(
            LocalMemory::offset_bound() as usize,
            offset_of!(LocalMemory, bound),
        );
    }

    /// `LocalGlobal` offsets.
    #[test]
    fn local_global() {
        assert_eq!(
            LocalGlobal::offset_data() as usize,
            offset_of!(LocalGlobal, data),
        );
    }

    /// `Anyfunc` offsets.
    #[test]
    fn cc_anyfunc() {
        assert_eq!(Anyfunc::offset_func() as usize, offset_of!(Anyfunc, func),);

        assert_eq!(Anyfunc::offset_vmctx() as usize, offset_of!(Anyfunc, ctx),);

        assert_eq!(
            Anyfunc::offset_sig_id() as usize,
            offset_of!(Anyfunc, sig_id),
        );
    }
}
984
/// Tests for `Ctx` construction and the user-data finalizer hook.
#[cfg(test)]
mod vm_ctx_tests {
    use super::{Ctx, ImportBacking, LocalBacking};
    use crate::module::{ModuleInfo, ModuleInner, StringTable};
    use crate::structures::Map;
    use std::ffi::c_void;
    use std::sync::Arc;

    /// User data attached to the `Ctx` under test; `finalizer` runs when
    /// the `TestData` itself is dropped.
    struct TestData {
        x: u32,
        y: bool,
        str: String,
        finalizer: Box<dyn FnMut()>,
    }

    impl Drop for TestData {
        fn drop(&mut self) {
            (*self.finalizer)();
        }
    }

    /// The `data_finalizer` registered via `Ctx::new_with_data`: checks
    /// that the pointed-to `TestData` is still intact when the ctx drops.
    fn test_data_finalizer(data: *mut c_void) {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };

        assert_eq!(10, test_data.x);
        assert_eq!(true, test_data.y);
        assert_eq!("Test".to_string(), test_data.str,);

        println!("hello from finalizer");

        // NOTE(review): this drops the `&mut` reference, not the
        // `TestData` itself — the data is owned by the test's stack frame.
        drop(test_data);
    }

    /// Builds a `Ctx` over empty backings, verifies the attached data is
    /// reachable through `ctx.data`, and drops the ctx (which invokes
    /// `test_data_finalizer`).
    #[test]
    fn test_callback_on_drop() {
        let mut data = TestData {
            x: 10,
            y: true,
            str: "Test".to_string(),
            finalizer: Box::new(move || {}),
        };

        // Completely empty backings: no memories, tables, or globals.
        let mut local_backing = LocalBacking {
            memories: Map::new().into_boxed_map(),
            tables: Map::new().into_boxed_map(),
            globals: Map::new().into_boxed_map(),

            vm_memories: Map::new().into_boxed_map(),
            vm_tables: Map::new().into_boxed_map(),
            vm_globals: Map::new().into_boxed_map(),

            dynamic_sigindices: Map::new().into_boxed_map(),
            local_functions: Map::new().into_boxed_map(),

            internals: crate::backing::Internals([0; crate::backing::INTERNALS_SIZE]),
        };

        let mut import_backing = ImportBacking {
            memories: Map::new().into_boxed_map(),
            tables: Map::new().into_boxed_map(),
            globals: Map::new().into_boxed_map(),

            vm_functions: Map::new().into_boxed_map(),
            vm_memories: Map::new().into_boxed_map(),
            vm_tables: Map::new().into_boxed_map(),
            vm_globals: Map::new().into_boxed_map(),
        };

        let module = generate_module();
        let data_ptr = &mut data as *mut _ as *mut c_void;
        let ctx = unsafe {
            Ctx::new_with_data(
                &mut local_backing,
                &mut import_backing,
                &module,
                data_ptr,
                test_data_finalizer,
            )
        };

        let ctx_test_data = cast_test_data(ctx.data);
        assert_eq!(10, ctx_test_data.x);
        assert_eq!(true, ctx_test_data.y);
        assert_eq!("Test".to_string(), ctx_test_data.str);

        // Dropping the ctx must run `test_data_finalizer`.
        drop(ctx);
    }

    /// Reborrows the opaque ctx data pointer as `TestData`.
    fn cast_test_data(data: *mut c_void) -> &'static mut TestData {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };
        test_data
    }

    /// Builds a minimal, completely empty `ModuleInner` whose runnable
    /// module panics if actually used — just enough to construct a `Ctx`.
    fn generate_module() -> ModuleInner {
        use super::Func;
        use crate::backend::{sys::Memory, CacheGen, RunnableModule};
        use crate::cache::Error as CacheError;
        use crate::error::RuntimeError;
        use crate::typed_func::Wasm;
        use crate::types::{LocalFuncIndex, SigIndex};
        use indexmap::IndexMap;
        use std::collections::HashMap;
        use std::ptr::NonNull;
        // Stub backend: every operation is unreachable in this test.
        struct Placeholder;
        impl RunnableModule for Placeholder {
            fn get_func(
                &self,
                _module: &ModuleInfo,
                _local_func_index: LocalFuncIndex,
            ) -> Option<NonNull<Func>> {
                None
            }

            fn get_trampoline(&self, _module: &ModuleInfo, _sig_index: SigIndex) -> Option<Wasm> {
                unimplemented!("generate_module::get_trampoline")
            }
            unsafe fn do_early_trap(&self, _: RuntimeError) -> ! {
                unimplemented!("generate_module::do_early_trap")
            }
        }
        impl CacheGen for Placeholder {
            fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
                unimplemented!("generate_module::generate_cache")
            }
        }

        ModuleInner {
            runnable_module: Arc::new(Box::new(Placeholder)),
            cache_gen: Box::new(Placeholder),
            info: ModuleInfo {
                memories: Map::new(),
                globals: Map::new(),
                tables: Map::new(),

                imported_functions: Map::new(),
                imported_memories: Map::new(),
                imported_tables: Map::new(),
                imported_globals: Map::new(),

                exports: IndexMap::new(),

                data_initializers: Vec::new(),
                elem_initializers: Vec::new(),

                start_func: None,

                func_assoc: Map::new(),
                signatures: Map::new(),
                backend: Default::default(),

                namespace_table: StringTable::new(),
                name_table: StringTable::new(),

                em_symbol_map: None,

                custom_sections: HashMap::new(),

                generate_debug_info: false,
                #[cfg(feature = "generate-debug-information")]
                debug_info_manager: crate::jit_debug::JitCodeDebugInfoManager::new(),
            },
        }
    }
}