wasmer_runtime_core_fl/
vm.rs

//! The runtime vm module contains data structures and helper functions used during runtime to
//! execute wasm instance functions.
pub use crate::backing::{ImportBacking, LocalBacking, INTERNALS_SIZE};
use crate::{
    error::CallResult,
    instance::call_func_with_index_inner,
    memory::{Memory, MemoryType},
    module::{ModuleInfo, ModuleInner},
    sig_registry::SigRegistry,
    structures::TypedIndex,
    types::{LocalOrImport, MemoryIndex, TableIndex, Value},
    vmcalls,
};
use std::{
    cell::UnsafeCell,
    ffi::c_void,
    mem,
    ptr::{self, NonNull},
    sync::atomic::{AtomicUsize, Ordering},
    sync::Once,
};

use std::collections::HashMap;

/// The context of the currently running WebAssembly instance.
///
/// This is implicitly passed to every WebAssembly function.
/// Since this is per-instance, each field has a statically
/// (as in after compiling the wasm) known size, so no
/// runtime checks are necessary.
///
/// While the runtime currently just passes this around
/// as the first, implicit parameter of every function,
/// it may someday be pinned to a register (especially
/// on arm, which has a ton of registers) to reduce
/// register shuffling.
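///
/// # Example
///
/// Host functions imported into a module receive this context as their first
/// parameter. A minimal sketch, assuming the crate's `func!`/`imports!` macros;
/// the `host_log` name and the `"env"` namespace are purely illustrative:
///
/// ```ignore
/// fn host_log(ctx: &mut Ctx, value: i32) {
///     // Instance state is reachable through `ctx`, e.g. the first linear memory.
///     let _first_byte: u8 = ctx.memory(0).view()[0].get();
///     println!("guest sent {}", value);
/// }
///
/// let import_object = imports! {
///     "env" => {
///         "host_log" => func!(host_log),
///     },
/// };
/// ```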
#[derive(Debug)]
#[repr(C)]
pub struct Ctx {
    // `internal` must be the first field of `Ctx`.
    /// InternalCtx data field
    pub internal: InternalCtx,

    pub(crate) local_functions: *const *const Func,

    /// These are pointers to things that are known to be owned
    /// by the owning `Instance`.
    pub local_backing: *mut LocalBacking,
    /// Mutable pointer to import data
    pub import_backing: *mut ImportBacking,
    /// Const pointer to module inner data
    pub module: *const ModuleInner,

    /// This is intended to be user-supplied, per-instance
    /// contextual data. There are currently some issues with it,
    /// notably that it cannot be set before running the `start`
    /// function in a WebAssembly module. Additionally, the `data`
    /// field may be taken by another ABI implementation that the user
    /// wishes to use in addition to their own, such as WASI. This issue is
    /// being discussed at [#1111](https://github.com/wasmerio/wasmer/pull/1111).
    ///
    /// Alternatively, per-function data can be used if the function in the
    /// [`ImportObject`] is a closure. This cannot duplicate data though,
    /// so data may be shared if the [`ImportObject`] is reused.
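    ///
    /// A minimal sketch of the closure alternative, assuming the crate's
    /// `imports!`/`func!` macros; the `"env"`/`"bump"` names are illustrative only:
    ///
    /// ```ignore
    /// let counter = std::sync::Arc::new(std::sync::atomic::AtomicU32::new(0));
    /// let counter_in_host = counter.clone();
    /// let import_object = imports! {
    ///     "env" => {
    ///         // The captured `counter_in_host` plays the role of per-function data.
    ///         "bump" => func!(move |_ctx: &mut Ctx| {
    ///             counter_in_host.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
    ///         }),
    ///     },
    /// };
    /// ```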
    pub data: *mut c_void,

    /// If there's a function set in this field, it gets called
    /// when the context is destructed, e.g. when an `Instance`
    /// is dropped.
    pub data_finalizer: Option<fn(data: *mut c_void)>,
}

/// When an instance context is dropped, its `data_finalizer` is called
/// in order to avoid leaking resources.
///
/// Implementing the `data_finalizer` function is the responsibility of the `wasmer` end-user.
///
/// See the `test_callback_on_drop` test in `vm_ctx_tests` below for an example.
impl Drop for Ctx {
    fn drop(&mut self) {
        if let Some(ref finalizer) = self.data_finalizer {
            finalizer(self.data);
        }
    }
}

/// The internal context of the currently running WebAssembly instance.
#[doc(hidden)]
#[derive(Debug)]
#[repr(C)]
pub struct InternalCtx {
    /// A pointer to an array of locally-defined memories, indexed by `MemoryIndex`.
    pub memories: *mut *mut LocalMemory,

    /// A pointer to an array of locally-defined tables, indexed by `TableIndex`.
    pub tables: *mut *mut LocalTable,

    /// A pointer to an array of locally-defined globals, indexed by `GlobalIndex`.
    pub globals: *mut *mut LocalGlobal,

    /// A pointer to an array of imported memories, indexed by `MemoryIndex`.
    pub imported_memories: *mut *mut LocalMemory,

    /// A pointer to an array of imported tables, indexed by `TableIndex`.
    pub imported_tables: *mut *mut LocalTable,

    /// A pointer to an array of imported globals, indexed by `GlobalIndex`.
    pub imported_globals: *mut *mut LocalGlobal,

    /// A pointer to an array of imported functions, indexed by `FuncIndex`.
    pub imported_funcs: *mut ImportedFunc,

    /// A pointer to an array of signature ids. Conceptually, this maps
    /// from a static, module-local signature id to a runtime-global
    /// signature id. This is used to allow call-indirect to other
    /// modules safely.
    pub dynamic_sigindices: *const SigId,

    /// Const pointer to Intrinsics.
    pub intrinsics: *const Intrinsics,

    /// Stack lower bound.
    pub stack_lower_bound: *mut u8,

    /// Mutable pointer to memory base.
    pub memory_base: *mut u8,
    /// Memory bound.
    pub memory_bound: usize,

    /// Mutable pointer to internal fields.
    pub internals: *mut [u64; INTERNALS_SIZE], // TODO: Make this dynamic?

    /// Interrupt signal mem.
    pub interrupt_signal_mem: *mut u8,
}

static INTERNAL_FIELDS: AtomicUsize = AtomicUsize::new(0);

/// An internal field.
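///
/// An `InternalField` reserves one slot (out of `INTERNALS_SIZE`) in every
/// instance's `internals` array, which is then read or written through
/// `Ctx::get_internal` / `Ctx::set_internal`. A minimal sketch of how an
/// embedder might use it (the `MY_COUNTER` and `bump` names are illustrative,
/// not part of the crate):
///
/// ```
/// use wasmer_runtime_core::vm::{Ctx, InternalField};
///
/// static MY_COUNTER: InternalField = InternalField::allocate();
///
/// fn bump(ctx: &mut Ctx) {
///     let current = ctx.get_internal(&MY_COUNTER);
///     ctx.set_internal(&MY_COUNTER, current + 1);
/// }
/// ```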
pub struct InternalField {
    /// Init once field.
    init: Once,
    /// Inner field.
    inner: UnsafeCell<usize>,
}

unsafe impl Send for InternalField {}
unsafe impl Sync for InternalField {}

impl InternalField {
    /// Allocate and return an `InternalField`.
    pub const fn allocate() -> InternalField {
        InternalField {
            init: Once::new(),
            inner: UnsafeCell::new(::std::usize::MAX),
        }
    }

    /// Get the index of this `InternalField`.
    pub fn index(&self) -> usize {
        let inner: *mut usize = self.inner.get();
        self.init.call_once(|| {
            let idx = INTERNAL_FIELDS.fetch_add(1, Ordering::SeqCst);
            if idx >= INTERNALS_SIZE {
                INTERNAL_FIELDS.fetch_sub(1, Ordering::SeqCst);
                panic!("at most {} internal fields are supported", INTERNALS_SIZE);
            } else {
                unsafe {
                    *inner = idx;
                }
            }
        });
        unsafe { *inner }
    }
}

/// A container for VM intrinsic functions.
#[repr(C)]
pub struct Intrinsics {
    /// Const pointer to memory grow `Func`.
    pub memory_grow: *const Func,
    /// Const pointer to memory size `Func`.
    pub memory_size: *const Func,
    /*pub memory_grow: unsafe extern "C" fn(
        ctx: &mut Ctx,
        memory_index: usize,
        delta: Pages,
    ) -> i32,
    pub memory_size: unsafe extern "C" fn(
        ctx: &Ctx,
        memory_index: usize,
    ) -> Pages,*/
}

unsafe impl Send for Intrinsics {}
unsafe impl Sync for Intrinsics {}

impl Intrinsics {
    /// Offset of the `memory_grow` field.
    #[allow(clippy::erasing_op)]
    pub const fn offset_memory_grow() -> u8 {
        (0 * ::std::mem::size_of::<usize>()) as u8
    }
    /// Offset of the `memory_size` field.
    pub const fn offset_memory_size() -> u8 {
        (1 * ::std::mem::size_of::<usize>()) as u8
    }
}

/// Local static memory intrinsics
pub static INTRINSICS_LOCAL_STATIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::local_static_memory_grow as _,
    memory_size: vmcalls::local_static_memory_size as _,
};
/// Local dynamic memory intrinsics
pub static INTRINSICS_LOCAL_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::local_dynamic_memory_grow as _,
    memory_size: vmcalls::local_dynamic_memory_size as _,
};
/// Imported static memory intrinsics
pub static INTRINSICS_IMPORTED_STATIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::imported_static_memory_grow as _,
    memory_size: vmcalls::imported_static_memory_size as _,
};
/// Imported dynamic memory intrinsics
pub static INTRINSICS_IMPORTED_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
    memory_grow: vmcalls::imported_dynamic_memory_grow as _,
    memory_size: vmcalls::imported_dynamic_memory_size as _,
};

fn get_intrinsics_for_module(m: &ModuleInfo) -> *const Intrinsics {
    if m.memories.is_empty() && m.imported_memories.is_empty() {
        ptr::null()
    } else {
        match MemoryIndex::new(0).local_or_import(m) {
            LocalOrImport::Local(local_mem_index) => {
                let mem_desc = &m.memories[local_mem_index];
                match mem_desc.memory_type() {
                    MemoryType::Dynamic => &INTRINSICS_LOCAL_DYNAMIC_MEMORY,
                    MemoryType::Static => &INTRINSICS_LOCAL_STATIC_MEMORY,
                    MemoryType::SharedStatic => &INTRINSICS_LOCAL_STATIC_MEMORY,
                }
            }
            LocalOrImport::Import(import_mem_index) => {
                let mem_desc = &m.imported_memories[import_mem_index].1;
                match mem_desc.memory_type() {
                    MemoryType::Dynamic => &INTRINSICS_IMPORTED_DYNAMIC_MEMORY,
                    MemoryType::Static => &INTRINSICS_IMPORTED_STATIC_MEMORY,
                    MemoryType::SharedStatic => &INTRINSICS_IMPORTED_STATIC_MEMORY,
                }
            }
        }
    }
}

#[cfg(all(unix, target_arch = "x86_64"))]
fn get_interrupt_signal_mem() -> *mut u8 {
    unsafe { crate::fault::get_wasm_interrupt_signal_mem() }
}

#[cfg(not(all(unix, target_arch = "x86_64")))]
fn get_interrupt_signal_mem() -> *mut u8 {
    static mut REGION: u64 = 0;
    unsafe { &mut REGION as *mut u64 as *mut u8 }
}

impl Ctx {
    #[doc(hidden)]
    pub unsafe fn new(
        local_backing: &mut LocalBacking,
        import_backing: &mut ImportBacking,
        module: &ModuleInner,
    ) -> Self {
        let (mem_base, mem_bound): (*mut u8, usize) =
            if module.info.memories.is_empty() && module.info.imported_memories.is_empty() {
                (::std::ptr::null_mut(), 0)
            } else {
                let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
                    LocalOrImport::Local(index) => local_backing.vm_memories[index],
                    LocalOrImport::Import(index) => import_backing.vm_memories[index],
                };
                ((*mem).base, (*mem).bound)
            };
        Self {
            internal: InternalCtx {
                memories: local_backing.vm_memories.as_mut_ptr(),
                tables: local_backing.vm_tables.as_mut_ptr(),
                globals: local_backing.vm_globals.as_mut_ptr(),

                imported_memories: import_backing.vm_memories.as_mut_ptr(),
                imported_tables: import_backing.vm_tables.as_mut_ptr(),
                imported_globals: import_backing.vm_globals.as_mut_ptr(),
                imported_funcs: import_backing.vm_functions.as_mut_ptr(),

                dynamic_sigindices: local_backing.dynamic_sigindices.as_ptr(),

                intrinsics: get_intrinsics_for_module(&module.info),

                stack_lower_bound: ::std::ptr::null_mut(),

                memory_base: mem_base,
                memory_bound: mem_bound,

                internals: &mut local_backing.internals.0,

                interrupt_signal_mem: get_interrupt_signal_mem(),
            },
            local_functions: local_backing.local_functions.as_ptr(),

            local_backing,
            import_backing,
            module,

            data: ptr::null_mut(),
            data_finalizer: None,
        }
    }

    #[doc(hidden)]
    pub unsafe fn new_with_data(
        local_backing: &mut LocalBacking,
        import_backing: &mut ImportBacking,
        module: &ModuleInner,
        data: *mut c_void,
        data_finalizer: fn(*mut c_void),
    ) -> Self {
        let (mem_base, mem_bound): (*mut u8, usize) =
            if module.info.memories.is_empty() && module.info.imported_memories.is_empty() {
                (::std::ptr::null_mut(), 0)
            } else {
                let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
                    LocalOrImport::Local(index) => local_backing.vm_memories[index],
                    LocalOrImport::Import(index) => import_backing.vm_memories[index],
                };
                ((*mem).base, (*mem).bound)
            };
        Self {
            internal: InternalCtx {
                memories: local_backing.vm_memories.as_mut_ptr(),
                tables: local_backing.vm_tables.as_mut_ptr(),
                globals: local_backing.vm_globals.as_mut_ptr(),

                imported_memories: import_backing.vm_memories.as_mut_ptr(),
                imported_tables: import_backing.vm_tables.as_mut_ptr(),
                imported_globals: import_backing.vm_globals.as_mut_ptr(),
                imported_funcs: import_backing.vm_functions.as_mut_ptr(),

                dynamic_sigindices: local_backing.dynamic_sigindices.as_ptr(),

                intrinsics: get_intrinsics_for_module(&module.info),

                stack_lower_bound: ptr::null_mut(),

                memory_base: mem_base,
                memory_bound: mem_bound,

                internals: &mut local_backing.internals.0,

                interrupt_signal_mem: get_interrupt_signal_mem(),
            },
            local_functions: local_backing.local_functions.as_ptr(),

            local_backing,
            import_backing,
            module,

            data,
            data_finalizer: Some(data_finalizer),
        }
    }

    /// This exposes the specified memory of the WebAssembly instance
    /// as an immutable reference.
    ///
    /// WebAssembly will soon support multiple linear memories, so this
    /// forces the user to specify which memory to use.
    ///
    /// # Usage:
    ///
    /// ```
    /// # use wasmer_runtime_core::{
    /// #     vm::Ctx,
    /// # };
    /// fn read_memory(ctx: &Ctx) -> u8 {
    ///     let first_memory = ctx.memory(0);
    ///     // Read the first byte of that linear memory.
    ///     first_memory.view()[0].get()
    /// }
    /// ```
    pub fn memory(&self, mem_index: u32) -> &Memory {
        let module = unsafe { &*self.module };
        let mem_index = MemoryIndex::new(mem_index as usize);
        match mem_index.local_or_import(&module.info) {
            LocalOrImport::Local(local_mem_index) => unsafe {
                let local_backing = &*self.local_backing;
                &local_backing.memories[local_mem_index]
            },
            LocalOrImport::Import(import_mem_index) => unsafe {
                let import_backing = &*self.import_backing;
                &import_backing.memories[import_mem_index]
            },
        }
    }

    /// Get access to [`Memory`] and mutable access to the user-defined data
    /// field as the type `T`.
    ///
    /// This method is required to access both at the same time.
    /// This is useful for updating a data type that stores information about
    /// locations in Wasm memory.
    ///
    /// # Safety
    ///
    /// This function must be called with the same type, `T`, that the `data`
    /// was initialized with.
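    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `ctx.data` was initialized with a pointer to the
    /// hypothetical `MyData` type shown here:
    ///
    /// ```
    /// # use wasmer_runtime_core::vm::Ctx;
    /// struct MyData {
    ///     first_byte: u8,
    /// }
    ///
    /// fn update(ctx: &mut Ctx) {
    ///     let (memory, data) = unsafe { ctx.memory_and_data_mut::<MyData>(0) };
    ///     // Record the first byte of linear memory 0 in the user data.
    ///     data.first_byte = memory.view()[0].get();
    /// }
    /// ```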
    pub unsafe fn memory_and_data_mut<T>(&mut self, mem_index: u32) -> (&Memory, &mut T) {
        (self.memory(mem_index), &mut *(self.data as *mut T))
    }

    /// Gives access to the emscripten symbol map, used for debugging
    pub unsafe fn borrow_symbol_map(&self) -> &Option<HashMap<u32, String>> {
        &(*self.module).info.em_symbol_map
    }

    /// Returns the number of dynamic sigindices.
    pub fn dynamic_sigindice_count(&self) -> usize {
        unsafe { (*self.local_backing).dynamic_sigindices.len() }
    }

    /// Returns the value of the specified internal field.
    pub fn get_internal(&self, field: &InternalField) -> u64 {
        unsafe { (*self.internal.internals)[field.index()] }
    }

    /// Writes the value to the specified internal field.
    pub fn set_internal(&mut self, field: &InternalField, value: u64) {
        unsafe {
            (*self.internal.internals)[field.index()] = value;
        }
    }

    /// Calls a host or Wasm function at the given table index.
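    ///
    /// A hedged sketch of a call through table slot 0 (not a runnable doctest; it
    /// assumes an already-instantiated module whose first table entry is a function
    /// taking and returning one `i32`):
    ///
    /// ```ignore
    /// use wasmer_runtime_core::{structures::TypedIndex, types::{TableIndex, Value}};
    ///
    /// let rets = ctx.call_with_table_index(TableIndex::new(0), &[Value::I32(42)])?;
    /// assert_eq!(rets, vec![Value::I32(42)]);
    /// ```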
    pub fn call_with_table_index(
        &mut self,
        index: TableIndex,
        args: &[Value],
    ) -> CallResult<Vec<Value>> {
        let anyfunc_table =
            unsafe { &*((**self.internal.tables).table as *mut crate::table::AnyfuncTable) };
        let Anyfunc { func, ctx, sig_id } = anyfunc_table.backing[index.index()];

        let signature = SigRegistry.lookup_signature(unsafe { std::mem::transmute(sig_id.0) });
        let mut rets = vec![];

        let wasm = {
            let module = unsafe { &*self.module };
            let runnable = &module.runnable_module;

            let sig_index = SigRegistry.lookup_sig_index(signature.clone());
            runnable
                .get_trampoline(&module.info, sig_index)
                .expect("wasm trampoline")
        };

        call_func_with_index_inner(
            ctx,
            NonNull::new(func as *mut _).unwrap(),
            &signature,
            wasm,
            args,
            &mut rets,
        )?;

        Ok(rets)
    }
}

#[doc(hidden)]
impl Ctx {
    #[allow(clippy::erasing_op)] // TODO
    pub const fn offset_memories() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_tables() -> u8 {
        1 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_globals() -> u8 {
        2 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_imported_memories() -> u8 {
        3 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_imported_tables() -> u8 {
        4 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_imported_globals() -> u8 {
        5 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_imported_funcs() -> u8 {
        6 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_signatures() -> u8 {
        7 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_intrinsics() -> u8 {
        8 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_stack_lower_bound() -> u8 {
        9 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_memory_base() -> u8 {
        10 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_memory_bound() -> u8 {
        11 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_internals() -> u8 {
        12 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_interrupt_signal_mem() -> u8 {
        13 * (mem::size_of::<usize>() as u8)
    }

    pub const fn offset_local_functions() -> u8 {
        14 * (mem::size_of::<usize>() as u8)
    }
}

/// Represents a function pointer. It is mostly used in the
/// `typed_func` module within the `wrap` functions, to wrap imported
/// functions.
#[repr(transparent)]
pub struct Func(*mut c_void);

/// Represents a function environment pointer, like a captured
/// environment of a closure. It is mostly used in the `typed_func`
/// module within the `wrap` functions, to wrap imported functions.
#[repr(transparent)]
pub struct FuncEnv(*mut c_void);

/// Represents a function context. It is used by imported functions
/// only.
#[derive(Debug)]
#[repr(C)]
pub struct FuncCtx {
    /// The `Ctx` pointer.
    pub(crate) vmctx: NonNull<Ctx>,

    /// A pointer to the function environment. It is used by imported
    /// functions only to store the pointer to the real host function,
    /// whether it is a regular function, or a closure with or without
    /// a captured environment.
    pub(crate) func_env: Option<NonNull<FuncEnv>>,
}

impl FuncCtx {
    /// Offset to the `vmctx` field.
    #[allow(clippy::erasing_op)]
    pub const fn offset_vmctx() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    /// Offset to the `func_env` field.
    pub const fn offset_func_env() -> u8 {
        1 * (mem::size_of::<usize>() as u8)
    }

    /// Size of a `FuncCtx`.
    pub const fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}

/// An imported function is a function pointer associated to a
/// function context.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedFunc {
    /// Pointer to the function itself.
    pub(crate) func: *const Func,

    /// Mutable non-null pointer to [`FuncCtx`].
    pub(crate) func_ctx: NonNull<FuncCtx>,
}

// Manually implemented because ImportedFunc contains raw pointers
// directly; `Func` is marked Send (But `Ctx` actually isn't! (TODO:
// review this, shouldn't `Ctx` be Send?))
unsafe impl Send for ImportedFunc {}

impl ImportedFunc {
    /// Offset to the `func` field.
    #[allow(clippy::erasing_op)] // TODO
    pub const fn offset_func() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    /// Offset to the `func_ctx` field.
    pub const fn offset_func_ctx() -> u8 {
        1 * (mem::size_of::<usize>() as u8)
    }

    /// Size of an `ImportedFunc`.
    pub const fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}

/// Definition of a table used by the VM. (obviously)
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalTable {
    /// Pointer to the elements in the table.
    pub base: *mut u8,
    /// Number of elements in the table (NOT necessarily the size of the table in bytes!).
    pub count: usize,
    /// The table that this represents. At the moment, this can only be `*mut AnyfuncTable`.
    pub table: *mut (),
}

// manually implemented because LocalTable contains raw pointers directly
unsafe impl Send for LocalTable {}

impl LocalTable {
    /// Offset to the `base` field.
    #[allow(clippy::erasing_op)] // TODO
    pub const fn offset_base() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    /// Offset to the `count` field.
    pub const fn offset_count() -> u8 {
        1 * (mem::size_of::<usize>() as u8)
    }

    /// Size of a `LocalTable`.
    pub const fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}

/// Definition of a memory used by the VM.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalMemory {
    /// Pointer to the bottom of this linear memory.
    pub base: *mut u8,
    /// Current size of this linear memory in bytes.
    pub bound: usize,
    /// The actual memory that this represents.
    /// This is either `*mut DynamicMemory`, `*mut StaticMemory`,
    /// or `*mut SharedStaticMemory`.
    pub memory: *mut (),
}

// manually implemented because LocalMemory contains raw pointers
unsafe impl Send for LocalMemory {}

impl LocalMemory {
    /// Offset to the `base` field.
    #[allow(clippy::erasing_op)] // TODO
    pub const fn offset_base() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    /// Offset to the `bound` field.
    pub const fn offset_bound() -> u8 {
        1 * (mem::size_of::<usize>() as u8)
    }

    /// Size of a `LocalMemory`.
    pub const fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}

/// Definition of a global used by the VM.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalGlobal {
    /// Data.
    pub data: u128,
}

impl LocalGlobal {
    /// Offset to the `data` field.
    #[allow(clippy::erasing_op)] // TODO
    pub const fn offset_data() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    /// A null `LocalGlobal`.
    pub const fn null() -> Self {
        Self { data: 0 }
    }

    /// Size of a `LocalGlobal`.
    pub const fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}

/// Identifier for a function signature.
///
/// A transparent `SigIndex`
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct SigId(pub u32);

use crate::types::SigIndex;
impl From<SigId> for SigIndex {
    fn from(other: SigId) -> SigIndex {
        SigIndex::new(other.0 as _)
    }
}

/// Caller-checked anyfunc
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Anyfunc {
    /// Const pointer to `Func`.
    pub func: *const Func,
    /// Mutable pointer to `Ctx`.
    pub ctx: *mut Ctx,
    /// Sig id of this function
    pub sig_id: SigId,
}

// manually implemented because Anyfunc contains raw pointers directly
unsafe impl Send for Anyfunc {}

impl Anyfunc {
    /// A null `Anyfunc` value.
    pub const fn null() -> Self {
        Self {
            func: ptr::null(),
            ctx: ptr::null_mut(),
            sig_id: SigId(u32::max_value()),
        }
    }

    /// Offset to the `func` field.
    #[allow(clippy::erasing_op)] // TODO
    pub const fn offset_func() -> u8 {
        0 * (mem::size_of::<usize>() as u8)
    }

    /// Offset to the `ctx` field.
    pub const fn offset_vmctx() -> u8 {
        1 * (mem::size_of::<usize>() as u8)
    }

    /// Offset to the `sig_id` field.
    pub const fn offset_sig_id() -> u8 {
        2 * (mem::size_of::<usize>() as u8)
    }

    /// The size of `Anyfunc`.
    pub const fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}

#[cfg(test)]
mod vm_offset_tests {
    use super::{
        Anyfunc, Ctx, FuncCtx, ImportedFunc, InternalCtx, LocalGlobal, LocalMemory, LocalTable,
    };

    // Inspired by https://internals.rust-lang.org/t/discussion-on-offset-of/7440/2.
    macro_rules! offset_of {
        ($struct:path, $field:ident) => {{
            fn offset() -> usize {
                use std::mem;

                let structure = mem::MaybeUninit::<$struct>::uninit();

                let &$struct {
                    $field: ref field, ..
                } = unsafe { &*structure.as_ptr() };

                let offset =
                    (field as *const _ as usize).wrapping_sub(&structure as *const _ as usize);

                assert!((0..=mem::size_of_val(&structure)).contains(&offset));

                offset
            }

            offset()
        }};
    }

    #[test]
    fn offset_of() {
        use std::{mem, ptr::NonNull};

        struct S0;

        #[repr(C)]
        struct S1 {
            f1: u8,
            f2: u16,
            f3: u32,
            f4: u64,
            f5: u128,
            f6: f32,
            f7: f64,
            f8: NonNull<S0>,
            f9: Option<NonNull<S0>>,
            f10: *mut S0,
            z: u8,
        }

        assert_eq!(offset_of!(S1, f1), 0);
        assert_eq!(offset_of!(S1, f2), 2);
        assert_eq!(offset_of!(S1, f3), 4);
        assert_eq!(offset_of!(S1, f4), 8);
        assert_eq!(offset_of!(S1, f5), 16);
        assert_eq!(offset_of!(S1, f6), 32);
        assert_eq!(offset_of!(S1, f7), 40);
        assert_eq!(offset_of!(S1, f8), 40 + mem::size_of::<usize>());
        assert_eq!(offset_of!(S1, f9), 48 + mem::size_of::<usize>());
        assert_eq!(offset_of!(S1, f10), 56 + mem::size_of::<usize>());
        assert_eq!(offset_of!(S1, z), 64 + mem::size_of::<usize>());
    }

    #[test]
    fn vmctx() {
        assert_eq!(0usize, offset_of!(Ctx, internal));

        assert_eq!(
            Ctx::offset_memories() as usize,
            offset_of!(InternalCtx, memories),
        );

        assert_eq!(
            Ctx::offset_tables() as usize,
            offset_of!(InternalCtx, tables),
        );

        assert_eq!(
            Ctx::offset_globals() as usize,
            offset_of!(InternalCtx, globals),
        );

        assert_eq!(
            Ctx::offset_imported_memories() as usize,
            offset_of!(InternalCtx, imported_memories),
        );

        assert_eq!(
            Ctx::offset_imported_tables() as usize,
            offset_of!(InternalCtx, imported_tables),
        );

        assert_eq!(
            Ctx::offset_imported_globals() as usize,
            offset_of!(InternalCtx, imported_globals),
        );

        assert_eq!(
            Ctx::offset_imported_funcs() as usize,
            offset_of!(InternalCtx, imported_funcs),
        );

        assert_eq!(
            Ctx::offset_intrinsics() as usize,
            offset_of!(InternalCtx, intrinsics),
        );

        assert_eq!(
            Ctx::offset_stack_lower_bound() as usize,
            offset_of!(InternalCtx, stack_lower_bound),
        );

        assert_eq!(
            Ctx::offset_memory_base() as usize,
            offset_of!(InternalCtx, memory_base),
        );

        assert_eq!(
            Ctx::offset_memory_bound() as usize,
            offset_of!(InternalCtx, memory_bound),
        );

        assert_eq!(
            Ctx::offset_internals() as usize,
            offset_of!(InternalCtx, internals),
        );

        assert_eq!(
            Ctx::offset_interrupt_signal_mem() as usize,
            offset_of!(InternalCtx, interrupt_signal_mem),
        );

        assert_eq!(
            Ctx::offset_local_functions() as usize,
            offset_of!(Ctx, local_functions),
        );
    }

    #[test]
    fn func_ctx() {
        assert_eq!(FuncCtx::offset_vmctx() as usize, 0,);

        assert_eq!(FuncCtx::offset_func_env() as usize, 8,);
    }

    #[test]
    fn imported_func() {
        assert_eq!(
            ImportedFunc::offset_func() as usize,
            offset_of!(ImportedFunc, func),
        );

        assert_eq!(
            ImportedFunc::offset_func_ctx() as usize,
            offset_of!(ImportedFunc, func_ctx),
        );
    }

    #[test]
    fn local_table() {
        assert_eq!(
            LocalTable::offset_base() as usize,
            offset_of!(LocalTable, base),
        );

        assert_eq!(
            LocalTable::offset_count() as usize,
            offset_of!(LocalTable, count),
        );
    }

    #[test]
    fn local_memory() {
        assert_eq!(
            LocalMemory::offset_base() as usize,
            offset_of!(LocalMemory, base),
        );

        assert_eq!(
            LocalMemory::offset_bound() as usize,
            offset_of!(LocalMemory, bound),
        );
    }

    #[test]
    fn local_global() {
        assert_eq!(
            LocalGlobal::offset_data() as usize,
            offset_of!(LocalGlobal, data),
        );
    }

    #[test]
    fn cc_anyfunc() {
        assert_eq!(Anyfunc::offset_func() as usize, offset_of!(Anyfunc, func),);

        assert_eq!(Anyfunc::offset_vmctx() as usize, offset_of!(Anyfunc, ctx),);

        assert_eq!(
            Anyfunc::offset_sig_id() as usize,
            offset_of!(Anyfunc, sig_id),
        );
    }
}

#[cfg(test)]
mod vm_ctx_tests {
    use super::{Ctx, ImportBacking, LocalBacking};
    use crate::module::{ModuleInfo, ModuleInner, StringTable};
    use crate::structures::Map;
    use std::ffi::c_void;
    use std::sync::Arc;

    struct TestData {
        x: u32,
        y: bool,
        str: String,
        finalizer: Box<dyn FnMut()>,
    }

    impl Drop for TestData {
        fn drop(&mut self) {
            (*self.finalizer)();
        }
    }

    fn test_data_finalizer(data: *mut c_void) {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };

        assert_eq!(10, test_data.x);
        assert_eq!(true, test_data.y);
        assert_eq!("Test".to_string(), test_data.str,);

        println!("hello from finalizer");

        drop(test_data);
    }

    #[test]
    fn test_callback_on_drop() {
        let mut data = TestData {
            x: 10,
            y: true,
            str: "Test".to_string(),
            finalizer: Box::new(move || {}),
        };

        let mut local_backing = LocalBacking {
            memories: Map::new().into_boxed_map(),
            tables: Map::new().into_boxed_map(),
            globals: Map::new().into_boxed_map(),

            vm_memories: Map::new().into_boxed_map(),
            vm_tables: Map::new().into_boxed_map(),
            vm_globals: Map::new().into_boxed_map(),

            dynamic_sigindices: Map::new().into_boxed_map(),
            local_functions: Map::new().into_boxed_map(),

            internals: crate::backing::Internals([0; crate::backing::INTERNALS_SIZE]),
        };

        let mut import_backing = ImportBacking {
            memories: Map::new().into_boxed_map(),
            tables: Map::new().into_boxed_map(),
            globals: Map::new().into_boxed_map(),

            vm_functions: Map::new().into_boxed_map(),
            vm_memories: Map::new().into_boxed_map(),
            vm_tables: Map::new().into_boxed_map(),
            vm_globals: Map::new().into_boxed_map(),
        };

        let module = generate_module();
        let data_ptr = &mut data as *mut _ as *mut c_void;
        let ctx = unsafe {
            Ctx::new_with_data(
                &mut local_backing,
                &mut import_backing,
                &module,
                data_ptr,
                test_data_finalizer,
            )
        };

        let ctx_test_data = cast_test_data(ctx.data);
        assert_eq!(10, ctx_test_data.x);
        assert_eq!(true, ctx_test_data.y);
        assert_eq!("Test".to_string(), ctx_test_data.str);

        drop(ctx);
    }

    fn cast_test_data(data: *mut c_void) -> &'static mut TestData {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };
        test_data
    }

    fn generate_module() -> ModuleInner {
        use super::Func;
        use crate::backend::{sys::Memory, CacheGen, RunnableModule};
        use crate::cache::Error as CacheError;
        use crate::error::RuntimeError;
        use crate::typed_func::Wasm;
        use crate::types::{LocalFuncIndex, SigIndex};
        use indexmap::IndexMap;
        use std::collections::HashMap;
        use std::ptr::NonNull;
        struct Placeholder;
        impl RunnableModule for Placeholder {
            fn get_func(
                &self,
                _module: &ModuleInfo,
                _local_func_index: LocalFuncIndex,
            ) -> Option<NonNull<Func>> {
                None
            }

            fn get_trampoline(&self, _module: &ModuleInfo, _sig_index: SigIndex) -> Option<Wasm> {
                unimplemented!("generate_module::get_trampoline")
            }
            unsafe fn do_early_trap(&self, _: RuntimeError) -> ! {
                unimplemented!("generate_module::do_early_trap")
            }
        }
        impl CacheGen for Placeholder {
            fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
                unimplemented!("generate_module::generate_cache")
            }
        }

        ModuleInner {
            runnable_module: Arc::new(Box::new(Placeholder)),
            cache_gen: Box::new(Placeholder),
            info: ModuleInfo {
                memories: Map::new(),
                globals: Map::new(),
                tables: Map::new(),

                // These are strictly imported and the typesystem ensures that.
                imported_functions: Map::new(),
                imported_memories: Map::new(),
                imported_tables: Map::new(),
                imported_globals: Map::new(),

                exports: IndexMap::new(),

                data_initializers: Vec::new(),
                elem_initializers: Vec::new(),

                start_func: None,

                func_assoc: Map::new(),
                signatures: Map::new(),
                backend: Default::default(),

                namespace_table: StringTable::new(),
                name_table: StringTable::new(),

                em_symbol_map: None,

                custom_sections: HashMap::new(),

                generate_debug_info: false,
                #[cfg(feature = "generate-debug-information")]
                debug_info_manager: crate::jit_debug::JitCodeDebugInfoManager::new(),
            },
        }
    }
}