//! A registry for `VMFuncRef`s. This allows us to deduplicate funcrefs so that
//! identical `VMCallerCheckedAnyfunc`s will give us identical funcrefs.
//!
//! This registry also helps ensure that the `VMFuncRef`s can stay valid for as
//! long as we need them to.
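//!
//! A minimal usage sketch (illustrative only and not compiled as a doctest;
//! constructing the `anyfunc` value is elided here because the
//! `VMCallerCheckedAnyfunc` fields live in `crate::vmcontext`):
//!
//! ```ignore
//! let registry = FuncDataRegistry::new();
//! // Registering equal anyfuncs yields the same, deduplicated funcref.
//! let a = registry.register(anyfunc.clone());
//! let b = registry.register(anyfunc);
//! assert_eq!(a, b);
//! ```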

use crate::vmcontext::VMCallerCheckedAnyfunc;
use loupe::MemoryUsage;
use std::collections::HashMap;
use std::sync::Mutex;

/// The registry that holds the values that `VMFuncRef`s point to.
#[derive(Debug, MemoryUsage)]
pub struct FuncDataRegistry {
    // This structure is stored in an `Engine` and is intended to be shared
    // across many instances. Ideally instances can themselves be sent across
    // threads, and ideally we can compile across many threads. As a result we
    // use interior mutability here with a lock so that callers don't have to
    // externally synchronize access to the registry.
    inner: Mutex<Inner>,
}

// `VMCallerCheckedAnyfunc` contains raw pointers, so `Inner` is not automatically
// `Send`/`Sync`; the data behind those pointers never moves, so sharing the
// registry across threads is sound.
unsafe impl Send for FuncDataRegistry {}
unsafe impl Sync for FuncDataRegistry {}

/// A function reference. A single word that points to metadata about a function.
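///
/// Because this type is `#[repr(transparent)]` over its single pointer field,
/// it has exactly the same size and layout as `*const VMCallerCheckedAnyfunc`.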
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, MemoryUsage)]
pub struct VMFuncRef(pub(crate) *const VMCallerCheckedAnyfunc);

impl wasmer_types::NativeWasmType for VMFuncRef {
    const WASM_TYPE: wasmer_types::Type = wasmer_types::Type::FuncRef;
    type Abi = Self;

    #[inline]
    fn from_abi(abi: Self::Abi) -> Self {
        abi
    }

    #[inline]
    fn into_abi(self) -> Self::Abi {
        self
    }

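    // The funcref is encoded for the generic binary value representation by
    // widening its pointer value to `i128`.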
    #[inline]
    fn to_binary(self) -> i128 {
        self.0 as _
    }

    #[inline]
    fn from_binary(bits: i128) -> Self {
        // TODO: ensure that the safety invariants are actually upheld here
        Self(bits as _)
    }
}

impl VMFuncRef {
    /// Check if the FuncRef is null
    // TODO: make this const when `std::ptr::is_null` is const
    pub fn is_null(&self) -> bool {
        self.0.is_null()
    }

    /// Create a new null FuncRef
    pub const fn null() -> Self {
        Self(std::ptr::null())
    }
}

impl std::ops::Deref for VMFuncRef {
    type Target = *const VMCallerCheckedAnyfunc;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::ops::DerefMut for VMFuncRef {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

// `VMFuncRef` holds a raw pointer into registry-owned data that never moves, so
// sending and sharing it across threads is not a problem.
unsafe impl Send for VMFuncRef {}
unsafe impl Sync for VMFuncRef {}

#[derive(Debug, Default, MemoryUsage)]
struct Inner {
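    /// Owned, boxed copies of every registered anyfunc. Boxing keeps each
    /// anyfunc at a stable address even when this `Vec` reallocates, which is
    /// what lets `VMFuncRef`s hold raw pointers into it.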
    func_data: Vec<Box<VMCallerCheckedAnyfunc>>,
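    /// Maps an anyfunc back to its index in `func_data`, so registering the
    /// same anyfunc twice returns the same funcref.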
    anyfunc_to_index: HashMap<VMCallerCheckedAnyfunc, usize>,
}

impl FuncDataRegistry {
    /// Create a new `FuncDataRegistry`.
    pub fn new() -> Self {
        Self {
            inner: Default::default(),
        }
    }

    /// Register an anyfunc, deduplicating it against previously registered
    /// anyfuncs, and return a `VMFuncRef` pointing to the stored copy.
    pub fn register(&self, anyfunc: VMCallerCheckedAnyfunc) -> VMFuncRef {
        let mut inner = self.inner.lock().unwrap();
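        // Deduplicate: if an identical anyfunc was registered before, hand out
        // a funcref to the existing entry.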
        if let Some(&idx) = inner.anyfunc_to_index.get(&anyfunc) {
            let data: &VMCallerCheckedAnyfunc = &inner.func_data[idx];
            VMFuncRef(data)
        } else {
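            // First time we see this anyfunc: box it so its address stays
            // stable across `Vec` growth, and remember its index for later
            // deduplication.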
            let idx = inner.func_data.len();
            inner.func_data.push(Box::new(anyfunc.clone()));
            inner.anyfunc_to_index.insert(anyfunc, idx);

            let data: &VMCallerCheckedAnyfunc = &inner.func_data[idx];
            VMFuncRef(data)
        }
    }
}