wasmer/backend/sys/entities/memory/mod.rs

//! Data types, functions and traits for the `sys` runtime's `Memory` implementation.
use std::{
    convert::TryInto,
    marker::PhantomData,
    mem::{self, MaybeUninit},
    slice,
};

use tracing::warn;
use wasmer_types::{MemoryType, Pages};
use wasmer_vm::{LinearMemory, MemoryError, StoreHandle, ThreadConditionsHandle, VMMemory};

use crate::{
    BackendMemory, MemoryAccessError,
    backend::sys::entities::{engine::NativeEngineExt, memory::MemoryView},
    entities::store::{AsStoreMut, AsStoreRef},
    location::{MemoryLocation, SharedMemoryOps},
    vm::{VMExtern, VMExternMemory},
};

pub(crate) mod view;
pub use view::*;

use super::store::Store;

#[derive(Debug, Clone)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
/// A WebAssembly `memory` in the `sys` runtime.
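///
/// This type is normally reached through the backend-agnostic [`crate::Memory`]
/// wrapper rather than used directly. A minimal sketch of that public-API flow
/// (illustrative only, not a doctest compiled from this module):
///
/// ```ignore
/// let mut store = wasmer::Store::default();
/// let ty = wasmer_types::MemoryType::new(wasmer_types::Pages(1), None, false);
/// let memory = wasmer::Memory::new(&mut store, ty).unwrap();
/// assert_eq!(memory.ty(&store).minimum, wasmer_types::Pages(1));
/// ```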
pub struct Memory {
    pub(crate) handle: StoreHandle<VMMemory>,
}

impl Memory {
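    /// Creates a new host memory of type `ty`, asking the engine's tunables
    /// for a memory style and for the backing host allocation.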
    pub(crate) fn new(store: &mut impl AsStoreMut, ty: MemoryType) -> Result<Self, MemoryError> {
        let mut store = store.as_store_mut();
        let tunables = store.engine().tunables();
        let style = tunables.memory_style(&ty);
        let memory = tunables.create_host_memory(&ty, &style)?;

        Ok(Self {
            handle: StoreHandle::new(store.as_store_mut().objects_mut().as_sys_mut(), memory),
        })
    }

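    /// Wraps an already-created [`VMMemory`] (for example, one returned by
    /// [`Self::try_clone`]) in a handle owned by `new_store`.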
    pub(crate) fn new_from_existing(new_store: &mut impl AsStoreMut, memory: VMMemory) -> Self {
        let handle = StoreHandle::new(new_store.objects_mut().as_sys_mut(), memory);
        Self::from_vm_extern(new_store, VMExternMemory::Sys(handle.internal_handle()))
    }

    pub(crate) fn ty(&self, store: &impl AsStoreRef) -> MemoryType {
        self.handle
            .get(store.as_store_ref().objects().as_sys())
            .ty()
    }

    pub(crate) fn size(&self, store: &impl AsStoreRef) -> Pages {
        self.handle
            .get(store.as_store_ref().objects().as_sys())
            .size()
    }

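    /// Grows the memory by `delta` pages and returns the previous size in
    /// [`Pages`].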
    pub(crate) fn grow<IntoPages>(
        &self,
        store: &mut impl AsStoreMut,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.handle
            .get_mut(store.objects_mut().as_sys_mut())
            .grow(delta.into())
    }

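    /// Grows the memory to at least `min_size`; a no-op if the memory is
    /// already large enough.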
    pub(crate) fn grow_at_least(
        &self,
        store: &mut impl AsStoreMut,
        min_size: u64,
    ) -> Result<(), MemoryError> {
        self.handle
            .get_mut(store.objects_mut().as_sys_mut())
            .grow_at_least(min_size)
    }

    pub(crate) fn reset(&self, store: &mut impl AsStoreMut) -> Result<(), MemoryError> {
        self.handle
            .get_mut(store.as_store_mut().objects_mut().as_sys_mut())
            .reset()?;
        Ok(())
    }

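    /// Builds a `Memory` around an existing [`VMExternMemory`]. The internal
    /// handle is assumed to belong to `store`'s objects; that assumption is
    /// what makes the unsafe [`StoreHandle::from_internal`] call sound.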
    pub(crate) fn from_vm_extern(store: &impl AsStoreRef, vm_extern: VMExternMemory) -> Self {
        Self {
            handle: unsafe {
                StoreHandle::from_internal(
                    store.as_store_ref().objects().id(),
                    vm_extern.into_sys(),
                )
            },
        }
    }

    /// Checks whether this `Memory` belongs to the given store.
    pub(crate) fn is_from_store(&self, store: &impl AsStoreRef) -> bool {
        self.handle.store_id() == store.as_store_ref().objects().id()
    }

    /// Cloning this memory creates another reference to the same underlying
    /// memory, which can then be placed into a new store.
    pub(crate) fn try_clone(&self, store: &impl AsStoreRef) -> Result<VMMemory, MemoryError> {
        let mem = self.handle.get(store.as_store_ref().objects().as_sys());
        let cloned = mem.try_clone()?;
        Ok(cloned.into())
    }

    /// Copying the memory copies all of its bytes into an identical,
    /// independent copy of the original, which can be placed into a new store.
    pub(crate) fn try_copy(
        &self,
        store: &impl AsStoreRef,
    ) -> Result<Box<dyn LinearMemory + 'static>, MemoryError> {
        let mut mem = self.try_clone(store)?;
        mem.copy()
    }

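    /// Returns a [`crate::memory::shared::SharedMemory`] view of this memory,
    /// or `None` if the memory is not shared (i.e. has no thread conditions).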
    pub(crate) fn as_shared(
        &self,
        store: &impl AsStoreRef,
    ) -> Option<crate::memory::shared::SharedMemory> {
        let mem = self.handle.get(store.as_store_ref().objects().as_sys());
        let conds = mem.thread_conditions()?.downgrade();

        Some(crate::memory::shared::SharedMemory::new(
            crate::Memory(BackendMemory::Sys(self.clone())),
            conds,
        ))
    }

    /// Converts this memory into a [`VMExtern`] handle.
    pub(crate) fn to_vm_extern(&self) -> VMExtern {
        VMExtern::Sys(wasmer_vm::VMExtern::Memory(self.handle.internal_handle()))
    }
}

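// `ThreadConditionsHandle` is a weak handle: `upgrade()` returns `None` once
// the owning memory has been dropped, and each operation below maps that case
// to an error instead of panicking.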
impl SharedMemoryOps for ThreadConditionsHandle {
    fn notify(&self, dst: MemoryLocation, count: u32) -> Result<u32, crate::AtomicsError> {
        let count = self
            .upgrade()
            .ok_or(crate::AtomicsError::Unimplemented)?
            .do_notify(dst.address, count);
        Ok(count)
    }

    fn wait(
        &self,
        dst: MemoryLocation,
        timeout: Option<std::time::Duration>,
    ) -> Result<u32, crate::AtomicsError> {
        // Safety: `ExpectedValue::None` has no safety requirements.
        unsafe {
            self.upgrade()
                .ok_or(crate::AtomicsError::Unimplemented)?
                .do_wait(
                    wasmer_vm::NotifyLocation {
                        memory_base: std::ptr::null_mut(),
                        address: dst.address,
                    },
                    wasmer_vm::ExpectedValue::None,
                    timeout,
                )
                .map_err(|e| match e {
                    wasmer_vm::WaiterError::Unimplemented => crate::AtomicsError::Unimplemented,
                    wasmer_vm::WaiterError::TooManyWaiters => crate::AtomicsError::TooManyWaiters,
                    wasmer_vm::WaiterError::AtomicsDisabled => crate::AtomicsError::AtomicsDisabled,
                    _ => crate::AtomicsError::Unimplemented,
                })
        }
    }

    fn disable_atomics(&self) -> Result<(), MemoryError> {
        self.upgrade()
            .ok_or_else(|| MemoryError::Generic("memory was dropped".to_string()))?
            .disable_atomics();
        Ok(())
    }

    fn wake_all_atomic_waiters(&self) -> Result<(), MemoryError> {
        self.upgrade()
            .ok_or_else(|| MemoryError::Generic("memory was dropped".to_string()))?
            .wake_all_atomic_waiters();
        Ok(())
    }
}

impl std::cmp::PartialEq for Memory {
    fn eq(&self, other: &Self) -> bool {
        self.handle == other.handle
    }
}

impl std::cmp::Eq for Memory {}

/// Underlying buffer for a memory.
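///
/// `base` and `len` describe the linear memory mapping behind a borrowed
/// [`MemoryView`]; the `PhantomData` field ties these raw pointers to the
/// view's lifetime, so a `MemoryBuffer` cannot outlive the view it came from.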
#[derive(Debug, Copy, Clone)]
pub(crate) struct MemoryBuffer<'a> {
    pub(crate) base: *mut u8,
    pub(crate) len: usize,
    pub(crate) marker: PhantomData<&'a MemoryView<'a>>,
}

impl MemoryBuffer<'_> {
    pub(crate) fn read(&self, offset: u64, buf: &mut [u8]) -> Result<(), MemoryAccessError> {
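        // `checked_add` guards against `offset + buf.len()` overflowing `u64`
        // before the bounds check against the view's length below.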
        let end = offset
            .checked_add(buf.len() as u64)
            .ok_or(MemoryAccessError::Overflow)?;
        if end > self.len.try_into().unwrap() {
            warn!(
                "attempted to read ({} bytes) beyond the bounds of the memory view ({} > {})",
                buf.len(),
                end,
                self.len
            );
            return Err(MemoryAccessError::HeapOutOfBounds);
        }
        unsafe {
            volatile_memcpy_read(self.base.add(offset as usize), buf.as_mut_ptr(), buf.len());
        }
        Ok(())
    }

    pub(crate) fn read_uninit<'b>(
        &self,
        offset: u64,
        buf: &'b mut [MaybeUninit<u8>],
    ) -> Result<&'b mut [u8], MemoryAccessError> {
        let end = offset
            .checked_add(buf.len() as u64)
            .ok_or(MemoryAccessError::Overflow)?;
        if end > self.len.try_into().unwrap() {
            warn!(
                "attempted to read ({} bytes) beyond the bounds of the memory view ({} > {})",
                buf.len(),
                end,
                self.len
            );
            return Err(MemoryAccessError::HeapOutOfBounds);
        }
        let buf_ptr = buf.as_mut_ptr() as *mut u8;
        unsafe {
            volatile_memcpy_read(self.base.add(offset as usize), buf_ptr, buf.len());
        }

        Ok(unsafe { slice::from_raw_parts_mut(buf_ptr, buf.len()) })
    }

    pub(crate) fn write(&self, offset: u64, data: &[u8]) -> Result<(), MemoryAccessError> {
        let end = offset
            .checked_add(data.len() as u64)
            .ok_or(MemoryAccessError::Overflow)?;
        if end > self.len.try_into().unwrap() {
            warn!(
                "attempted to write ({} bytes) beyond the bounds of the memory view ({} > {})",
                data.len(),
                end,
                self.len
            );
            return Err(MemoryAccessError::HeapOutOfBounds);
        }
        unsafe {
            volatile_memcpy_write(data.as_ptr(), self.base.add(offset as usize), data.len());
        }
        Ok(())
    }
}

// We can't use a normal memcpy here because it has undefined behavior if the
// memory is being concurrently modified. So we need to write our own memcpy
// implementation which uses volatile operations.
//
// The implementation of these functions can optimize very well when inlined
// with a fixed length: they should compile down to a single load/store
// instruction for small (8/16/32/64-bit) copies.
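//
// Both functions copy in descending power-of-two chunks: 8-byte words while
// at least 8 bytes remain, then at most one 4-byte, one 2-byte, and one
// 1-byte copy for the tail, so any length takes a bounded number of
// unaligned accesses after the main loop.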
#[inline]
unsafe fn volatile_memcpy_read(mut src: *const u8, mut dst: *mut u8, mut len: usize) {
    #[inline]
    unsafe fn copy_one<T>(src: &mut *const u8, dst: &mut *mut u8, len: &mut usize) {
        #[repr(C, packed)]
        struct Unaligned<T>(T);

        unsafe {
            let val = (*src as *const Unaligned<T>).read_volatile();
            (*dst as *mut Unaligned<T>).write(val);

            *src = src.add(mem::size_of::<T>());
            *dst = dst.add(mem::size_of::<T>());
            *len -= mem::size_of::<T>();
        }
    }

    unsafe {
        while len >= 8 {
            copy_one::<u64>(&mut src, &mut dst, &mut len);
        }
        if len >= 4 {
            copy_one::<u32>(&mut src, &mut dst, &mut len);
        }
        if len >= 2 {
            copy_one::<u16>(&mut src, &mut dst, &mut len);
        }
        if len >= 1 {
            copy_one::<u8>(&mut src, &mut dst, &mut len);
        }
    }
}

#[inline]
unsafe fn volatile_memcpy_write(mut src: *const u8, mut dst: *mut u8, mut len: usize) {
    #[inline]
    unsafe fn copy_one<T>(src: &mut *const u8, dst: &mut *mut u8, len: &mut usize) {
        #[repr(C, packed)]
        struct Unaligned<T>(T);

        unsafe {
            let val = (*src as *const Unaligned<T>).read();
            (*dst as *mut Unaligned<T>).write_volatile(val);
            *src = src.add(mem::size_of::<T>());
            *dst = dst.add(mem::size_of::<T>());
            *len -= mem::size_of::<T>();
        }
    }

    unsafe {
        while len >= 8 {
            copy_one::<u64>(&mut src, &mut dst, &mut len);
        }
        if len >= 4 {
            copy_one::<u32>(&mut src, &mut dst, &mut len);
        }
        if len >= 2 {
            copy_one::<u16>(&mut src, &mut dst, &mut len);
        }
        if len >= 1 {
            copy_one::<u8>(&mut src, &mut dst, &mut len);
        }
    }
}

impl crate::Memory {
    /// Consumes [`self`] into a [`crate::backend::sys::memory::Memory`].
    pub fn into_sys(self) -> crate::backend::sys::memory::Memory {
        match self.0 {
            BackendMemory::Sys(s) => s,
            _ => panic!("Not a `sys` memory!"),
        }
    }

    /// Converts a reference to [`self`] into a reference to a [`crate::backend::sys::memory::Memory`].
    pub fn as_sys(&self) -> &crate::backend::sys::memory::Memory {
        match self.0 {
            BackendMemory::Sys(ref s) => s,
            _ => panic!("Not a `sys` memory!"),
        }
    }

    /// Converts a mutable reference to [`self`] into a mutable reference to a [`crate::backend::sys::memory::Memory`].
    pub fn as_sys_mut(&mut self) -> &mut crate::backend::sys::memory::Memory {
        match self.0 {
            BackendMemory::Sys(ref mut s) => s,
            _ => panic!("Not a `sys` memory!"),
        }
    }
}