use crate::Trap;
use crate::prelude::*;
use crate::runtime::vm::{self, ExportMemory};
use crate::store::{StoreInstanceId, StoreOpaque, StoreResourceLimiter};
use crate::trampoline::generate_memory_export;
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::{AsContext, AsContextMut, Engine, MemoryType, StoreContext, StoreContextMut};
use core::cell::UnsafeCell;
use core::fmt;
use core::slice;
use core::time::Duration;
use wasmtime_environ::DefinedMemoryIndex;
pub use crate::runtime::vm::WaitResult;
/// Error returned when an attempted [`Memory`] read or write is out of
/// bounds of the memory's current size.
#[derive(Debug)]
#[non_exhaustive]
pub struct MemoryAccessError {
    // Private zero-sized field so details can be added later without a
    // semver-breaking change.
    _private: (),
}
impl fmt::Display for MemoryAccessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The error carries no positional information, so the rendered
        // message is a fixed string.
        f.write_str("out of bounds memory access")
    }
}
// Marker impl: `MemoryAccessError` has no underlying source error, so the
// default `Error` methods are sufficient.
impl core::error::Error for MemoryAccessError {}
/// A handle to a (non-shared) WebAssembly linear memory living inside a
/// store.
///
/// This is a cheap, `Copy`-able index pair; every operation takes the owning
/// store as an argument to reach the actual memory.
#[derive(Copy, Clone, Debug)]
// `repr(C)`: the field layout is relied upon externally and verified by the
// `const _` assertion that follows this definition.
#[repr(C)] pub struct Memory {
    // Identifies which instance within which store owns this memory.
    instance: StoreInstanceId,
    // Index of the defined memory within that instance.
    index: DefinedMemoryIndex,
}
// Compile-time guard that `Memory`'s `repr(C)` layout matches the shape other
// (likely FFI/raw) code expects: `instance` at offset 0, with overall
// size/alignment equal to a `(u64, u32)` pair followed by a `u32`.
const _: () = {
    // Stand-in mirroring the assumed representation of `StoreInstanceId`
    // (a u64 store id plus a u32 instance index) — NOTE(review): confirm
    // this tracks the real definition of `StoreInstanceId`.
    #[repr(C)]
    struct Tmp(u64, u32);
    #[repr(C)]
    struct C(Tmp, u32);
    assert!(core::mem::size_of::<C>() == core::mem::size_of::<Memory>());
    assert!(core::mem::align_of::<C>() == core::mem::align_of::<Memory>());
    assert!(core::mem::offset_of!(Memory, instance) == 0);
};
impl Memory {
    /// Creates a new linear memory in `store` described by `ty`.
    ///
    /// # Errors
    ///
    /// Fails if `ty` is a shared memory type (use [`SharedMemory`] instead),
    /// if the store's resource limiter requires async operation, or if
    /// allocation is denied/fails.
    pub fn new(mut store: impl AsContextMut, ty: MemoryType) -> Result<Memory> {
        // Synchronous entry point: validate that the limiter can be used
        // without awaiting, then drive the shared async constructor, which
        // must be immediately ready in this configuration.
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new(store, limiter.as_mut(), ty))
    }
    /// Async variant of [`Memory::new`], for stores configured with an async
    /// resource limiter.
    #[cfg(feature = "async")]
    pub async fn new_async(mut store: impl AsContextMut, ty: MemoryType) -> Result<Memory> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new(store, limiter.as_mut(), ty).await
    }
    /// Shared constructor backing both the sync and async entry points.
    async fn _new(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        ty: MemoryType,
    ) -> Result<Memory> {
        // Shared memories are a distinct public type; reject them up front.
        if ty.is_shared() {
            bail!("shared memories must be created through `SharedMemory`")
        }
        // `unshared()` cannot fail here because shared types were rejected
        // above, hence the `unwrap`.
        Ok(generate_memory_export(store, limiter, &ty, None)
            .await?
            .unshared()
            .unwrap())
    }
    /// Returns the [`MemoryType`] of this memory.
    ///
    /// # Panics
    ///
    /// Panics if `store` does not own this memory.
    pub fn ty(&self, store: impl AsContext) -> MemoryType {
        let store = store.as_context();
        MemoryType::from_wasmtime_memory(self.wasmtime_ty(store.0))
    }
    /// Copies `buffer.len()` bytes starting at `offset` out of this memory
    /// into `buffer`.
    ///
    /// # Errors
    ///
    /// Returns [`MemoryAccessError`] if the range `offset..offset +
    /// buffer.len()` is out of bounds.
    pub fn read(
        &self,
        store: impl AsContext,
        offset: usize,
        buffer: &mut [u8],
    ) -> Result<(), MemoryAccessError> {
        let store = store.as_context();
        // Two-step `get` avoids computing `offset + buffer.len()` directly,
        // which could overflow; any out-of-range slice yields `None`.
        let slice = self
            .data(&store)
            .get(offset..)
            .and_then(|s| s.get(..buffer.len()))
            .ok_or(MemoryAccessError { _private: () })?;
        buffer.copy_from_slice(slice);
        Ok(())
    }
    /// Copies all of `buffer` into this memory starting at `offset`.
    ///
    /// # Errors
    ///
    /// Returns [`MemoryAccessError`] if the destination range is out of
    /// bounds.
    pub fn write(
        &self,
        mut store: impl AsContextMut,
        offset: usize,
        buffer: &[u8],
    ) -> Result<(), MemoryAccessError> {
        let mut context = store.as_context_mut();
        // Same overflow-safe double-`get` pattern as `read`, but on the
        // mutable view.
        self.data_mut(&mut context)
            .get_mut(offset..)
            .and_then(|s| s.get_mut(..buffer.len()))
            .ok_or(MemoryAccessError { _private: () })?
            .copy_from_slice(buffer);
        Ok(())
    }
    /// Returns this memory's contents as an immutable byte slice borrowed
    /// from `store`.
    ///
    /// # Panics
    ///
    /// Panics if `store` does not own this memory.
    pub fn data<'a, T: 'static>(&self, store: impl Into<StoreContext<'a, T>>) -> &'a [u8] {
        unsafe {
            let store = store.into();
            let definition = store[self.instance].memory(self.index);
            // A `Memory` is never shared (see `_new`), so a plain `&[u8]`
            // cannot observe cross-thread mutation.
            debug_assert!(!self.ty(store).is_shared());
            // SAFETY: the `'a` borrow of the store prevents anything from
            // growing or mutating this memory while the slice is live, so
            // `base`/`current_length` remain valid — presumed guaranteed by
            // the store's ownership model.
            slice::from_raw_parts(definition.base.as_ptr(), definition.current_length())
        }
    }
    /// Returns this memory's contents as a mutable byte slice borrowed from
    /// `store`.
    ///
    /// # Panics
    ///
    /// Panics if `store` does not own this memory.
    pub fn data_mut<'a, T: 'static>(
        &self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> &'a mut [u8] {
        unsafe {
            let store = store.into();
            let definition = store[self.instance].memory(self.index);
            debug_assert!(!self.ty(store).is_shared());
            // SAFETY: the exclusive `'a` borrow of the store guarantees this
            // is the only live view of the memory's bytes — presumed upheld
            // by the store's ownership model.
            slice::from_raw_parts_mut(definition.base.as_ptr(), definition.current_length())
        }
    }
    /// Returns both the memory's bytes and the store's host data `T` as
    /// simultaneous mutable borrows.
    pub fn data_and_store_mut<'a, T: 'static>(
        &self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> (&'a mut [u8], &'a mut T) {
        unsafe {
            let mut store = store.into();
            // SAFETY: the raw-pointer round trip manufactures a second `'a`
            // borrow out of `store`. This relies on the host data `T` and the
            // wasm linear memory being disjoint allocations so the two
            // `&mut` references never alias — NOTE(review): confirm against
            // the store's internal layout guarantees.
            let data = &mut *(store.data_mut() as *mut T);
            (self.data_mut(store), data)
        }
    }
    /// Returns the raw base pointer of this memory's data.
    ///
    /// Note that the pointer (and the length it's valid for) can change if
    /// the memory grows.
    pub fn data_ptr(&self, store: impl AsContext) -> *mut u8 {
        store.as_context()[self.instance]
            .memory(self.index)
            .base
            .as_ptr()
    }
    /// Returns the current size of this memory, in bytes.
    pub fn data_size(&self, store: impl AsContext) -> usize {
        self.internal_data_size(store.as_context().0)
    }
    // Crate-internal byte-size accessor working directly on `StoreOpaque`.
    pub(crate) fn internal_data_size(&self, store: &StoreOpaque) -> usize {
        store[self.instance].memory(self.index).current_length()
    }
    /// Returns the current size of this memory, in units of wasm pages
    /// (byte size divided by this memory's page size).
    pub fn size(&self, store: impl AsContext) -> u64 {
        self.internal_size(store.as_context().0)
    }
    // Crate-internal page-count computation on `StoreOpaque`.
    pub(crate) fn internal_size(&self, store: &StoreOpaque) -> u64 {
        let byte_size = self.internal_data_size(store);
        // Page size always fits in `usize`/the quotient in `u64` for valid
        // memories, hence the `unwrap`s.
        let page_size = usize::try_from(self._page_size(store)).unwrap();
        u64::try_from(byte_size / page_size).unwrap()
    }
    /// Returns this memory's page size, in bytes.
    pub fn page_size(&self, store: impl AsContext) -> u64 {
        self._page_size(store.as_context().0)
    }
    pub(crate) fn _page_size(&self, store: &StoreOpaque) -> u64 {
        self.wasmtime_ty(store).page_size()
    }
    /// Returns the log2 of this memory's page size, in bytes.
    pub fn page_size_log2(&self, store: impl AsContext) -> u8 {
        self._page_size_log2(store.as_context().0)
    }
    pub(crate) fn _page_size_log2(&self, store: &StoreOpaque) -> u8 {
        self.wasmtime_ty(store).page_size_log2
    }
    /// Grows this memory by `delta` wasm pages, returning the size reported
    /// by the runtime's grow operation converted to pages (the previous size,
    /// per wasm `memory.grow` semantics).
    ///
    /// # Errors
    ///
    /// Fails if growth exceeds the memory's limits, is denied by the
    /// resource limiter, or requires an async limiter.
    pub fn grow(&self, mut store: impl AsContextMut, delta: u64) -> Result<u64> {
        let store = store.as_context_mut().0;
        // Mirrors `Memory::new`: sync front-end over the shared async
        // implementation.
        let (mut limiter, store) = store.validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(self._grow(store, limiter.as_mut(), delta))
    }
    /// Async variant of [`Memory::grow`], for stores with async resource
    /// limiters.
    #[cfg(feature = "async")]
    pub async fn grow_async(&self, mut store: impl AsContextMut, delta: u64) -> Result<u64> {
        let store = store.as_context_mut();
        let (mut limiter, store) = store.0.resource_limiter_and_store_opaque();
        self._grow(store, limiter.as_mut(), delta).await
    }
    // Shared grow implementation backing both sync and async entry points.
    async fn _grow(
        &self,
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        delta: u64,
    ) -> Result<u64> {
        let result = self
            .instance
            .get_mut(store)
            .memory_grow(limiter, self.index, delta)
            .await?;
        match result {
            // `size` is in bytes; convert to pages for the public API.
            Some(size) => {
                let page_size = self.wasmtime_ty(store).page_size();
                Ok(u64::try_from(size).unwrap() / page_size)
            }
            None => bail!("failed to grow memory by `{delta}`"),
        }
    }
    // Constructs a `Memory` directly from its parts; callers must ensure the
    // pair actually identifies a live defined memory.
    pub(crate) unsafe fn from_raw(instance: StoreInstanceId, index: DefinedMemoryIndex) -> Memory {
        Memory { instance, index }
    }
    // Looks up the environment-level memory descriptor for this memory.
    pub(crate) fn wasmtime_ty<'a>(&self, store: &'a StoreOpaque) -> &'a wasmtime_environ::Memory {
        let module = store[self.instance].env_module();
        // Translate the defined-memory index into the module-wide index space.
        let index = module.memory_index(self.index);
        &module.memories[index]
    }
    // Produces the import descriptor used to wire this memory into another
    // instance.
    pub(crate) fn vmimport(&self, store: &StoreOpaque) -> crate::runtime::vm::VMMemoryImport {
        store[self.instance].get_defined_memory_vmimport(self.index)
    }
    // True when this handle belongs to `store` (store ids match).
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        store.id() == self.instance.store_id()
    }
    // Identity key for coredumps: the memory's definition address, stable
    // across duplicate handles to the same export.
    #[cfg(feature = "coredump")]
    pub(crate) fn hash_key(&self, store: &StoreOpaque) -> impl core::hash::Hash + Eq + use<> {
        store[self.instance].memory_ptr(self.index).as_ptr().addr()
    }
}
/// A linear-memory implementation supplied by an embedder (via
/// [`MemoryCreator`]).
///
/// # Safety
///
/// Implementations must uphold the runtime's expectations — notably that
/// `as_ptr` stays valid for at least `byte_size` bytes and that `grow_to`
/// preserves existing contents. NOTE(review): the full contract is defined by
/// the runtime code consuming this trait; only the shape is visible here.
pub unsafe trait LinearMemory: Send + Sync + 'static {
    /// Current accessible size of the memory, in bytes.
    fn byte_size(&self) -> usize;
    /// Total bytes this memory can reach without reallocating.
    fn byte_capacity(&self) -> usize;
    /// Grows the memory to `new_size` bytes.
    fn grow_to(&mut self, new_size: usize) -> Result<()>;
    /// Base pointer of the memory's data.
    fn as_ptr(&self) -> *mut u8;
}
/// An embedder-provided factory for custom [`LinearMemory`] implementations.
///
/// # Safety
///
/// Returned memories must honor the requested size bounds and
/// reservation/guard parameters; the runtime relies on them for memory
/// safety of compiled wasm code.
pub unsafe trait MemoryCreator: Send + Sync {
    /// Creates a new linear memory for `ty`.
    ///
    /// * `minimum` / `maximum` — size bounds, in bytes.
    /// * `reserved_size_in_bytes` — virtual-address reservation, if any.
    /// * `guard_size_in_bytes` — size of the trailing guard region, in bytes.
    ///
    /// Errors are reported as a `String` description.
    fn new_memory(
        &self,
        ty: MemoryType,
        minimum: usize,
        maximum: Option<usize>,
        reserved_size_in_bytes: Option<usize>,
        guard_size_in_bytes: usize,
    ) -> Result<Box<dyn LinearMemory>, String>;
}
/// A shared WebAssembly linear memory, created against an [`Engine`] rather
/// than a single store, and cloneable across threads.
#[derive(Clone)]
pub struct SharedMemory {
    // The underlying runtime-level shared memory object.
    vm: crate::runtime::vm::SharedMemory,
    // The engine this memory was created with.
    engine: Engine,
}
impl SharedMemory {
    /// Creates a new shared memory in `engine` described by `ty`.
    ///
    /// # Errors
    ///
    /// Fails if `ty` lacks the `shared` flag or if the runtime fails to
    /// allocate the memory.
    #[cfg(feature = "threads")]
    pub fn new(engine: &Engine, ty: MemoryType) -> Result<Self> {
        if !ty.is_shared() {
            bail!("shared memory must have the `shared` flag enabled on its memory type")
        }
        // Shared memory types are expected to always declare a maximum —
        // presumably guaranteed by `MemoryType` validation upstream.
        debug_assert!(ty.maximum().is_some());
        let ty = ty.wasmtime_memory();
        let memory = crate::runtime::vm::SharedMemory::new(engine, ty)?;
        Ok(Self {
            vm: memory,
            engine: engine.clone(),
        })
    }
    /// Returns the [`MemoryType`] of this shared memory.
    pub fn ty(&self) -> MemoryType {
        MemoryType::from_wasmtime_memory(&self.vm.ty())
    }
    /// Returns the current size in units of wasm pages (byte size divided by
    /// this memory's page size).
    pub fn size(&self) -> u64 {
        let byte_size = u64::try_from(self.data_size()).unwrap();
        let page_size = self.page_size();
        byte_size / page_size
    }
    /// Returns this memory's page size, in bytes.
    pub fn page_size(&self) -> u64 {
        self.ty().page_size()
    }
    /// Returns the current size of this memory, in bytes.
    pub fn data_size(&self) -> usize {
        self.vm.byte_size()
    }
    /// Returns the memory's contents.
    ///
    /// Elements are `UnsafeCell<u8>` because other threads may be mutating
    /// the contents concurrently; actually reading/writing a byte therefore
    /// requires `unsafe` on the caller's part.
    pub fn data(&self) -> &[UnsafeCell<u8>] {
        unsafe {
            let definition = self.vm.vmmemory_ptr().as_ref();
            // SAFETY: base pointer and length come from the live runtime
            // definition; wrapping bytes in `UnsafeCell` accounts for
            // concurrent mutation — presumed invariants of the runtime's
            // shared-memory implementation.
            slice::from_raw_parts(definition.base.as_ptr().cast(), definition.current_length())
        }
    }
    /// Grows this memory by `delta` wasm pages, returning the previous size
    /// in pages on success.
    ///
    /// # Errors
    ///
    /// Fails if the grow operation itself errors or if growth is refused
    /// (e.g. the maximum would be exceeded).
    pub fn grow(&self, delta: u64) -> Result<u64> {
        match self.vm.grow(delta)? {
            // The runtime reports byte sizes; convert the old size to pages.
            Some((old_size, _new_size)) => {
                Ok(u64::try_from(old_size).unwrap() / self.page_size())
            }
            None => bail!("failed to grow memory by `{delta}`"),
        }
    }
    /// Wakes up to `count` waiters parked on address `addr`, returning how
    /// many were woken.
    pub fn atomic_notify(&self, addr: u64, count: u32) -> Result<u32, Trap> {
        self.vm.atomic_notify(addr, count)
    }
    /// Implements the `memory.atomic.wait32` operator: blocks (up to
    /// `timeout`) if the 32-bit value at `addr` equals `expected`.
    pub fn atomic_wait32(
        &self,
        addr: u64,
        expected: u32,
        timeout: Option<Duration>,
    ) -> Result<WaitResult, Trap> {
        self.vm.atomic_wait32(addr, expected, timeout)
    }
    /// Implements the `memory.atomic.wait64` operator: blocks (up to
    /// `timeout`) if the 64-bit value at `addr` equals `expected`.
    pub fn atomic_wait64(
        &self,
        addr: u64,
        expected: u64,
        timeout: Option<Duration>,
    ) -> Result<WaitResult, Trap> {
        self.vm.atomic_wait64(addr, expected, timeout)
    }
    // The engine this shared memory was created with.
    pub(crate) fn engine(&self) -> &Engine {
        &self.engine
    }
    // Registers this shared memory with `store` and produces the import
    // descriptor used to wire it into an instance.
    pub(crate) fn vmimport(&self, store: &mut StoreOpaque) -> crate::runtime::vm::VMMemoryImport {
        // No limiter is passed (`None`) and the export future must complete
        // immediately (`assert_ready`): exporting an already-allocated shared
        // memory performs no new allocation requiring async limits.
        let memory = vm::assert_ready(generate_memory_export(
            store,
            None,
            &self.ty(),
            Some(&self.vm),
        ))
        .unwrap();
        match memory {
            // Passing `Some(&self.vm)` guarantees a shared export, so the
            // unshared arm is impossible.
            ExportMemory::Unshared(_) => unreachable!(),
            ExportMemory::Shared(_shared, vmimport) => vmimport,
        }
    }
    // Wraps an existing runtime shared memory in the public type.
    pub(crate) fn from_raw(vm: crate::runtime::vm::SharedMemory, engine: Engine) -> Self {
        SharedMemory { vm, engine }
    }
}
impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Deliberately opaque: internal fields are implementation details,
        // so this renders as `SharedMemory { .. }`.
        let mut builder = f.debug_struct("SharedMemory");
        builder.finish_non_exhaustive()
    }
}
#[cfg(test)]
mod tests {
    use crate::*;

    /// With `memory_reservation(0)` and `memory_guard_size(0)`, a memory's
    /// type must report that bounds checks cannot be elided.
    #[test]
    fn respect_tunables() {
        let mut cfg = Config::new();
        cfg.memory_reservation(0).memory_guard_size(0);
        let mut store = Store::new(&Engine::new(&cfg).unwrap(), ());
        let ty = MemoryType::new(1, None);
        let mem = Memory::new(&mut store, ty).unwrap();
        let store = store.as_context();
        let tunables = store.engine().tunables();
        assert_eq!(tunables.memory_guard_size, 0);
        assert!(
            !mem.wasmtime_ty(store.0)
                .can_elide_bounds_check(tunables, 12)
        );
    }

    /// Two handles to the same exported memory must alias the same bytes and
    /// produce equal `hash_key`s, while a separate instance's memory must
    /// produce a different key.
    #[test]
    fn hash_key_is_stable_across_duplicate_store_data_entries() -> Result<()> {
        let mut store = Store::<()>::default();
        let module = Module::new(
            store.engine(),
            r#"
(module
(memory (export "m") 1 1)
)
"#,
        )?;
        let instance = Instance::new(&mut store, &module, &[])?;
        // `m1` and `m2` are looked up separately but refer to the same export.
        let m1 = instance.get_memory(&mut store, "m").unwrap();
        let m2 = instance.get_memory(&mut store, "m").unwrap();
        assert_eq!(m1.data(&store)[0], 0);
        assert_eq!(m2.data(&store)[0], 0);
        // A write through one handle must be visible through the other.
        m1.data_mut(&mut store)[0] = 42;
        assert_eq!(m1.data(&mut store)[0], 42);
        assert_eq!(m2.data(&mut store)[0], 42);
        assert!(m1.hash_key(&store.as_context().0) == m2.hash_key(&store.as_context().0));
        // A fresh instance gets its own memory and thus a distinct key.
        let instance2 = Instance::new(&mut store, &module, &[])?;
        let m3 = instance2.get_memory(&mut store, "m").unwrap();
        assert!(m1.hash_key(&store.as_context().0) != m3.hash_key(&store.as_context().0));
        Ok(())
    }
}