use crate::runtime::vm::VMGcRef;
use crate::store::{Asyncness, StoreId, StoreResourceLimiter};
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::vm::{self, VMArrayRef, VMGcHeader};
use crate::{AnyRef, FieldType};
use crate::{
ArrayType, AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
OwnedRooted, RefType, Rooted, Val, ValRaw, ValType, WasmTy,
prelude::*,
store::{AutoAssertNoGc, StoreContextMut, StoreOpaque},
};
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcArrayLayout, GcLayout, VMGcKind, VMSharedTypeIndex};
/// An allocator for a particular concrete Wasm GC array type.
///
/// Captures the id of the store it was created in; `ArrayRef::new_from_iter`
/// asserts at allocation time that the allocator is used with that same store.
pub struct ArrayRefPre {
    // Id of the store this allocator was created within.
    store_id: StoreId,
    // The concrete array type that allocated arrays will have.
    ty: ArrayType,
}
impl ArrayRefPre {
    /// Create a new `ArrayRefPre` for allocating arrays of type `ty` within
    /// the given store.
    pub fn new(mut store: impl AsContextMut, ty: ArrayType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    /// Internal constructor operating directly on a `StoreOpaque`.
    pub(crate) fn _new(store: &mut StoreOpaque, ty: ArrayType) -> Self {
        // Register `ty` with the store so the type stays alive for as long as
        // the store can allocate instances of it.
        store.insert_gc_host_alloc_type(ty.registered_type().clone());
        ArrayRefPre {
            store_id: store.id(),
            ty,
        }
    }

    /// The GC layout for arrays allocated via this allocator.
    pub(crate) fn layout(&self) -> &GcArrayLayout {
        let registered = self.ty.registered_type();
        registered
            .layout()
            .expect("array types have a layout")
            .unwrap_array()
    }

    /// The engine-shared index of this allocator's array type.
    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        self.ty.registered_type().index()
    }
}
/// A rooted reference to a GC-managed Wasm array.
#[derive(Debug)]
#[repr(transparent)]
pub struct ArrayRef {
    // Index into the store's GC root set. The `#[repr(transparent)]` layout
    // is what makes `GcRefImpl::transmute_ref`'s reference cast sound.
    pub(super) inner: GcRootIndex,
}
// SAFETY: `ArrayRef` is `#[repr(transparent)]` over `GcRootIndex`, so
// reinterpreting a `&GcRootIndex` as `&ArrayRef` is layout-compatible.
unsafe impl GcRefImpl for ArrayRef {
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `ArrayRef` is a transparent wrapper around `GcRootIndex`.
        let me: &Self = unsafe { mem::transmute(index) };

        // Assert that we really are just a transparent wrapper: this pattern
        // stops compiling if `ArrayRef` ever grows additional fields.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));

        me
    }
}
impl Rooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`.
    ///
    /// Infallible: in the Wasm GC type hierarchy every array is an `any`, so
    /// an unchecked cast suffices.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`.
    ///
    /// Infallible: in the Wasm GC type hierarchy every array is an `eq`.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
impl OwnedRooted<ArrayRef> {
    /// Upcast this `arrayref` into an `anyref`, consuming the owned root.
    ///
    /// Infallible: in the Wasm GC type hierarchy every array is an `any`.
    #[inline]
    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcast this `arrayref` into an `eqref`, consuming the owned root.
    ///
    /// Infallible: in the Wasm GC type hierarchy every array is an `eq`.
    #[inline]
    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
        self.unchecked_cast()
    }
}
/// An iterator that yields the same `&Val` a fixed number of times.
///
/// `Clone` is required because `ArrayRef::new_from_iter` walks its element
/// iterator twice (once for type-checking, once for initialization).
#[derive(Clone)]
struct RepeatN<'a>(&'a Val, u32);

impl<'a> Iterator for RepeatN<'a> {
    type Item = &'a Val;

    fn next(&mut self) -> Option<Self::Item> {
        // `checked_sub` yields `None` once the remaining count hits zero.
        let remaining = self.1.checked_sub(1)?;
        self.1 = remaining;
        Some(self.0)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: we know precisely how many items remain.
        (self.len(), Some(self.len()))
    }
}

impl ExactSizeIterator for RepeatN<'_> {
    fn len(&self) -> usize {
        usize::try_from(self.1).unwrap()
    }
}
impl ArrayRef {
    /// Synchronously allocate a new array containing `len` copies of `elem`,
    /// with the type described by `allocator`.
    ///
    /// # Errors
    ///
    /// Returns an error if the store requires async resource limiting, or if
    /// allocation fails (including `GcHeapOutOfMemory` after a retry GC).
    ///
    /// # Panics
    ///
    /// Panics if `allocator` was created for a different store.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        // Split the store into its resource limiter and raw `StoreOpaque`,
        // erroring out if the limiter demands async usage.
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        // `Asyncness::No` means the future must complete immediately;
        // `vm::assert_ready` asserts exactly that.
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            elem,
            len,
            Asyncness::No,
        ))
    }

    /// Asynchronous variant of [`ArrayRef::new`]; may suspend (e.g. while a
    /// GC runs) rather than requiring immediate completion.
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
    ) -> Result<Rooted<ArrayRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            elem,
            len,
            Asyncness::Yes,
        )
        .await
    }

    /// Shared allocation path backing both [`ArrayRef::new`] and
    /// [`ArrayRef::new_async`]: attempt the allocation, and retry after a GC
    /// if the heap is out of memory.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ArrayRefPre,
        elem: &Val,
        len: u32,
        asyncness: Asyncness,
    ) -> Result<Rooted<ArrayRef>> {
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                // `RepeatN` yields `elem` exactly `len` times.
                Self::new_from_iter(store, allocator, RepeatN(elem, len))
            })
            .await
    }

    /// Allocate a new array whose elements are the values yielded by `elems`.
    ///
    /// The iterator must be `Clone` because it is consumed twice: once to
    /// type-check every element, and once to initialize the array. On any
    /// initialization error, the partially-initialized array is deallocated
    /// before the error is returned.
    ///
    /// # Panics
    ///
    /// Panics if `allocator` belongs to a different store than `store`.
    fn new_from_iter<'a>(
        store: &mut StoreOpaque,
        allocator: &ArrayRefPre,
        elems: impl Clone + ExactSizeIterator<Item = &'a Val>,
    ) -> Result<Rooted<ArrayRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `ArrayRefPre` with the wrong store"
        );

        // First pass: every element value must match the array's element type.
        for elem in elems.clone() {
            elem.ensure_matches_ty(store, allocator.ty.element_type().unpack())
                .context("element type mismatch")?;
        }

        let len = u32::try_from(elems.len()).unwrap();

        // Allocate the uninitialized array. The outer `Result` is an
        // unrecoverable error; the inner `Result` is a recoverable
        // out-of-memory condition that callers may retry after a GC.
        let arrayref = store
            .require_gc_store_mut()?
            .alloc_uninit_array(allocator.type_index(), len, allocator.layout())
            .context("unrecoverable error when allocating new `arrayref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;

        // `AutoAssertNoGc` asserts no GC can run until the array is fully
        // initialized (or deallocated on the failure path below).
        let mut store = AutoAssertNoGc::new(store);
        match (|| {
            // Second pass: write each element into the uninitialized array.
            let elem_ty = allocator.ty.element_type();
            for (i, elem) in elems.enumerate() {
                let i = u32::try_from(i).unwrap();
                debug_assert!(i < len);
                arrayref.initialize_elem(&mut store, allocator.layout(), &elem_ty, i, *elem)?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, arrayref.into())),
            Err(e) => {
                // Don't leak the uninitialized array if initialization failed.
                store.require_gc_store_mut()?.dealloc_uninit_array(arrayref);
                Err(e)
            }
        }
    }

    /// Synchronously allocate a new array whose elements are copied from the
    /// `elems` slice; the resulting array's length is `elems.len()`.
    pub fn new_fixed(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        // Same sync-vs-async split as `ArrayRef::new` above.
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_fixed_async(
            store,
            limiter.as_mut(),
            allocator,
            elems,
            Asyncness::No,
        ))
    }

    /// Asynchronous variant of [`ArrayRef::new_fixed`].
    #[cfg(feature = "async")]
    pub async fn new_fixed_async(
        mut store: impl AsContextMut,
        allocator: &ArrayRefPre,
        elems: &[Val],
    ) -> Result<Rooted<ArrayRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_fixed_async(store, limiter.as_mut(), allocator, elems, Asyncness::Yes).await
    }

    /// Shared allocation path for the `new_fixed*` constructors; retries the
    /// allocation after a GC on out-of-memory.
    pub(crate) async fn _new_fixed_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &ArrayRefPre,
        elems: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<ArrayRef>> {
        store
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_from_iter(store, allocator, elems.iter())
            })
            .await
    }

    /// Whether this reference belongs to (was rooted within) `store`.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Get the concrete array type of this reference.
    ///
    /// # Errors
    ///
    /// Errors if the underlying GC reference is no longer rooted.
    pub fn ty(&self, store: impl AsContext) -> Result<ArrayType> {
        self._ty(store.as_context().0)
    }

    // Internal `ty` implementation operating on a raw `StoreOpaque`.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<ArrayType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(ArrayType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this array's type is a subtype of (matches) `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &ArrayType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    // Internal `matches_ty` implementation.
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    // Like `_matches_ty` but returns a descriptive error instead of `false`,
    // and reports a wrong-store error rather than panicking.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &ArrayType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Get the length (element count) of this array.
    pub fn len(&self, store: impl AsContext) -> Result<u32> {
        self._len(store.as_context().0)
    }

    // Internal `len` implementation.
    pub(crate) fn _len(&self, store: &StoreOpaque) -> Result<u32> {
        assert!(self.comes_from_same_store(store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        // Debug-only check that the GC header really describes an array
        // before the unchecked cast below. (This block — including its `?` —
        // only executes in debug builds.)
        debug_assert!({
            let header = store.require_gc_store()?.header(gc_ref);
            header.kind().matches(VMGcKind::ArrayRef)
        });
        let arrayref = gc_ref.as_arrayref_unchecked();
        Ok(arrayref.len(store))
    }

    /// Iterate over this array's elements as `Val`s.
    ///
    /// The returned iterator borrows the store mutably for its whole
    /// lifetime, so no other store operations can interleave with iteration.
    pub fn elems<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._elems(store.into().0)
    }

    // Internal `elems` implementation.
    pub(crate) fn _elems<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        // The iterator owns this `AutoAssertNoGc`, so no GC can happen while
        // it is alive.
        let store = AutoAssertNoGc::new(store);
        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
        // Snapshot the length once; `Elems` iterates indices `0..len`.
        let len = self._len(&store)?;
        return Ok(Elems {
            arrayref: self,
            store,
            index: 0,
            len,
        });

        // Iterator type returned above; defined after the `return` so the
        // happy path reads first.
        struct Elems<'a, 'b> {
            arrayref: &'a ArrayRef,
            store: AutoAssertNoGc<'b>,
            index: u32,
            len: u32,
        }

        impl Iterator for Elems<'_, '_> {
            type Item = Val;

            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is OK: `i < len` was just checked and the gc ref
                // was validated when the iterator was constructed.
                Some(self.arrayref._get(&mut self.store, i).unwrap())
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                let len = usize::try_from(len).unwrap();
                (len, Some(len))
            }
        }

        impl ExactSizeIterator for Elems<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                let len = self.len - self.index;
                usize::try_from(len).unwrap()
            }
        }
    }

    // Borrow this array's GC header from the store.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    // Borrow the underlying `VMArrayRef`; the debug assert guards the
    // unchecked cast.
    fn arrayref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMArrayRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::ArrayRef));
        Ok(gc_ref.as_arrayref_unchecked())
    }

    // Look up this array's GC layout from the engine's type registry.
    pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("array types should have GC layouts");
        match layout {
            GcLayout::Array(a) => Ok(a),
            // This is an array, so its layout cannot be a struct layout.
            GcLayout::Struct(_) => unreachable!(),
        }
    }

    // The element field type (element type + mutability) of this array.
    fn field_ty(&self, store: &StoreOpaque) -> Result<FieldType> {
        let ty = self._ty(store)?;
        Ok(ty.field_type())
    }

    /// Get the element at `index`.
    ///
    /// # Errors
    ///
    /// Errors if `index` is out of bounds or the reference is unrooted.
    pub fn get(&self, mut store: impl AsContextMut, index: u32) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._get(&mut store, index)
    }

    // Internal `get` implementation: bounds-checked element read.
    pub(crate) fn _get(&self, store: &mut AutoAssertNoGc<'_>, index: u32) -> Result<Val> {
        assert!(
            self.comes_from_same_store(store),
            "attempted to use an array with the wrong store",
        );
        let arrayref = self.arrayref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store)?;
        let layout = self.layout(store)?;
        let len = arrayref.len(store);
        ensure!(
            index < len,
            "index out of bounds: the length is {len} but the index is {index}"
        );
        Ok(arrayref.read_elem(store, &layout, field_ty.element_type(), index))
    }

    /// Set the element at `index` to `value`.
    ///
    /// # Errors
    ///
    /// Errors if the element type is immutable, `value` has the wrong type,
    /// `index` is out of bounds, or the reference is unrooted.
    pub fn set(&self, mut store: impl AsContextMut, index: u32, value: Val) -> Result<()> {
        self._set(store.as_context_mut().0, index, value)
    }

    // Internal `set` implementation: checks mutability, value type, and
    // bounds before writing.
    pub(crate) fn _set(&self, store: &mut StoreOpaque, index: u32, value: Val) -> Result<()> {
        assert!(
            self.comes_from_same_store(store),
            "attempted to use an array with the wrong store",
        );
        assert!(
            value.comes_from_same_store(store),
            "attempted to use a value with the wrong store",
        );
        let mut store = AutoAssertNoGc::new(store);
        let field_ty = self.field_ty(&store)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set element {index}: array elements are not mutable"
        );
        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set element {index}: type mismatch"))?;
        let layout = self.layout(&store)?;
        let arrayref = self.arrayref(&store)?.unchecked_copy();
        let len = arrayref.len(&store);
        ensure!(
            index < len,
            "index out of bounds: the length is {len} but the index is {index}"
        );
        arrayref.write_elem(&mut store, &layout, field_ty.element_type(), index, value)
    }

    // Read this array's concrete type index out of its GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::ArrayRef));
        Ok(header.ty().expect("arrayrefs should have concrete types"))
    }

    // Root a freshly-cloned GC reference (already known to be an arrayref)
    // and wrap it as a `Rooted<ArrayRef>`.
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_arrayref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
// SAFETY: `Rooted<ArrayRef>` is lowered to / lifted from the `anyref` payload
// of a `ValRaw` (see `store`/`load` below), matching the `valtype` reported
// here.
unsafe impl WasmTy for Rooted<ArrayRef> {
    // Non-nullable `(ref array)`: a plain `Rooted<ArrayRef>` (unlike
    // `Option<Rooted<ArrayRef>>`) can never represent a null reference.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // `_nullable` is ignored: this value is never null, so it satisfies both
    // nullable and non-nullable reference types alike.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Abstract supertypes of all arrays: always a match.
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            // Concrete array types require a full subtype check.
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),
            // All other heap types are disjoint from arrays. Kept exhaustive
            // (no `_` arm) so new heap types force an update here.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Lower into the `anyref` field of the raw value representation.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    // Lift from the `anyref` field of the raw value representation.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}
// SAFETY: lowered to / lifted from the `anyref` payload of a `ValRaw`, with
// `None` representing the null reference.
unsafe impl WasmTy for Option<Rooted<ArrayRef>> {
    // Nullable `(ref null array)`, aka `arrayref`.
    #[inline]
    fn valtype() -> ValType {
        ValType::ARRAYREF
    }

    // A null reference is compatible with any store.
    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        match self {
            None => true,
            Some(rooted) => rooted.comes_from_same_store(store),
        }
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        if let Some(rooted) = self {
            // Non-null: defer to the non-optional implementation.
            Rooted::<ArrayRef>::dynamic_concrete_type_check(rooted, store, nullable, ty)
        } else {
            // Null is only allowed when the target reference type is nullable.
            ensure!(
                nullable,
                "expected a non-null reference, but found a null reference"
            );
            Ok(())
        }
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        self.is_some()
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <Rooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}
// SAFETY: same raw representation as `Rooted<ArrayRef>` (the `anyref` payload
// of a `ValRaw`), but with owned rather than scoped rooting.
unsafe impl WasmTy for OwnedRooted<ArrayRef> {
    // Non-nullable `(ref array)`: an `OwnedRooted<ArrayRef>` is never null.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Array))
    }

    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }

    // The nullability flag is ignored: this value is never null, so it
    // satisfies nullable and non-nullable reference types alike.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Abstract supertypes of all arrays: always a match.
            HeapType::Any | HeapType::Eq | HeapType::Array => Ok(()),
            // Concrete array types require a full subtype check.
            HeapType::ConcreteArray(ty) => self.ensure_matches_ty(store, ty),
            // Everything else is disjoint from arrays; the match is kept
            // exhaustive so new heap types force an update here.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Struct
            | HeapType::ConcreteStruct(_)
            | HeapType::Cont
            | HeapType::NoCont
            | HeapType::ConcreteCont(_)
            | HeapType::Exn
            | HeapType::NoExn
            | HeapType::ConcreteExn(_)
            | HeapType::None => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }

    // Lower into the `anyref` field of the raw value representation.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }

    // Lift from the `anyref` field of the raw value representation.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), ArrayRef::from_cloned_gc_ref)
    }
}
// SAFETY: lowered to / lifted from the `anyref` payload of a `ValRaw`, with
// `None` representing the null reference.
unsafe impl WasmTy for Option<OwnedRooted<ArrayRef>> {
    // Nullable `(ref null array)`, aka `arrayref`.
    #[inline]
    fn valtype() -> ValType {
        ValType::ARRAYREF
    }

    // A null reference is compatible with any store.
    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        match self {
            None => true,
            Some(rooted) => rooted.comes_from_same_store(store),
        }
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        // Null is only allowed when the target reference type is nullable.
        let Some(rooted) = self else {
            ensure!(
                nullable,
                "expected a non-null reference, but found a null reference"
            );
            return Ok(());
        };
        // Non-null: defer to the non-optional implementation.
        OwnedRooted::<ArrayRef>::dynamic_concrete_type_check(rooted, store, nullable, ty)
    }

    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        matches!(self, Some(_))
    }

    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<ArrayRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        <OwnedRooted<ArrayRef>>::wasm_ty_option_load(
            store,
            ptr.get_anyref(),
            ArrayRef::from_cloned_gc_ref,
        )
    }
}