use crate::runtime::vm::VMGcRef;
use crate::store::{Asyncness, StoreId};
#[cfg(feature = "async")]
use crate::vm::VMStore;
use crate::vm::{self, VMGcHeader, VMStructRef};
use crate::{AnyRef, FieldType};
use crate::{
AsContext, AsContextMut, EqRef, GcHeapOutOfMemory, GcRefImpl, GcRootIndex, HeapType,
OwnedRooted, RefType, Rooted, StructType, Val, ValRaw, ValType, WasmTy,
prelude::*,
store::{AutoAssertNoGc, StoreContextMut, StoreOpaque, StoreResourceLimiter},
};
use core::mem::{self, MaybeUninit};
use wasmtime_environ::{GcLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
/// A pre-computed allocator for GC `struct` objects of one concrete type.
///
/// Caches the store association and the registered type so that repeated
/// allocations of the same struct type (via [`StructRef::new`]) do not have
/// to redo type registration and layout lookup each time.
pub struct StructRefPre {
    // The id of the store this allocator was created for; checked at
    // allocation time so a pre can't be used with a different store.
    store_id: StoreId,
    // The concrete struct type that this allocator produces.
    ty: StructType,
}
impl StructRefPre {
    /// Creates a new allocator for structs of type `ty` within `store`.
    pub fn new(mut store: impl AsContextMut, ty: StructType) -> Self {
        Self::_new(store.as_context_mut().0, ty)
    }

    /// Internal constructor operating directly on the opaque store.
    ///
    /// Registers the type with the store so that host-allocated structs of
    /// this type remain valid for the store's lifetime.
    pub(crate) fn _new(store: &mut StoreOpaque, ty: StructType) -> Self {
        let registered = ty.registered_type().clone();
        store.insert_gc_host_alloc_type(registered);
        StructRefPre {
            store_id: store.id(),
            ty,
        }
    }

    /// Returns the GC layout for this allocator's struct type.
    pub(crate) fn layout(&self) -> &GcStructLayout {
        let registered = self.ty.registered_type();
        let layout = registered
            .layout()
            .expect("struct types have a layout");
        layout.unwrap_struct()
    }

    /// Returns the engine-wide shared index of this allocator's struct type.
    pub(crate) fn type_index(&self) -> VMSharedTypeIndex {
        let registered = self.ty.registered_type();
        registered.index()
    }
}
/// A rooted reference to a GC-managed Wasm `struct` object.
///
/// `repr(transparent)` over `GcRootIndex`: this layout guarantee is what
/// makes the `mem::transmute` in `GcRefImpl::transmute_ref` sound, so it
/// must not be removed.
#[derive(Debug)]
#[repr(transparent)]
pub struct StructRef {
    pub(super) inner: GcRootIndex,
}
unsafe impl GcRefImpl for StructRef {
    /// Reinterprets a `&GcRootIndex` as a `&StructRef`.
    fn transmute_ref(index: &GcRootIndex) -> &Self {
        // SAFETY: `StructRef` is `repr(transparent)` over `GcRootIndex`, so
        // the two types have identical layout.
        let me: &Self = unsafe { mem::transmute(index) };
        // This assertion is free at runtime; its purpose is to fail to
        // compile if `StructRef` ever grows fields beyond `inner`, which
        // would invalidate the transmute above.
        assert!(matches!(
            me,
            Self {
                inner: GcRootIndex { .. },
            }
        ));
        me
    }
}
impl Rooted<StructRef> {
    /// Upcasts this `structref` into an `anyref`.
    ///
    /// Infallible: every struct is an `any` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> Rooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcasts this `structref` into an `eqref`.
    ///
    /// Infallible: every struct is an `eq` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_eqref(self) -> Rooted<EqRef> {
        self.unchecked_cast()
    }
}
impl OwnedRooted<StructRef> {
    /// Upcasts this `structref` into an `anyref`, consuming the root.
    ///
    /// Infallible: every struct is an `any` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_anyref(self) -> OwnedRooted<AnyRef> {
        self.unchecked_cast()
    }

    /// Upcasts this `structref` into an `eqref`, consuming the root.
    ///
    /// Infallible: every struct is an `eq` in the Wasm GC type hierarchy.
    #[inline]
    pub fn to_eqref(self) -> OwnedRooted<EqRef> {
        self.unchecked_cast()
    }
}
impl StructRef {
    /// Synchronously allocates a new struct with the given field values.
    ///
    /// Type-checks `fields` against `allocator`'s struct type, then
    /// allocates (retrying after a GC on heap exhaustion). The sync
    /// resource limiter is validated up front; with `Asyncness::No` the
    /// async path below is expected to complete immediately, hence
    /// `vm::assert_ready`.
    pub fn new(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store
            .as_context_mut()
            .0
            .validate_sync_resource_limiter_and_store_opaque()?;
        vm::assert_ready(Self::_new_async(
            store,
            limiter.as_mut(),
            allocator,
            fields,
            Asyncness::No,
        ))
    }

    /// Async counterpart of [`StructRef::new`] for stores whose resource
    /// limiter may suspend (e.g. async GC).
    #[cfg(feature = "async")]
    pub async fn new_async(
        mut store: impl AsContextMut,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        let (mut limiter, store) = store.as_context_mut().0.resource_limiter_and_store_opaque();
        Self::_new_async(store, limiter.as_mut(), allocator, fields, Asyncness::Yes).await
    }

    /// Shared allocation path: type-check once, then allocate with
    /// automatic retry-after-GC on out-of-memory.
    pub(crate) async fn _new_async(
        store: &mut StoreOpaque,
        limiter: Option<&mut StoreResourceLimiter<'_>>,
        allocator: &StructRefPre,
        fields: &[Val],
        asyncness: Asyncness,
    ) -> Result<Rooted<StructRef>> {
        Self::type_check_fields(store, allocator, fields)?;
        store
            // On GC-heap OOM, `retry_after_gc_async` collects garbage and
            // re-runs the closure once more before giving up.
            .retry_after_gc_async(limiter, (), asyncness, |store, ()| {
                Self::new_unchecked(store, allocator, fields)
            })
            .await
    }

    /// Checks that `fields` has the right arity and that each value's type
    /// matches the corresponding field of `allocator`'s struct type.
    ///
    /// Panics (via `assert!`) if any value belongs to a different store;
    /// returns `Err` on arity or type mismatch.
    fn type_check_fields(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<(), Error> {
        let expected_len = allocator.ty.fields().len();
        let actual_len = fields.len();
        ensure!(
            actual_len == expected_len,
            "expected {expected_len} fields, got {actual_len}"
        );
        for (ty, val) in allocator.ty.fields().zip(fields) {
            assert!(
                val.comes_from_same_store(store),
                "field value comes from the wrong store",
            );
            // Unpack the (possibly packed i8/i16) storage type into the
            // value type used for dynamic type checks.
            let ty = ty.element_type().unpack();
            val.ensure_matches_ty(store, ty)
                .context("field type mismatch")?;
        }
        Ok(())
    }

    /// Allocates and initializes a struct WITHOUT type-checking `fields`;
    /// callers must have run `type_check_fields` first.
    ///
    /// Allocates an uninitialized struct, then fills each field inside an
    /// `AutoAssertNoGc` scope (no GC may move the object while raw field
    /// writes are in flight). If any field initialization fails, the
    /// partially-initialized struct is deallocated so the GC never sees it.
    fn new_unchecked(
        store: &mut StoreOpaque,
        allocator: &StructRefPre,
        fields: &[Val],
    ) -> Result<Rooted<StructRef>> {
        assert_eq!(
            store.id(),
            allocator.store_id,
            "attempted to use a `StructRefPre` with the wrong store"
        );
        let structref = store
            .require_gc_store_mut()?
            .alloc_uninit_struct(allocator.type_index(), &allocator.layout())
            // Outer `Result`: unrecoverable allocator errors. Inner
            // `Result`: recoverable OOM, surfaced as `GcHeapOutOfMemory` so
            // `retry_after_gc_async` can trigger a collection and retry.
            .context("unrecoverable error when allocating new `structref`")?
            .map_err(|n| GcHeapOutOfMemory::new((), n))?;
        let mut store = AutoAssertNoGc::new(store);
        // Immediately-invoked closure so that `?` failures fall through to
        // the deallocation arm below instead of leaking the uninit struct.
        match (|| {
            for (index, (ty, val)) in allocator.ty.fields().zip(fields).enumerate() {
                structref.initialize_field(
                    &mut store,
                    allocator.layout(),
                    ty.element_type(),
                    index,
                    *val,
                )?;
            }
            Ok(())
        })() {
            Ok(()) => Ok(Rooted::new(&mut store, structref.into())),
            Err(e) => {
                store
                    .require_gc_store_mut()?
                    .dealloc_uninit_struct(structref);
                Err(e)
            }
        }
    }

    /// Whether this reference was rooted in the given store.
    #[inline]
    pub(crate) fn comes_from_same_store(&self, store: &StoreOpaque) -> bool {
        self.inner.comes_from_same_store(store)
    }

    /// Returns the concrete struct type of this reference.
    pub fn ty(&self, store: impl AsContext) -> Result<StructType> {
        self._ty(store.as_context().0)
    }

    /// Internal `ty` on the opaque store; reconstructs the `StructType`
    /// from the shared type index stored in the object's GC header.
    pub(crate) fn _ty(&self, store: &StoreOpaque) -> Result<StructType> {
        assert!(self.comes_from_same_store(store));
        let index = self.type_index(store)?;
        Ok(StructType::from_shared_type_index(store.engine(), index))
    }

    /// Whether this reference's type is a subtype of `ty`.
    pub fn matches_ty(&self, store: impl AsContext, ty: &StructType) -> Result<bool> {
        self._matches_ty(store.as_context().0, ty)
    }

    /// Internal `matches_ty` on the opaque store.
    pub(crate) fn _matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<bool> {
        assert!(self.comes_from_same_store(store));
        Ok(self._ty(store)?.matches(ty))
    }

    /// Like `_matches_ty` but returns a descriptive error on mismatch or
    /// cross-store use instead of a boolean.
    pub(crate) fn ensure_matches_ty(&self, store: &StoreOpaque, ty: &StructType) -> Result<()> {
        if !self.comes_from_same_store(store) {
            bail!("function used with wrong store");
        }
        if self._matches_ty(store, ty)? {
            Ok(())
        } else {
            let actual_ty = self._ty(store)?;
            bail!("type mismatch: expected `(ref {ty})`, found `(ref {actual_ty})`")
        }
    }

    /// Returns an iterator over this struct's field values, in definition
    /// order.
    pub fn fields<'a, T: 'static>(
        &'a self,
        store: impl Into<StoreContextMut<'a, T>>,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        self._fields(store.into().0)
    }

    /// Internal `fields` on the opaque store.
    ///
    /// Captures an `AutoAssertNoGc` guard inside the returned iterator so
    /// the GC cannot run (and move the object) while iteration is ongoing.
    pub(crate) fn _fields<'a>(
        &'a self,
        store: &'a mut StoreOpaque,
    ) -> Result<impl ExactSizeIterator<Item = Val> + 'a> {
        assert!(self.comes_from_same_store(store));
        let store = AutoAssertNoGc::new(store);
        let gc_ref = self.inner.try_gc_ref(&store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        let index = header.ty().expect("structrefs should have concrete types");
        let ty = StructType::from_shared_type_index(store.engine(), index);
        // Field count is fixed by the type, so the iterator can be exact-
        // sized without re-consulting the heap per item.
        let len = ty.fields().len();
        return Ok(Fields {
            structref: self,
            store,
            index: 0,
            len,
        });
        // Local iterator type; private to this method.
        struct Fields<'a, 'b> {
            structref: &'a StructRef,
            store: AutoAssertNoGc<'b>,
            index: usize,
            len: usize,
        }
        impl Iterator for Fields<'_, '_> {
            type Item = Val;
            #[inline]
            fn next(&mut self) -> Option<Self::Item> {
                let i = self.index;
                debug_assert!(i <= self.len);
                if i >= self.len {
                    return None;
                }
                self.index += 1;
                // `unwrap` is justified: index is in-bounds and the gc ref
                // was validated when the iterator was constructed.
                Some(self.structref._field(&mut self.store, i).unwrap())
            }
            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let len = self.len - self.index;
                (len, Some(len))
            }
        }
        impl ExactSizeIterator for Fields<'_, '_> {
            #[inline]
            fn len(&self) -> usize {
                self.len - self.index
            }
        }
    }

    /// Borrows this object's GC header from the heap.
    fn header<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMGcHeader> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        Ok(store.require_gc_store()?.header(gc_ref))
    }

    /// Views this reference's raw GC ref as a `VMStructRef`.
    ///
    /// The unchecked conversion is guarded by the `debug_assert!` on the
    /// header kind; in release builds it relies on the invariant that a
    /// `StructRef` only ever roots struct objects.
    fn structref<'a>(&self, store: &'a AutoAssertNoGc<'_>) -> Result<&'a VMStructRef> {
        assert!(self.comes_from_same_store(&store));
        let gc_ref = self.inner.try_gc_ref(store)?;
        debug_assert!(self.header(store)?.kind().matches(VMGcKind::StructRef));
        Ok(gc_ref.as_structref_unchecked())
    }

    /// Looks up this object's struct layout from its shared type index.
    fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcStructLayout> {
        assert!(self.comes_from_same_store(&store));
        let type_index = self.type_index(store)?;
        let layout = store
            .engine()
            .signatures()
            .layout(type_index)
            .expect("struct types should have GC layouts");
        match layout {
            GcLayout::Struct(s) => Ok(s),
            // The type index came from a struct object's header, so an
            // array layout here would be a broken invariant.
            GcLayout::Array(_) => unreachable!(),
        }
    }

    /// Returns the type of field `field`, or an error if the index is out
    /// of bounds.
    fn field_ty(&self, store: &StoreOpaque, field: usize) -> Result<FieldType> {
        let ty = self._ty(store)?;
        match ty.field(field) {
            Some(f) => Ok(f),
            None => {
                let len = ty.fields().len();
                bail!("cannot access field {field}: struct only has {len} fields")
            }
        }
    }

    /// Reads the value of field `index`.
    pub fn field(&self, mut store: impl AsContextMut, index: usize) -> Result<Val> {
        let mut store = AutoAssertNoGc::new(store.as_context_mut().0);
        self._field(&mut store, index)
    }

    /// Internal field read within an existing no-GC scope.
    pub(crate) fn _field(&self, store: &mut AutoAssertNoGc<'_>, index: usize) -> Result<Val> {
        assert!(self.comes_from_same_store(store));
        let structref = self.structref(store)?.unchecked_copy();
        let field_ty = self.field_ty(store, index)?;
        let layout = self.layout(store)?;
        Ok(structref.read_field(store, &layout, field_ty.element_type(), index))
    }

    /// Writes `value` into field `index`.
    ///
    /// Errors if the field is immutable, the value's type doesn't match, or
    /// the index is out of bounds.
    pub fn set_field(&self, mut store: impl AsContextMut, index: usize, value: Val) -> Result<()> {
        self._set_field(store.as_context_mut().0, index, value)
    }

    /// Internal field write; validates mutability and type before touching
    /// the heap.
    pub(crate) fn _set_field(
        &self,
        store: &mut StoreOpaque,
        index: usize,
        value: Val,
    ) -> Result<()> {
        assert!(self.comes_from_same_store(store));
        let mut store = AutoAssertNoGc::new(store);
        let field_ty = self.field_ty(&store, index)?;
        ensure!(
            field_ty.mutability().is_var(),
            "cannot set field {index}: field is not mutable"
        );
        value
            .ensure_matches_ty(&store, &field_ty.element_type().unpack())
            .with_context(|| format!("cannot set field {index}: type mismatch"))?;
        let layout = self.layout(&store)?;
        let structref = self.structref(&store)?.unchecked_copy();
        structref.write_field(&mut store, &layout, field_ty.element_type(), index, value)
    }

    /// Returns the shared type index recorded in this object's GC header.
    pub(crate) fn type_index(&self, store: &StoreOpaque) -> Result<VMSharedTypeIndex> {
        let gc_ref = self.inner.try_gc_ref(store)?;
        let header = store.require_gc_store()?.header(gc_ref);
        debug_assert!(header.kind().matches(VMGcKind::StructRef));
        Ok(header.ty().expect("structrefs should have concrete types"))
    }

    /// Roots an already-cloned raw GC ref as a `Rooted<StructRef>`.
    ///
    /// Takes ownership of `gc_ref`; the caller must guarantee it actually
    /// points at a struct object (checked in debug builds only).
    pub(crate) fn from_cloned_gc_ref(
        store: &mut AutoAssertNoGc<'_>,
        gc_ref: VMGcRef,
    ) -> Rooted<Self> {
        debug_assert!(gc_ref.is_structref(&*store.unwrap_gc_store().gc_heap));
        Rooted::new(store, gc_ref)
    }
}
unsafe impl WasmTy for Rooted<StructRef> {
    /// Non-nullable `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }
    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }
    /// Checks this value against the destination heap type.
    ///
    /// `_nullable` is ignored: a `Rooted<StructRef>` is never null, so it
    /// satisfies both nullable and non-nullable reference types.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of every struct: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),
            // All remaining heap types are disjoint from structs; the match
            // is deliberately exhaustive so new variants force a review.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }
    /// Lowers this reference into a raw `anyref` ABI value.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }
    /// Lifts a raw `anyref` ABI value back into a rooted reference.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
unsafe impl WasmTy for Option<Rooted<StructRef>> {
    /// Nullable `structref`, i.e. `(ref null struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    /// A null reference belongs to every store; a non-null one must match.
    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        match self {
            None => true,
            Some(r) => r.comes_from_same_store(store),
        }
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        // Non-null values delegate to the non-nullable impl's check.
        if let Some(r) = self {
            return Rooted::<StructRef>::dynamic_concrete_type_check(r, store, nullable, ty);
        }
        // Null only type-checks against nullable reference types.
        ensure!(
            nullable,
            "expected a non-null reference, but found a null reference"
        );
        Ok(())
    }

    /// Only a non-null value carries a GC object the runtime must trace.
    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        matches!(self, Some(_))
    }

    /// Lowers this (possibly null) reference into a raw `anyref` ABI value.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <Rooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    /// Lifts a raw `anyref` ABI value back into an optional rooted reference.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        let raw = ptr.get_anyref();
        <Rooted<StructRef>>::wasm_ty_option_load(store, raw, StructRef::from_cloned_gc_ref)
    }
}
unsafe impl WasmTy for OwnedRooted<StructRef> {
    /// Non-nullable `(ref struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::Ref(RefType::new(false, HeapType::Struct))
    }
    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        self.comes_from_same_store(store)
    }
    /// Checks this value against the destination heap type.
    ///
    /// The nullability flag is ignored: an `OwnedRooted<StructRef>` is
    /// never null, so it satisfies both nullable and non-nullable types.
    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        _: bool,
        ty: &HeapType,
    ) -> Result<()> {
        match ty {
            // Supertypes of every struct: always fine.
            HeapType::Any | HeapType::Eq | HeapType::Struct => Ok(()),
            // Concrete struct types require a subtype check.
            HeapType::ConcreteStruct(ty) => self.ensure_matches_ty(store, ty),
            // All remaining heap types are disjoint from structs; the match
            // is deliberately exhaustive so new variants force a review.
            HeapType::Extern
            | HeapType::NoExtern
            | HeapType::Func
            | HeapType::ConcreteFunc(_)
            | HeapType::NoFunc
            | HeapType::I31
            | HeapType::Array
            | HeapType::ConcreteArray(_)
            | HeapType::None
            | HeapType::NoCont
            | HeapType::Cont
            | HeapType::ConcreteCont(_)
            | HeapType::NoExn
            | HeapType::Exn
            | HeapType::ConcreteExn(_) => bail!(
                "type mismatch: expected `(ref {ty})`, got `(ref {})`",
                self._ty(store)?,
            ),
        }
    }
    /// Lowers this reference into a raw `anyref` ABI value.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        self.wasm_ty_store(store, ptr, ValRaw::anyref)
    }
    /// Lifts a raw `anyref` ABI value back into an owned rooted reference.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        Self::wasm_ty_load(store, ptr.get_anyref(), StructRef::from_cloned_gc_ref)
    }
}
unsafe impl WasmTy for Option<OwnedRooted<StructRef>> {
    /// Nullable `structref`, i.e. `(ref null struct)`.
    #[inline]
    fn valtype() -> ValType {
        ValType::STRUCTREF
    }

    /// A null reference belongs to every store; a non-null one must match.
    #[inline]
    fn compatible_with_store(&self, store: &StoreOpaque) -> bool {
        match self {
            None => true,
            Some(r) => r.comes_from_same_store(store),
        }
    }

    #[inline]
    fn dynamic_concrete_type_check(
        &self,
        store: &StoreOpaque,
        nullable: bool,
        ty: &HeapType,
    ) -> Result<()> {
        // Non-null values delegate to the non-nullable impl's check.
        if let Some(r) = self {
            return OwnedRooted::<StructRef>::dynamic_concrete_type_check(r, store, nullable, ty);
        }
        // Null only type-checks against nullable reference types.
        ensure!(
            nullable,
            "expected a non-null reference, but found a null reference"
        );
        Ok(())
    }

    /// Only a non-null value carries a GC object the runtime must trace.
    #[inline]
    fn is_vmgcref_and_points_to_object(&self) -> bool {
        matches!(self, Some(_))
    }

    /// Lowers this (possibly null) reference into a raw `anyref` ABI value.
    fn store(self, store: &mut AutoAssertNoGc<'_>, ptr: &mut MaybeUninit<ValRaw>) -> Result<()> {
        <OwnedRooted<StructRef>>::wasm_ty_option_store(self, store, ptr, ValRaw::anyref)
    }

    /// Lifts a raw `anyref` ABI value back into an optional owned root.
    unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
        let raw = ptr.get_anyref();
        <OwnedRooted<StructRef>>::wasm_ty_option_load(store, raw, StructRef::from_cloned_gc_ref)
    }
}