use std::borrow::Cow;
use std::cell::UnsafeCell;
use std::fmt;
use std::mem;
use std::ops::Range;
use std::str;
use wasmtime_environ::error::{Result, bail};
pub use wiggle_macro::from_witx;
pub use wasmtime_environ::error;
pub use wiggle_macro::wasmtime_integration;
pub use bitflags;
#[cfg(feature = "wiggle_metadata")]
pub use witx;
mod guest_error;
mod guest_type;
mod region;
pub use tracing;
pub use guest_error::GuestError;
pub use guest_type::{GuestErrorType, GuestType, GuestTypeTransparent};
pub use region::Region;
// Re-export of the `wasmtime` crate so generated code can reference it
// through this crate without adding its own dependency.
#[cfg(feature = "wasmtime")]
pub mod wasmtime_crate {
    pub use wasmtime::*;
}
/// A view of WebAssembly guest linear memory that host functions read and
/// write through.
///
/// `Unshared` wraps exclusively borrowed memory, so direct `&[u8]`/`&mut [u8]`
/// access is possible. `Shared` wraps memory that may be mutated concurrently
/// (presumably by other threads sharing the wasm memory — see
/// `is_shared_memory`), so every byte sits behind an `UnsafeCell` and direct
/// borrows are not handed out (see `as_slice`, which returns `None` for it).
pub enum GuestMemory<'a> {
    Unshared(&'a mut [u8]),
    Shared(&'a [UnsafeCell<u8>]),
}
// SAFETY(review): `Unshared` is a `&mut [u8]`, which is already `Send`/`Sync`
// compatible. `Shared` is `&[UnsafeCell<u8>]`, which is not `Sync` by default;
// these impls assert that racing raw-byte access to shared wasm memory is an
// accepted part of this type's model (all shared access in this file goes
// through copies or raw pointers, never safe references) — confirm against
// the crate's shared-memory documentation.
unsafe impl Send for GuestMemory<'_> {}
unsafe impl Sync for GuestMemory<'_> {}
impl<'a> GuestMemory<'a> {
    /// Reads a `T` from guest memory at `ptr`.
    ///
    /// # Errors
    ///
    /// Propagates any [`GuestError`] produced by `T::read` (out-of-bounds,
    /// misaligned, or invalid-value pointers, as determined by the impl).
    pub fn read<T>(&self, ptr: GuestPtr<T>) -> Result<T, GuestError>
    where
        T: GuestType,
    {
        T::read(self, ptr)
    }

    /// Writes `val` into guest memory at `ptr`.
    ///
    /// # Errors
    ///
    /// Propagates any [`GuestError`] produced by `T::write`.
    pub fn write<T>(&mut self, ptr: GuestPtr<T>, val: T) -> Result<(), GuestError>
    where
        T: GuestType,
    {
        T::write(self, ptr, val)
    }

    /// Returns the guest bytes behind `ptr`, borrowing directly out of
    /// unshared memory and copying out of shared memory (where a borrow
    /// could be invalidated by concurrent writes).
    pub fn as_cow(&self, ptr: GuestPtr<[u8]>) -> Result<Cow<'_, [u8]>, GuestError> {
        match self {
            GuestMemory::Unshared(_) => match self.as_slice(ptr)? {
                Some(slice) => Ok(Cow::Borrowed(slice)),
                // `as_slice` returns `None` only for the `Shared` variant.
                None => unreachable!(),
            },
            GuestMemory::Shared(_) => Ok(Cow::Owned(self.to_vec(ptr)?)),
        }
    }

    /// Like [`GuestMemory::as_cow`], but additionally validates that the
    /// bytes are UTF-8 and returns them as a `str`.
    pub fn as_cow_str(&self, ptr: GuestPtr<str>) -> Result<Cow<'_, str>, GuestError> {
        match self.as_cow(ptr.cast::<[u8]>())? {
            Cow::Owned(bytes) => Ok(Cow::Owned(
                String::from_utf8(bytes).map_err(|e| e.utf8_error())?,
            )),
            Cow::Borrowed(bytes) => Ok(Cow::Borrowed(std::str::from_utf8(bytes)?)),
        }
    }

    /// Returns a direct borrow of the guest bytes behind `ptr`, or `None`
    /// for shared memory where no safe borrow can be handed out.
    pub fn as_slice(&self, ptr: GuestPtr<[u8]>) -> Result<Option<&[u8]>, GuestError> {
        let range = self.validate_range::<u8>(ptr.pointer.0, ptr.pointer.1)?;
        match self {
            GuestMemory::Unshared(slice) => Ok(Some(&slice[range])),
            GuestMemory::Shared(_) => Ok(None),
        }
    }

    /// Like [`GuestMemory::as_slice`], but validates the bytes as UTF-8.
    pub fn as_str(&self, ptr: GuestPtr<str>) -> Result<Option<&str>, GuestError> {
        match self.as_slice(ptr.cast())? {
            Some(bytes) => Ok(Some(std::str::from_utf8(bytes)?)),
            None => Ok(None),
        }
    }

    /// Mutable counterpart of [`GuestMemory::as_slice`]; `None` for shared
    /// memory.
    pub fn as_slice_mut(&mut self, ptr: GuestPtr<[u8]>) -> Result<Option<&mut [u8]>, GuestError> {
        let range = self.validate_range::<u8>(ptr.pointer.0, ptr.pointer.1)?;
        match self {
            GuestMemory::Unshared(slice) => Ok(Some(&mut slice[range])),
            GuestMemory::Shared(_) => Ok(None),
        }
    }

    /// Copies the guest array behind `ptr` into a host-owned `Vec`.
    ///
    /// Works for both shared and unshared memory since the data is copied
    /// out rather than borrowed.
    pub fn to_vec<T>(&self, ptr: GuestPtr<[T]>) -> Result<Vec<T>, GuestError>
    where
        T: GuestTypeTransparent + Copy,
    {
        let guest = self.validate_size_align::<T>(ptr.pointer.0, ptr.pointer.1)?;
        let mut host = Vec::with_capacity(guest.len());
        // SAFETY: `guest` was just validated as an aligned region of
        // `guest.len()` elements and `host` has capacity for exactly that
        // many; `T: GuestTypeTransparent` is relied on to make any guest bit
        // pattern a valid `T` (TODO confirm that is the trait's contract).
        unsafe {
            std::ptr::copy(guest.as_ptr().cast(), host.as_mut_ptr(), guest.len());
            host.set_len(guest.len());
        }
        Ok(host)
    }

    /// Copies `slice` into the guest array at `ptr`.
    ///
    /// # Errors
    ///
    /// Returns `SliceLengthsDiffer` when the host and guest lengths differ,
    /// or a pointer error when the guest region is invalid.
    pub fn copy_from_slice<T>(&mut self, slice: &[T], ptr: GuestPtr<[T]>) -> Result<(), GuestError>
    where
        T: GuestTypeTransparent + Copy,
    {
        if usize::try_from(ptr.len())? != slice.len() {
            return Err(GuestError::SliceLengthsDiffer);
        }
        // Nothing to copy (and nothing to validate) for an empty slice.
        if slice.is_empty() {
            return Ok(());
        }
        let guest = self.validate_size_align::<T>(ptr.pointer.0, ptr.pointer.1)?;
        assert_eq!(guest.len(), slice.len());
        // SAFETY: the destination was just validated as an aligned region of
        // exactly `slice.len()` elements, and the write goes through the
        // `UnsafeCell` representation shared by both memory variants.
        unsafe {
            let guest: &[UnsafeCell<T>] = guest;
            let guest: *const UnsafeCell<T> = guest.as_ptr();
            let guest = guest.cast_mut().cast::<T>();
            std::ptr::copy(slice.as_ptr(), guest, slice.len());
        }
        Ok(())
    }

    /// Validates the region `[offset, offset + len * size_of::<T>())` and
    /// returns it as `&[UnsafeCell<T>]` — the common representation for
    /// shared and unshared memory — after checking `T`'s guest alignment.
    fn validate_size_align<T>(&self, offset: u32, len: u32) -> Result<&[UnsafeCell<T>], GuestError>
    where
        T: GuestTypeTransparent,
    {
        let range = self.validate_range::<T>(offset, len)?;
        let cells = match self {
            GuestMemory::Unshared(s) => {
                let s: &[u8] = s;
                // SAFETY: `UnsafeCell<u8>` has the same layout as `u8`, and
                // we hold exclusive access to this memory, so viewing it
                // through shared `UnsafeCell`s is sound.
                unsafe { &*(s as *const [u8] as *const [UnsafeCell<u8>]) }
            }
            GuestMemory::Shared(s) => s,
        };
        let memory = &cells[range.clone()];
        // Host alignment must not be stricter than the guest ABI alignment,
        // otherwise `align_to` below could reject guest-valid pointers.
        assert!(mem::align_of::<T>() <= T::guest_align());
        // SAFETY: relies on `T: GuestTypeTransparent` meaning raw guest
        // bytes can be reinterpreted as `T` (TODO confirm trait contract).
        let (start, mid, end) = unsafe { memory.align_to() };
        if start.len() > 0 || end.len() > 0 {
            let region = Region {
                start: range.start as u32,
                len: range.len() as u32,
            };
            return Err(GuestError::PtrNotAligned(region, T::guest_align() as u32));
        }
        Ok(mid)
    }

    /// Validates that `len` elements of `T` starting at byte `offset` lie
    /// entirely within this memory, returning the byte range on success.
    fn validate_range<T>(&self, offset: u32, len: u32) -> Result<Range<usize>, GuestError>
    where
        T: GuestTypeTransparent,
    {
        let byte_len = len
            .checked_mul(T::guest_size())
            .ok_or(GuestError::PtrOverflow)?;
        let region = Region {
            start: offset,
            len: byte_len,
        };
        let offset = usize::try_from(offset)?;
        let byte_len = usize::try_from(byte_len)?;
        // Checked add: on 32-bit hosts `usize` is the same width as `u32`,
        // so `offset + byte_len` could otherwise panic in debug builds (or
        // wrap in release) instead of reporting an overflow.
        let end = offset
            .checked_add(byte_len)
            .ok_or(GuestError::PtrOverflow)?;
        let range = offset..end;
        let oob = match self {
            GuestMemory::Unshared(b) => b.get(range.clone()).is_none(),
            GuestMemory::Shared(b) => b.get(range.clone()).is_none(),
        };
        if oob {
            Err(GuestError::PtrOutOfBounds(region))
        } else {
            Ok(range)
        }
    }

    /// Returns whether this view is backed by shared (concurrently mutable)
    /// memory.
    pub fn is_shared_memory(&self) -> bool {
        match self {
            GuestMemory::Shared(_) => true,
            GuestMemory::Unshared(_) => false,
        }
    }
}
/// A typed pointer into guest memory.
///
/// Only the raw guest representation is stored — no host pointer. Per the
/// `Pointee` impls below, that representation is a `u32` address for sized
/// `T` and a `(u32, u32)` address/length pair for `[T]` and `str`. All
/// dereferencing happens through [`GuestMemory`].
#[repr(transparent)]
pub struct GuestPtr<T: ?Sized + Pointee> {
    pointer: T::Pointer,
}
impl<T: ?Sized + Pointee> GuestPtr<T> {
    /// Creates a pointer from its raw guest representation.
    pub fn new(pointer: T::Pointer) -> GuestPtr<T> {
        GuestPtr { pointer }
    }

    /// Returns the raw guest representation of this pointer.
    pub fn offset(&self) -> T::Pointer {
        self.pointer
    }

    /// Reinterprets this pointer as pointing to a `U` with the same raw
    /// representation (e.g. `str` ↔ `[u8]`).
    pub fn cast<U>(&self) -> GuestPtr<U>
    where
        U: Pointee<Pointer = T::Pointer> + ?Sized,
    {
        GuestPtr::new(self.pointer)
    }

    /// Advances this pointer by `amt` elements of `T`.
    ///
    /// # Errors
    ///
    /// Returns `PtrOverflow` if the byte-offset arithmetic overflows `u32`.
    pub fn add(&self, amt: u32) -> Result<GuestPtr<T>, GuestError>
    where
        T: GuestType + Pointee<Pointer = u32>,
    {
        amt.checked_mul(T::guest_size())
            .and_then(|bytes| self.pointer.checked_add(bytes))
            .map(GuestPtr::new)
            .ok_or(GuestError::PtrOverflow)
    }

    /// Views this pointer as the start of an array of `elems` elements.
    pub fn as_array(&self, elems: u32) -> GuestPtr<[T]>
    where
        T: GuestType + Pointee<Pointer = u32>,
    {
        GuestPtr::new((self.pointer, elems))
    }
}
impl<T> GuestPtr<[T]> {
    /// Guest address of the first element of the array.
    pub fn offset_base(&self) -> u32 {
        self.pointer.0
    }

    /// Number of elements in the array.
    pub fn len(&self) -> u32 {
        self.pointer.1
    }

    /// Iterates over a pointer to each element of the array; each item is a
    /// `Result` because computing an element's address can overflow.
    pub fn iter(&self) -> impl ExactSizeIterator<Item = Result<GuestPtr<T>, GuestError>> + '_
    where
        T: GuestType,
    {
        let first = self.as_ptr();
        (0..self.len()).map(move |idx| first.add(idx))
    }

    /// Pointer to the first element of the array.
    pub fn as_ptr(&self) -> GuestPtr<T> {
        GuestPtr::new(self.offset_base())
    }

    /// Pointer to element `index`, or `None` when `index` is out of bounds.
    pub fn get(&self, index: u32) -> Option<GuestPtr<T>>
    where
        T: GuestType,
    {
        (index < self.len()).then(|| {
            self.as_ptr()
                .add(index)
                .expect("just performed bounds check")
        })
    }

    /// Pointer to the subarray `r`, or `None` when the range is inverted or
    /// extends past the end of the array.
    pub fn get_range(&self, r: std::ops::Range<u32>) -> Option<GuestPtr<[T]>>
    where
        T: GuestType,
    {
        if r.end < r.start || r.end > self.len() {
            return None;
        }
        Some(
            self.as_ptr()
                .add(r.start)
                .expect("just performed bounds check")
                .as_array(r.end - r.start),
        )
    }
}
impl GuestPtr<str> {
    /// Guest address of the first byte of the string.
    pub fn offset_base(&self) -> u32 {
        let (base, _len) = self.pointer;
        base
    }

    /// Length of the string, in bytes.
    pub fn len(&self) -> u32 {
        let (_base, len) = self.pointer;
        len
    }

    /// Views the string as a raw byte slice (same representation, no UTF-8
    /// validation implied).
    pub fn as_bytes(&self) -> GuestPtr<[u8]> {
        GuestPtr::new(self.pointer)
    }
}
// Manual `Clone`/`Copy` impls: deriving would add `T: Clone`/`T: Copy`
// bounds, but only `T::Pointer` (required to be `Copy` by the `Pointee`
// trait) is actually stored.
impl<T: ?Sized + Pointee> Clone for GuestPtr<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T: ?Sized + Pointee> Copy for GuestPtr<T> {}
// Debug formatting is delegated to `Pointee`, which knows whether the raw
// representation carries a length component.
impl<T: ?Sized + Pointee> fmt::Debug for GuestPtr<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        T::debug(self.pointer, f)
    }
}
// Two pointers are equal when their raw guest representations are equal.
impl<T: ?Sized + Pointee> PartialEq for GuestPtr<T> {
    fn eq(&self, other: &Self) -> bool {
        self.pointer == other.pointer
    }
}
// Sealed-trait pattern: `Pointee` has `private::Sealed` as a supertrait, and
// since `Sealed` is not exported, downstream crates cannot implement
// `Pointee` for new types. The impls below cover exactly the supported
// pointees: all sized `T`, `[T]`, and `str`.
mod private {
    pub trait Sealed {}
    impl<T> Sealed for T {}
    impl<T> Sealed for [T] {}
    impl Sealed for str {}
}
/// Types that can sit behind a [`GuestPtr`]: all sized `T`, plus the unsized
/// `[T]` and `str`. Sealed (see `private`), so no external impls are
/// possible.
pub trait Pointee: private::Sealed {
    // Raw guest representation of a pointer to `Self`.
    #[doc(hidden)]
    type Pointer: Copy + PartialEq;
    // `Debug`-formats a raw pointer value; used by `GuestPtr`'s `Debug` impl.
    #[doc(hidden)]
    fn debug(pointer: Self::Pointer, f: &mut fmt::Formatter) -> fmt::Result;
}
// Sized types: a bare 32-bit guest address.
impl<T> Pointee for T {
    type Pointer = u32;
    fn debug(pointer: Self::Pointer, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "*guest {pointer:#x}")
    }
}
// Slices: a (base address, element count) pair.
impl<T> Pointee for [T] {
    type Pointer = (u32, u32);
    fn debug(pointer: Self::Pointer, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "*guest {:#x}/{}", pointer.0, pointer.1)
    }
}
// Strings: same representation as a byte slice (base, byte length).
impl Pointee for str {
    type Pointer = (u32, u32);
    fn debug(pointer: Self::Pointer, f: &mut fmt::Formatter) -> fmt::Result {
        <[u8]>::debug(pointer, f)
    }
}
/// Polls `future` exactly once on a no-op waker, for futures that are known
/// to complete immediately (e.g. synchronous bodies behind an async
/// signature). Bails with an error if the future is actually pending.
pub fn run_in_dummy_executor<F: std::future::Future>(future: F) -> Result<F::Output> {
    use std::pin::Pin;
    use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};

    let waker = dummy_waker();
    let mut cx = Context::from_waker(&waker);
    let mut pinned = Pin::from(Box::new(future));
    match pinned.as_mut().poll(&mut cx) {
        Poll::Ready(val) => Ok(val),
        Poll::Pending => bail!(
            "Cannot wait on pending future: must enable wiggle \"async\" future and execute on an async Store"
        ),
    }

    /// Builds a waker whose wake/clone/drop hooks do nothing. The data
    /// pointer is the arbitrary sentinel `5`; each hook asserts it receives
    /// that sentinel to catch vtable misuse.
    fn dummy_waker() -> Waker {
        const VTABLE: RawWakerVTable = RawWakerVTable::new(clone, wake, wake_by_ref, drop);

        unsafe fn clone(ptr: *const ()) -> RawWaker {
            assert_eq!(ptr as usize, 5);
            RawWaker::new(ptr, &VTABLE)
        }
        unsafe fn wake(ptr: *const ()) {
            assert_eq!(ptr as usize, 5);
        }
        unsafe fn wake_by_ref(ptr: *const ()) {
            assert_eq!(ptr as usize, 5);
        }
        unsafe fn drop(ptr: *const ()) {
            assert_eq!(ptr as usize, 5);
        }

        // SAFETY: every vtable function is a no-op (aside from the sentinel
        // assertions), so the `RawWaker` contract is trivially upheld.
        unsafe { Waker::from_raw(RawWaker::new(5 as *const (), &VTABLE)) }
    }
}