1#![no_std]
3#![feature(maybe_uninit_as_bytes)]
4#![warn(missing_docs)]
5
6use core::{mem::MaybeUninit, slice};
7
8use axerrno::AxError;
9use extern_trait::extern_trait;
10
/// Errors that can occur while accessing user-space virtual memory.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum VmError {
    /// The address is invalid (e.g. misaligned or not mapped).
    BadAddress,
    /// The memory is mapped but the requested access is not permitted.
    AccessDenied,
    /// A length-limited read (e.g. a NUL-terminated string scan) exceeded
    /// the allowed maximum length.
    #[cfg(feature = "alloc")]
    TooLong,
}
26
27impl From<VmError> for AxError {
28 fn from(err: VmError) -> Self {
29 match err {
30 VmError::BadAddress | VmError::AccessDenied => AxError::BadAddress,
31 #[cfg(feature = "alloc")]
32 VmError::TooLong => AxError::NameTooLong,
33 }
34 }
35}
36
37pub type VmResult<T = ()> = Result<T, VmError>;
39
/// Low-level user-space memory accessor, provided by the platform via
/// [`extern_trait`] (the implementation is named `VmImpl`).
///
/// # Safety
///
/// Implementors must ensure that `read` and `write` only touch memory that
/// is validly mapped for the current address space, and that `read` fully
/// initializes every byte of `buf` on success.
#[extern_trait(VmImpl)]
pub unsafe trait VmIo {
    /// Creates a new accessor for the current address space.
    fn new() -> Self;

    /// Reads `buf.len()` bytes starting at virtual address `start` into `buf`.
    ///
    /// # Errors
    ///
    /// Returns a [`VmError`] if the range is unmapped or not readable.
    fn read(&mut self, start: usize, buf: &mut [MaybeUninit<u8>]) -> VmResult;

    /// Writes all of `buf` to virtual memory starting at address `start`.
    ///
    /// # Errors
    ///
    /// Returns a [`VmError`] if the range is unmapped or not writable.
    fn write(&mut self, start: usize, buf: &[u8]) -> VmResult;
}
61
62pub fn vm_read_slice<T>(ptr: *const T, buf: &mut [MaybeUninit<T>]) -> VmResult {
64 if !ptr.is_aligned() {
65 return Err(VmError::BadAddress);
66 }
67 VmImpl::new().read(ptr.addr(), buf.as_bytes_mut())
68}
69
70pub fn vm_write_slice<T>(ptr: *mut T, buf: &[T]) -> VmResult {
72 if !ptr.is_aligned() {
73 return Err(VmError::BadAddress);
74 }
75 let bytes = unsafe { slice::from_raw_parts(buf.as_ptr().cast::<u8>(), size_of_val(buf)) };
78 VmImpl::new().write(ptr.addr(), bytes)
79}
80
81mod thin;
82pub use thin::{VmMutPtr, VmPtr};
83
84#[cfg(feature = "alloc")]
85mod alloc;
86#[cfg(feature = "alloc")]
87pub use alloc::{vm_load, vm_load_any, vm_load_until_nul};