use ax_memory_set::MappingBackend;
use ax_page_table_multiarch::{MappingFlags, PagingHandler};
use crate::{GuestPhysAddr, npt::NestedPageTable as PageTable};
mod alloc;
mod linear;
/// Memory-mapping backend selector, implementing [`MappingBackend`] for the
/// nested page table ([`PageTable<H>`]) keyed by [`GuestPhysAddr`].
///
/// Two strategies are dispatched in this file:
/// - [`Backend::Linear`]: address translation via a fixed offset, forwarded
///   to `map_linear`/`unmap_linear` (defined in `mod linear`).
/// - [`Backend::Alloc`]: per-mapping frame allocation; `populate` is forwarded
///   to `map_alloc`/`unmap_alloc` and to the page-fault handler, presumably
///   selecting eager vs. lazy (fault-driven) population — confirm in `mod alloc`.
pub enum Backend<H: PagingHandler> {
/// Mapping with a constant offset between the two address spaces.
Linear {
/// Fixed offset applied during translation (exact direction/semantics are
/// defined by `map_linear` in `mod linear` — not visible here).
pa_va_offset: usize,
},
/// Mapping backed by dynamically allocated frames.
Alloc {
/// Passed through to `map_alloc`/`unmap_alloc`/`handle_page_fault_alloc`;
/// presumably `true` = populate eagerly at map time — TODO confirm.
populate: bool,
/// Carries the `H: PagingHandler` type parameter without storing data.
_phantom: core::marker::PhantomData<H>,
},
}
impl<H: PagingHandler> Clone for Backend<H> {
    /// Duplicates the backend descriptor.
    ///
    /// Implemented by hand (rather than `#[derive(Clone)]`) so that no
    /// `H: Clone` bound is imposed on the type parameter; every stored
    /// field (`usize`, `bool`, `PhantomData`) is trivially copyable.
    fn clone(&self) -> Self {
        match self {
            Self::Linear { pa_va_offset } => Self::Linear {
                pa_va_offset: *pa_va_offset,
            },
            Self::Alloc { populate, _phantom } => Self::Alloc {
                populate: *populate,
                _phantom: core::marker::PhantomData,
            },
        }
    }
}
impl<H: PagingHandler> MappingBackend for Backend<H> {
    type Addr = GuestPhysAddr;
    type Flags = MappingFlags;
    type PageTable = PageTable<H>;

    /// Maps `[start, start + size)` with `flags`, dispatching to the
    /// variant-specific strategy (`map_linear` or `map_alloc`).
    fn map(
        &self,
        start: GuestPhysAddr,
        size: usize,
        flags: MappingFlags,
        pt: &mut PageTable<H>,
    ) -> bool {
        match self {
            Self::Linear { pa_va_offset } => {
                self.map_linear(start, size, flags, pt, *pa_va_offset)
            }
            Self::Alloc { populate, .. } => {
                self.map_alloc(start, size, flags, pt, *populate)
            }
        }
    }

    /// Unmaps `[start, start + size)`, dispatching to the variant-specific
    /// strategy (`unmap_linear` or `unmap_alloc`).
    fn unmap(&self, start: GuestPhysAddr, size: usize, pt: &mut PageTable<H>) -> bool {
        match self {
            Self::Linear { pa_va_offset } => {
                self.unmap_linear(start, size, pt, *pa_va_offset)
            }
            Self::Alloc { populate, .. } => self.unmap_alloc(start, size, pt, *populate),
        }
    }

    /// Changes the flags of `[start, start + size)` to `new_flags`.
    /// Both variants share the same path: the page table updates the region
    /// directly, no per-variant work is needed.
    fn protect(
        &self,
        start: GuestPhysAddr,
        size: usize,
        new_flags: MappingFlags,
        page_table: &mut PageTable<H>,
    ) -> bool {
        page_table.protect_region(start, size, new_flags)
    }
}
impl<H: PagingHandler> Backend<H> {
    /// Handles a nested page fault at `vaddr`.
    ///
    /// `Linear` mappings take no fault-time action here and return `false`
    /// (the fault is not handled by this backend). `Alloc` mappings defer to
    /// `handle_page_fault_alloc` (in `mod alloc`), which presumably maps the
    /// faulting page on demand when `populate` was `false` — TODO confirm.
    pub(crate) fn handle_page_fault(
        &self,
        vaddr: GuestPhysAddr,
        orig_flags: MappingFlags,
        page_table: &mut PageTable<H>,
    ) -> bool {
        if let Self::Alloc { populate, .. } = self {
            self.handle_page_fault_alloc(vaddr, orig_flags, page_table, *populate)
        } else {
            false
        }
    }
}