use crate::{
MapError, Mapping,
descriptor::{
Descriptor, PagingAttributes, PhysicalAddress, UpdatableDescriptor, VirtualAddress,
},
paging::{
Constraints, MemoryRegion, PageTable, Translation, TranslationRegime, VaRange, deallocate,
},
};
use core::marker::PhantomData;
use core::ptr::NonNull;
/// An identity translation: each virtual address maps to the physical address
/// with the same numeric value.
///
/// `A` is the descriptor attribute type of the translation regime in use; it
/// is carried only as a type marker, no state is stored.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct IdTranslation<A: PagingAttributes> {
    // Zero-sized marker tying this translation to the attribute type `A`.
    _phantom: PhantomData<A>,
}
impl<A: PagingAttributes> Default for IdTranslation<A> {
fn default() -> Self {
Self::new()
}
}
impl<A: PagingAttributes> IdTranslation<A> {
    /// Creates a new identity translation.
    pub fn new() -> Self {
        Self {
            _phantom: PhantomData,
        }
    }

    /// Converts a virtual address to the physical address with the same
    /// numeric value (the identity mapping).
    fn virtual_to_physical(va: VirtualAddress) -> PhysicalAddress {
        let raw = va.0;
        PhysicalAddress(raw)
    }
}
impl<A: PagingAttributes> Translation<A> for IdTranslation<A> {
    /// Allocates a fresh page table. Under the identity translation the
    /// table's "physical address" is simply its pointer value.
    fn allocate_table(&mut self) -> (NonNull<PageTable<A>>, PhysicalAddress) {
        let table = PageTable::new();
        (table, PhysicalAddress(table.as_ptr() as usize))
    }

    /// Frees a page table previously returned by `allocate_table`.
    ///
    /// # Safety
    ///
    /// See the `Translation` trait contract; `page_table` must have come from
    /// `allocate_table` on this translation and must not be used afterwards.
    unsafe fn deallocate_table(&mut self, page_table: NonNull<PageTable<A>>) {
        // SAFETY: per this method's contract the pointer was produced by
        // `allocate_table` (i.e. by `PageTable::new`), so handing it to
        // `deallocate` is presumed sound — confirm against `deallocate`'s docs.
        unsafe {
            deallocate(page_table);
        }
    }

    /// Identity mapping in reverse: reinterprets the physical address as a
    /// pointer to the page table. Panics if `pa` is 0 (null pointer).
    fn physical_to_virtual(&self, pa: PhysicalAddress) -> NonNull<PageTable<A>> {
        NonNull::new(pa.0 as *mut PageTable<A>).expect("Got physical address 0 for pagetable")
    }
}
/// An identity-mapped page table: every mapped virtual address translates to
/// the physical address with the same value.
///
/// `R` selects the translation regime, and with it the descriptor attribute
/// type used for mappings.
#[derive(Debug)]
pub struct IdMap<R: TranslationRegime> {
    // All operations delegate to this generic mapping, specialised to the
    // identity translation.
    mapping: Mapping<IdTranslation<R::Attributes>, R>,
}
impl<R: TranslationRegime<Asid = (), VaRange = ()>> IdMap<R> {
    /// Creates a new identity map for a regime with no ASID or VA range,
    /// rooted at the given page-table level.
    pub fn new(rootlevel: usize, regime: R) -> Self {
        let translation = IdTranslation::<R::Attributes>::new();
        let mapping = Mapping::new(translation, rootlevel, regime);
        Self { mapping }
    }
}
impl<R: TranslationRegime<Asid = usize, VaRange = VaRange>> IdMap<R> {
    /// Creates a new identity map with the given ASID, rooted at the given
    /// page-table level, covering the lower VA range.
    pub fn with_asid(asid: usize, rootlevel: usize, regime: R) -> Self {
        let translation = IdTranslation::<R::Attributes>::new();
        let mapping =
            Mapping::with_asid_and_va_range(translation, asid, rootlevel, regime, VaRange::Lower);
        Self { mapping }
    }
}
impl<R: TranslationRegime> IdMap<R> {
    /// Returns the size in bytes of the virtual address space covered by the
    /// root page table.
    pub fn size(&self) -> usize {
        self.mapping.size()
    }

    /// Activates the underlying mapping and returns the value that must later
    /// be passed to [`Self::deactivate`] (from that method's parameter name,
    /// presumably the previously active TTBR value — see `Mapping::activate`).
    ///
    /// # Safety
    ///
    /// Direct delegation to `Mapping::activate`; the caller must uphold that
    /// method's safety contract.
    pub unsafe fn activate(&mut self) -> usize {
        unsafe { self.mapping.activate() }
    }

    /// Deactivates the mapping, restoring `previous_ttbr`.
    ///
    /// # Safety
    ///
    /// Direct delegation to `Mapping::deactivate`; the caller must uphold that
    /// method's safety contract. `previous_ttbr` should be the value returned
    /// by the matching [`Self::activate`] call.
    pub unsafe fn deactivate(&mut self, previous_ttbr: usize) {
        unsafe {
            self.mapping.deactivate(previous_ttbr);
        }
    }

    /// Maps `range` with the given flags and no constraints; see
    /// [`Self::map_range_with_constraints`].
    pub fn map_range(
        &mut self,
        range: &MemoryRegion,
        flags: R::Attributes,
    ) -> Result<(), MapError> {
        self.map_range_with_constraints(range, flags, Constraints::empty())
    }

    /// Identity-maps `range` (each virtual address to the equal physical
    /// address) with the given flags, honouring `constraints`.
    pub fn map_range_with_constraints(
        &mut self,
        range: &MemoryRegion,
        flags: R::Attributes,
        constraints: Constraints,
    ) -> Result<(), MapError> {
        // Identity mapping: the physical base equals the virtual start.
        let pa = IdTranslation::<R::Attributes>::virtual_to_physical(range.start());
        self.mapping.map_range(range, pa, flags, constraints)
    }

    /// Applies `f` to every entry covering `range`, allowing in-place updates.
    /// An `Err` from `f` is reported as a `MapError` (see
    /// `Mapping::modify_range` for the exact semantics).
    pub fn modify_range<F>(&mut self, range: &MemoryRegion, f: &F) -> Result<(), MapError>
    where
        F: Fn(&MemoryRegion, &mut UpdatableDescriptor<'_, R::Attributes>) -> Result<(), ()>
            + ?Sized,
    {
        self.mapping.modify_range(range, f)
    }

    /// Calls `f` for every entry covering `range` without modifying anything,
    /// passing the sub-region, the descriptor, and the table level.
    pub fn walk_range<F>(&self, range: &MemoryRegion, f: &mut F) -> Result<(), MapError>
    where
        F: FnMut(&MemoryRegion, &Descriptor<R::Attributes>, usize) -> Result<(), ()>,
    {
        self.mapping.walk_range(range, f)
    }

    /// Merges subtables into larger entries where possible; delegates to
    /// `Mapping::compact_subtables`.
    pub fn compact_subtables(&mut self) {
        self.mapping.compact_subtables();
    }

    /// Returns the physical address of the root page table.
    pub fn root_address(&self) -> PhysicalAddress {
        self.mapping.root_address()
    }

    /// Marks the mapping as active; delegates to `Mapping::mark_active` —
    /// confirm the intended use against that method's documentation.
    pub fn mark_active(&mut self) {
        self.mapping.mark_active();
    }

    /// Marks the mapping as inactive; delegates to `Mapping::mark_inactive`.
    pub fn mark_inactive(&mut self) {
        self.mapping.mark_inactive();
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::descriptor::El1Attributes;
use crate::paging::El1And0;
use crate::{
MapError, VirtualAddress,
paging::{BITS_PER_LEVEL, MemoryRegion, PAGE_SIZE},
};
// Exclusive upper bound of the VA space addressable with a level-1 root
// table (2^39 bytes).
const MAX_ADDRESS_FOR_ROOT_LEVEL_1: usize = 1 << 39;
// MAIR attribute index 0 — presumably configured as Device-nGnRE memory;
// confirm against the MAIR setup used with these tests.
const DEVICE_NGNRE: El1Attributes = El1Attributes::ATTRIBUTE_INDEX_0;
// MAIR attribute index 1, inner shareable — normal cacheable memory.
const NORMAL_CACHEABLE: El1Attributes =
    El1Attributes::ATTRIBUTE_INDEX_1.union(El1Attributes::INNER_SHAREABLE);
#[test]
fn map_valid() {
    // Various well-formed regions must map successfully while the pagetable
    // is active; each case activates, maps, then deactivates.

    // A single byte at the start of the address space.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let ttbr = unsafe { idmap.activate() };
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, 1),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        Ok(())
    );
    unsafe {
        idmap.deactivate(ttbr);
    }

    // The first two pages.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let ttbr = unsafe { idmap.activate() };
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, PAGE_SIZE * 2),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        Ok(())
    );
    unsafe {
        idmap.deactivate(ttbr);
    }

    // A byte at the very end of the addressable range.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let ttbr = unsafe { idmap.activate() };
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(
                MAX_ADDRESS_FOR_ROOT_LEVEL_1 - 1,
                MAX_ADDRESS_FOR_ROOT_LEVEL_1
            ),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        Ok(())
    );
    unsafe {
        idmap.deactivate(ttbr);
    }

    // A region crossing a page-table boundary (pages 1023..1025).
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let ttbr = unsafe { idmap.activate() };
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(PAGE_SIZE * 1023, PAGE_SIZE * 1025),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        Ok(())
    );
    unsafe {
        idmap.deactivate(ttbr);
    }

    // The whole addressable range at once.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let ttbr = unsafe { idmap.activate() };
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, MAX_ADDRESS_FOR_ROOT_LEVEL_1),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        Ok(())
    );
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn map_break_before_make() {
    const BLOCK_SIZE: usize = PAGE_SIZE << BITS_PER_LEVEL;

    // Pages mapped (without block mappings) while inactive can be remapped
    // with identical attributes while the map is active.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    idmap
        .map_range_with_constraints(
            &MemoryRegion::new(BLOCK_SIZE, 2 * BLOCK_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
            Constraints::NO_BLOCK_MAPPINGS,
        )
        .unwrap();
    let ttbr = unsafe { idmap.activate() };
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(BLOCK_SIZE, BLOCK_SIZE + PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        ),
        Ok(())
    );
    unsafe {
        idmap.deactivate(ttbr);
    }

    // Now start with a mapping made while inactive (block mappings allowed).
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    idmap
        .map_range(
            &MemoryRegion::new(BLOCK_SIZE, 2 * BLOCK_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .ok();
    let ttbr = unsafe { idmap.activate() };
    // Extending the region with identical attributes is allowed while active.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(BLOCK_SIZE - PAGE_SIZE, 2 * BLOCK_SIZE + PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        ),
        Ok(())
    );
    // Remapping a subset with identical attributes is also allowed.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(BLOCK_SIZE, BLOCK_SIZE + PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        ),
        Ok(())
    );
    // Changing attributes (adding READ_ONLY) on a live mapping violates
    // break-before-make.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(BLOCK_SIZE, BLOCK_SIZE + PAGE_SIZE),
            NORMAL_CACHEABLE
                | El1Attributes::VALID
                | El1Attributes::ACCESSED
                | El1Attributes::READ_ONLY,
        ),
        Err(MapError::BreakBeforeMakeViolation(MemoryRegion::new(
            BLOCK_SIZE,
            BLOCK_SIZE + PAGE_SIZE
        )))
    );
    // Only the already-live sub-range of a larger request is reported.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, BLOCK_SIZE + PAGE_SIZE),
            NORMAL_CACHEABLE
                | El1Attributes::VALID
                | El1Attributes::ACCESSED
                | El1Attributes::READ_ONLY,
        ),
        Err(MapError::BreakBeforeMakeViolation(MemoryRegion::new(
            BLOCK_SIZE,
            BLOCK_SIZE + PAGE_SIZE
        )))
    );
    // Mapping previously unmapped addresses with fresh attributes is fine.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, BLOCK_SIZE),
            NORMAL_CACHEABLE
                | El1Attributes::VALID
                | El1Attributes::ACCESSED
                | El1Attributes::READ_ONLY,
        ),
        Ok(())
    );
    // Changing the memory type of a live mapping is a violation (reported
    // for the first page).
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, BLOCK_SIZE),
            DEVICE_NGNRE
                | El1Attributes::VALID
                | El1Attributes::ACCESSED
                | El1Attributes::NON_GLOBAL,
        ),
        Err(MapError::BreakBeforeMakeViolation(MemoryRegion::new(
            0, PAGE_SIZE
        )))
    );
    // Invalidating (flags without VALID) is rejected for the
    // BLOCK_SIZE..BLOCK_SIZE+PAGE_SIZE sub-range here...
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(PAGE_SIZE, BLOCK_SIZE + PAGE_SIZE),
            NORMAL_CACHEABLE,
        ),
        Err(MapError::BreakBeforeMakeViolation(MemoryRegion::new(
            BLOCK_SIZE,
            BLOCK_SIZE + PAGE_SIZE
        )))
    );
    // ...but succeeds for this range.
    assert_eq!(
        idmap.map_range(&MemoryRegion::new(PAGE_SIZE, BLOCK_SIZE), NORMAL_CACHEABLE),
        Ok(())
    );
    // Remapping the pages as valid again succeeds.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, 2 * PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        ),
        Ok(())
    );
    // Making a live global mapping non-global is allowed...
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE
                | El1Attributes::VALID
                | El1Attributes::ACCESSED
                | El1Attributes::NON_GLOBAL,
        ),
        Ok(())
    );
    // ...but making the live non-global mapping global again is a violation.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        ),
        Err(MapError::BreakBeforeMakeViolation(MemoryRegion::new(
            0, PAGE_SIZE
        )))
    );
    unsafe {
        idmap.deactivate(ttbr);
    }
    // Once deactivated, attribute changes are allowed again.
    assert_eq!(
        idmap.map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        ),
        Ok(())
    );
}
#[test]
fn map_out_of_range() {
    // Regions extending past the level-1 root's addressable range must be
    // rejected with the address of the first page that does not fit.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let expected = Err(MapError::AddressRange(VirtualAddress(
        MAX_ADDRESS_FOR_ROOT_LEVEL_1 + PAGE_SIZE,
    )));

    // A region entirely past the end of the addressable range.
    let past_end = MemoryRegion::new(
        MAX_ADDRESS_FOR_ROOT_LEVEL_1,
        MAX_ADDRESS_FOR_ROOT_LEVEL_1 + 1,
    );
    assert_eq!(
        idmap.map_range(
            &past_end,
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        expected
    );

    // A region starting in range but running one byte past the end.
    let overrun = MemoryRegion::new(0, MAX_ADDRESS_FOR_ROOT_LEVEL_1 + 1);
    assert_eq!(
        idmap.map_range(
            &overrun,
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED
        ),
        expected
    );
}
#[test]
#[should_panic]
// Fix: dropped the redundant `-> ()` return type (clippy::unused_unit);
// behaviour is unchanged.
fn split_live_block_mapping() {
    // Map a whole block's worth of pages, then activate the pagetable.
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE << BITS_PER_LEVEL),
            NORMAL_CACHEABLE
                | El1Attributes::NON_GLOBAL
                | El1Attributes::READ_ONLY
                | El1Attributes::VALID
                | El1Attributes::ACCESSED,
        )
        .unwrap();
    let ttbr = unsafe { idmap.activate() };
    // Remapping a single page with identical attributes while live would
    // require splitting the block mapping; the test expects a panic from one
    // of the unwraps below (hence #[should_panic]).
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE
                | El1Attributes::NON_GLOBAL
                | El1Attributes::READ_ONLY
                | El1Attributes::VALID
                | El1Attributes::ACCESSED,
        )
        .unwrap();
    // Change attributes of the second page (drops READ_ONLY); deactivate
    // before unwrapping so the pagetable is not left active on failure.
    let r = idmap.map_range(
        &MemoryRegion::new(PAGE_SIZE, 2 * PAGE_SIZE),
        NORMAL_CACHEABLE
            | El1Attributes::NON_GLOBAL
            | El1Attributes::VALID
            | El1Attributes::ACCESSED,
    );
    unsafe { idmap.deactivate(ttbr) };
    r.unwrap();
}
/// Builds a level-1, ASID-1 identity map with the first two pages mapped
/// read-only, activates it, and returns the map together with the value
/// returned by `activate` (to be passed back to `deactivate`).
fn make_map() -> (IdMap<El1And0>, usize) {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let first_two_pages = MemoryRegion::new(0, PAGE_SIZE * 2);
    idmap
        .map_range(
            &first_two_pages,
            NORMAL_CACHEABLE
                | El1Attributes::NON_GLOBAL
                | El1Attributes::READ_ONLY
                | El1Attributes::VALID
                | El1Attributes::ACCESSED,
        )
        .unwrap();
    let previous_ttbr = unsafe { idmap.activate() };
    (idmap, previous_ttbr)
}
#[test]
fn update_backwards_range() {
    // modify_range over a region whose end precedes its start must fail.
    let (mut idmap, ttbr) = make_map();
    let backwards = MemoryRegion::new(PAGE_SIZE * 2, 1);
    let result = idmap.modify_range(&backwards, &|_range, entry| {
        entry.modify_flags(
            El1Attributes::SWFLAG_0,
            El1Attributes::from_bits(0usize).unwrap(),
        )
    });
    assert!(result.is_err());
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn update_range() {
    let (mut idmap, ttbr) = make_map();
    // A callback whose modify_flags call fails (it touches NON_GLOBAL on the
    // live mapping) makes the whole modify_range report an error.
    assert!(
        idmap
            .modify_range(&MemoryRegion::new(1, PAGE_SIZE), &|_range, entry| {
                if !entry.is_table() {
                    entry.modify_flags(El1Attributes::SWFLAG_0, El1Attributes::NON_GLOBAL)?;
                }
                Ok(())
            })
            .is_err()
    );
    // Setting only a software flag (clearing nothing) succeeds on the live
    // mapping.
    idmap
        .modify_range(&MemoryRegion::new(1, PAGE_SIZE), &|_range, entry| {
            if !entry.is_table() {
                entry.modify_flags(
                    El1Attributes::SWFLAG_0,
                    El1Attributes::from_bits(0usize).unwrap(),
                )?;
            }
            Ok(())
        })
        .unwrap();
    // Walk the same range again and verify the flag stuck and that each
    // non-table entry covers exactly one page.
    idmap
        .modify_range(&MemoryRegion::new(1, PAGE_SIZE), &|range, entry| {
            if !entry.is_table() {
                assert!(entry.flags().contains(El1Attributes::SWFLAG_0));
                assert_eq!(range.end() - range.start(), PAGE_SIZE);
            }
            Ok(())
        })
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn breakup_invalid_block() {
    // 2 MiB — the range covered by one level-2 entry with 4 KiB pages.
    const BLOCK_RANGE: usize = 0x200000;
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    let ttbr = unsafe { idmap.activate() };
    // Map the whole range as an invalid mapping (no VALID flag) carrying a
    // software flag.
    idmap
        .map_range(
            &MemoryRegion::new(0, BLOCK_RANGE),
            NORMAL_CACHEABLE | El1Attributes::NON_GLOBAL | El1Attributes::SWFLAG_0,
        )
        .unwrap();
    // Mapping just the first page as valid forces the invalid block to be
    // broken up into page entries.
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE
                | El1Attributes::NON_GLOBAL
                | El1Attributes::VALID
                | El1Attributes::ACCESSED,
        )
        .unwrap();
    // Every level-3 entry except the remapped first page must still carry
    // SWFLAG_0 (exactly one of "has flag" / "is first page" holds).
    idmap
        .modify_range(&MemoryRegion::new(0, BLOCK_RANGE), &|range, entry| {
            if entry.level() == 3 {
                let has_swflag = entry.flags().contains(El1Attributes::SWFLAG_0);
                let is_first_page = range.start().0 == 0usize;
                assert!(has_swflag != is_first_page);
            }
            Ok(())
        })
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn unmap_subtable() {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    // Level-1 root: 512^3 pages of addressable space.
    assert_eq!(idmap.size(), PAGE_SIZE * 512 * 512 * 512);
    let ttbr = unsafe { idmap.activate() };
    // Map a single page, which allocates subtables down to level 3.
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    // Unmap the full range of the first level-1 entry (512 * 512 pages).
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE * 512 * 512),
            El1Attributes::empty(),
        )
        .unwrap();
    // The whole space should now be covered by empty level-1 entries only,
    // i.e. the subtables were freed rather than left in place.
    idmap
        .walk_range(
            &MemoryRegion::new(0, idmap.size()),
            &mut |region, descriptor, level| {
                assert_eq!(region.len(), PAGE_SIZE * 512 * 512);
                assert_eq!(descriptor.bits(), 0);
                assert_eq!(level, 1);
                Ok(())
            },
        )
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn unmap_subtable_higher() {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    assert_eq!(idmap.size(), PAGE_SIZE * 512 * 512 * 512);
    let ttbr = unsafe { idmap.activate() };
    // Range covered by one level-1 (root-level) entry.
    const ROOT_GRANULARITY: usize = PAGE_SIZE * 512 * 512;
    // Same scenario as unmap_subtable but in the *second* root entry, to
    // exercise subtable removal away from address 0.
    idmap
        .map_range(
            &MemoryRegion::new(ROOT_GRANULARITY, ROOT_GRANULARITY + PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    // Unmap the second root entry's full range.
    idmap
        .map_range(
            &MemoryRegion::new(ROOT_GRANULARITY, ROOT_GRANULARITY * 2),
            El1Attributes::empty(),
        )
        .unwrap();
    // Everything should again be empty level-1 entries.
    idmap
        .walk_range(
            &MemoryRegion::new(0, idmap.size()),
            &mut |region, descriptor, level| {
                assert_eq!(region.len(), PAGE_SIZE * 512 * 512);
                assert_eq!(descriptor.bits(), 0);
                assert_eq!(level, 1);
                Ok(())
            },
        )
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn compact() {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    assert_eq!(idmap.size(), PAGE_SIZE * 512 * 512 * 512);
    let ttbr = unsafe { idmap.activate() };
    // Map two pages (allocating subtables), then unmap just those pages —
    // unlike unmap_subtable, this alone does not free the subtables.
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE * 2),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    idmap
        .map_range(&MemoryRegion::new(0, PAGE_SIZE * 2), El1Attributes::empty())
        .unwrap();
    // Compacting must fold the now-uniform subtables back into empty
    // level-1 entries.
    idmap.compact_subtables();
    idmap
        .walk_range(
            &MemoryRegion::new(0, idmap.size()),
            &mut |region, descriptor, level| {
                assert_eq!(region.len(), PAGE_SIZE * 512 * 512);
                assert_eq!(descriptor.bits(), 0);
                assert_eq!(level, 1);
                Ok(())
            },
        )
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn compact_blocks() {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    assert_eq!(idmap.size(), PAGE_SIZE * 512 * 512 * 512);
    let ttbr = unsafe { idmap.activate() };
    // Range covered by one level-2 entry.
    const BLOCK_SIZE: usize = PAGE_SIZE * 512;
    // Map two blocks' worth, then unmap the same range; the level-2
    // subtable(s) remain until compacted.
    idmap
        .map_range(
            &MemoryRegion::new(0, BLOCK_SIZE * 2),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    idmap
        .map_range(
            &MemoryRegion::new(0, BLOCK_SIZE * 2),
            El1Attributes::empty(),
        )
        .unwrap();
    idmap.compact_subtables();
    // After compaction only empty level-1 entries should remain.
    idmap
        .walk_range(
            &MemoryRegion::new(0, idmap.size()),
            &mut |region, descriptor, level| {
                assert_eq!(region.len(), PAGE_SIZE * 512 * 512);
                assert_eq!(descriptor.bits(), 0);
                assert_eq!(level, 1);
                Ok(())
            },
        )
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn split_table_zero() {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    // Mapping a single page forces higher-level entries to be split into
    // tables; the sibling entries of those split tables must be left
    // all-zero (invalid, no flags, output address 0).
    idmap
        .map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    idmap
        .walk_range(
            &MemoryRegion::new(PAGE_SIZE, PAGE_SIZE * 20),
            &mut |_, descriptor, _| {
                assert!(!descriptor.is_valid());
                assert_eq!(descriptor.bits(), 0);
                assert_eq!(descriptor.flags(), El1Attributes::empty());
                assert_eq!(descriptor.output_address(), PhysicalAddress(0));
                Ok(())
            },
        )
        .unwrap();
}
#[test]
fn modify_unmap_compact() {
    let mut idmap = IdMap::with_asid(1, 1, El1And0);
    assert_eq!(idmap.size(), PAGE_SIZE * 512 * 512 * 512);
    let ttbr = unsafe { idmap.activate() };
    // Map two pages, then clear them via modify_range (descriptor.set with
    // empty attributes) rather than map_range.
    idmap
        .map_range(
            &MemoryRegion::new(PAGE_SIZE, PAGE_SIZE * 3),
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    idmap
        .modify_range(
            &MemoryRegion::new(PAGE_SIZE, PAGE_SIZE * 3),
            &|_, descriptor| descriptor.set(PhysicalAddress(0), El1Attributes::empty()),
        )
        .unwrap();
    // Compaction must still detect the now-empty subtables and fold them
    // back into empty level-1 entries.
    idmap.compact_subtables();
    idmap
        .walk_range(
            &MemoryRegion::new(0, idmap.size()),
            &mut |region, descriptor, level| {
                assert_eq!(region.len(), PAGE_SIZE * 512 * 512);
                assert_eq!(descriptor.bits(), 0);
                assert_eq!(level, 1);
                Ok(())
            },
        )
        .unwrap();
    unsafe {
        idmap.deactivate(ttbr);
    }
}
#[test]
fn table_sizes() {
    // Each deeper root level covers 9 bits (one 512-entry level) less of the
    // address space: 48, 39, 30, 21 bits for levels 0..=3.
    let expected = [
        (0usize, 1usize << 48),
        (1, 1 << 39),
        (2, 1 << 30),
        (3, 1 << 21),
    ];
    for (rootlevel, size) in expected {
        assert_eq!(IdMap::<El1And0>::with_asid(1, rootlevel, El1And0).size(), size);
    }
}
#[test]
// Fix: the two statements crammed onto one line were split, matching the
// one-statement-per-line style of the rest of the file. Behaviour unchanged.
fn dont_use_l0_block_mapping() {
    let mut idmap = IdMap::with_asid(1, 0, El1And0);
    // The full range covered by one level-0 entry.
    let block_size = PAGE_SIZE * 512 * 512 * 512;
    let range = MemoryRegion::new(0, block_size);
    idmap
        .map_range(
            &range,
            NORMAL_CACHEABLE | El1Attributes::VALID | El1Attributes::ACCESSED,
        )
        .unwrap();
    // Even though the region fills a level-0 entry exactly, the mapping must
    // be made with level-1 entries (level-0 block descriptors are not used).
    assert_eq!(idmap.mapping.root.mapping_level(range.start()), Some(1));
    // After unmapping and compacting, no mapping level should remain.
    idmap.map_range(&range, El1Attributes::empty()).unwrap();
    idmap.compact_subtables();
    assert_eq!(idmap.mapping.root.mapping_level(range.start()), None);
}
}