use crate::MapError;
use crate::descriptor::{
Descriptor, El1Attributes, El23Attributes, PagingAttributes, PhysicalAddress, Stage2Attributes,
UpdatableDescriptor, VirtualAddress,
};
use crate::paging::private::IntoVaRange;
#[cfg(feature = "alloc")]
use alloc::alloc::{Layout, alloc_zeroed, dealloc, handle_alloc_error};
use bitflags::{Flags, bitflags};
#[cfg(all(not(test), target_arch = "aarch64"))]
use core::arch::asm;
use core::fmt::{self, Debug, Display, Formatter};
use core::marker::PhantomData;
use core::ops::Range;
use core::ptr::NonNull;
/// The bit position of the least significant virtual address bit translated by
/// the page tables, i.e. a 4 KiB translation granule.
const PAGE_SHIFT: usize = 12;
/// The level at which every entry is a page mapping rather than a table or block.
pub const LEAF_LEVEL: usize = 3;
/// The size of a page, in bytes.
pub const PAGE_SIZE: usize = 1 << PAGE_SHIFT;
/// The number of virtual address bits resolved at each level (512 entries per table).
pub const BITS_PER_LEVEL: usize = PAGE_SHIFT - 3;
/// Which virtual address range a set of page tables covers.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum VaRange {
    /// The lower virtual address range, installed via TTBR0.
    Lower,
    /// The upper virtual address range, installed via TTBR1.
    Upper,
}
/// A translation regime: the set of translation table base registers and TLB
/// maintenance instructions that a family of page tables is used with.
pub trait TranslationRegime: Copy + Clone + Debug + Eq + PartialEq + Send + Sync + 'static {
    /// The descriptor attribute type used by this regime.
    type Attributes: PagingAttributes;
    /// The address space identifier type; `()` for regimes without ASIDs.
    type Asid: Copy + Clone + Debug + Eq + PartialEq + Send + Sync + 'static;
    /// The virtual address range selector; `()` for regimes with a single TTBR.
    type VaRange: private::IntoVaRange
        + Copy
        + Clone
        + Debug
        + Eq
        + PartialEq
        + Send
        + Sync
        + 'static;
    /// Invalidates any TLB entries for the given virtual address.
    fn invalidate_va(va: VirtualAddress);
    /// Activates the page table rooted at `root_pa` by writing it (together
    /// with the ASID, where the regime has one) to the appropriate TTBR, and
    /// returns the previous value of that TTBR.
    ///
    /// # Safety
    ///
    /// The given table must map all memory that is in use, including the code
    /// performing the switch.
    unsafe fn activate(
        root_pa: PhysicalAddress,
        asid: Self::Asid,
        va_range: Self::VaRange,
    ) -> usize;
    /// Restores the TTBR to `previous_ttbr` and invalidates TLB entries for `asid`.
    ///
    /// # Safety
    ///
    /// The previous page table must still map all memory that is in use.
    unsafe fn deactivate(previous_ttbr: usize, asid: Self::Asid, va_range: Self::VaRange);
}
mod private {
use crate::paging::VaRange;
pub trait IntoVaRange {
fn into_va_range(self) -> VaRange;
}
impl IntoVaRange for VaRange {
fn into_va_range(self) -> VaRange {
self
}
}
impl IntoVaRange for () {
fn into_va_range(self) -> VaRange {
VaRange::Lower
}
}
}
/// The EL1&0 translation regime: stage 1 tables installed in `TTBR0_EL1` or
/// `TTBR1_EL1`, with the ASID carried in TTBR bits 63:48.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El1And0;
impl TranslationRegime for El1And0 {
type Attributes = El1Attributes;
type Asid = usize;
type VaRange = VaRange;
fn invalidate_va(va: VirtualAddress) {
#[allow(unused)]
let va = va.0 >> 12;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"tlbi vaae1is, {va}",
va = in(reg) va,
options(preserves_flags, nostack),
);
}
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn activate(root_pa: PhysicalAddress, asid: usize, va_range: VaRange) -> usize {
let mut previous_ttbr = usize::MAX;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
match va_range {
VaRange::Lower => asm!(
"mrs {previous_ttbr}, ttbr0_el1",
"msr ttbr0_el1, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0 | (asid << 48),
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
),
VaRange::Upper => asm!(
"mrs {previous_ttbr}, ttbr1_el1",
"msr ttbr1_el1, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0 | (asid << 48),
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
),
}
}
previous_ttbr
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn deactivate(previous_ttbr: usize, asid: usize, va_range: VaRange) {
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
match va_range {
VaRange::Lower => asm!(
"msr ttbr0_el1, {ttbrval}",
"isb",
"tlbi aside1, {asid}",
"dsb nsh",
"isb",
asid = in(reg) asid << 48,
ttbrval = in(reg) previous_ttbr,
options(preserves_flags),
),
VaRange::Upper => asm!(
"msr ttbr1_el1, {ttbrval}",
"isb",
"tlbi aside1, {asid}",
"dsb nsh",
"isb",
asid = in(reg) asid << 48,
ttbrval = in(reg) previous_ttbr,
options(preserves_flags),
),
}
}
}
}
/// The EL2&0 translation regime (VHE): stage 1 tables installed in
/// `TTBR0_EL2` or `TTBR1_EL2`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El2And0;
impl TranslationRegime for El2And0 {
type Attributes = El1Attributes;
type Asid = usize;
type VaRange = VaRange;
fn invalidate_va(va: VirtualAddress) {
#[allow(unused)]
let va = va.0 >> 12;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"tlbi vae2is, {va}",
va = in(reg) va,
options(preserves_flags, nostack),
);
}
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn activate(root_pa: PhysicalAddress, asid: usize, va_range: VaRange) -> usize {
let mut previous_ttbr = usize::MAX;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
match va_range {
VaRange::Lower => asm!(
"mrs {previous_ttbr}, ttbr0_el2",
"msr ttbr0_el2, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0 | (asid << 48),
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
),
VaRange::Upper => asm!(
"mrs {previous_ttbr}, s3_4_c2_c0_1", "msr s3_4_c2_c0_1, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0 | (asid << 48),
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
),
}
}
previous_ttbr
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn deactivate(previous_ttbr: usize, asid: usize, va_range: VaRange) {
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
match va_range {
VaRange::Lower => asm!(
"msr ttbr0_el2, {ttbrval}",
"isb",
"tlbi aside1, {asid}",
"dsb nsh",
"isb",
asid = in(reg) asid << 48,
ttbrval = in(reg) previous_ttbr,
options(preserves_flags),
),
VaRange::Upper => asm!(
"msr s3_4_c2_c0_1, {ttbrval}", "isb",
"tlbi aside1, {asid}",
"dsb nsh",
"isb",
asid = in(reg) asid << 48,
ttbrval = in(reg) previous_ttbr,
options(preserves_flags),
),
}
}
}
}
/// The single-range EL2 translation regime: stage 1 tables installed in
/// `TTBR0_EL2`, with no ASID. Deactivation is unsupported and panics.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El2;
impl TranslationRegime for El2 {
type Attributes = El23Attributes;
type Asid = ();
type VaRange = ();
fn invalidate_va(va: VirtualAddress) {
#[allow(unused)]
let va = va.0 >> 12;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"tlbi vae2is, {va}",
va = in(reg) va,
options(preserves_flags, nostack),
);
}
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn activate(root_pa: PhysicalAddress, asid: (), va_range: ()) -> usize {
let mut previous_ttbr = usize::MAX;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"mrs {previous_ttbr}, ttbr0_el2",
"msr ttbr0_el2, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0,
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
);
}
previous_ttbr
}
unsafe fn deactivate(_previous_ttbr: usize, _asid: (), _va_range: ()) {
panic!("EL2 page table can't safely be deactivated.");
}
}
/// The EL3 translation regime: stage 1 tables installed in `TTBR0_EL3`, with
/// no ASID. Deactivation is unsupported and panics.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El3;
impl TranslationRegime for El3 {
type Attributes = El23Attributes;
type Asid = ();
type VaRange = ();
fn invalidate_va(va: VirtualAddress) {
#[allow(unused)]
let va = va.0 >> 12;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"tlbi vae3is, {va}",
va = in(reg) va,
options(preserves_flags, nostack),
);
}
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn activate(root_pa: PhysicalAddress, asid: (), va_range: ()) -> usize {
let mut previous_ttbr = usize::MAX;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"mrs {previous_ttbr}, ttbr0_el3",
"msr ttbr0_el3, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0,
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
);
}
previous_ttbr
}
unsafe fn deactivate(_previous_ttbr: usize, _asid: (), _va_range: ()) {
panic!("EL3 page table can't safely be deactivated.");
}
}
/// The stage 2 translation regime: tables installed in `VTTBR_EL2`, mapping
/// intermediate physical addresses for the EL1&0 regime.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Stage2;
impl TranslationRegime for Stage2 {
type Attributes = Stage2Attributes;
type Asid = ();
type VaRange = ();
fn invalidate_va(va: VirtualAddress) {
#[allow(unused)]
let va = va.0 >> 12;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"tlbi ipas2e1is, {va}",
va = in(reg) va,
options(preserves_flags, nostack),
);
}
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn activate(root_pa: PhysicalAddress, asid: (), va_range: ()) -> usize {
let mut previous_ttbr = usize::MAX;
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"mrs {previous_ttbr}, vttbr_el2",
"msr vttbr_el2, {ttbrval}",
"isb",
ttbrval = in(reg) root_pa.0,
previous_ttbr = out(reg) previous_ttbr,
options(preserves_flags),
);
}
previous_ttbr
}
#[allow(
unused_mut,
unused_assignments,
unused_variables,
reason = "used only on aarch64"
)]
unsafe fn deactivate(previous_ttbr: usize, asid: (), va_range: ()) {
#[cfg(all(not(test), target_arch = "aarch64"))]
unsafe {
asm!(
"tlbi vmalls12e1",
"dsb nsh",
"isb",
"msr vttbr_el2, {ttbrval}",
"isb",
ttbrval = in(reg) previous_ttbr,
options(preserves_flags),
);
}
}
}
/// A page-aligned region of virtual address space, as used for mapping and
/// modification requests.
#[derive(Clone, Eq, PartialEq)]
pub struct MemoryRegion(Range<VirtualAddress>);
/// Returns the size in bytes of the address space covered by a single entry at
/// the given level.
pub(crate) fn granularity_at_level(level: usize) -> usize {
    PAGE_SIZE << ((LEAF_LEVEL - level) * BITS_PER_LEVEL)
}
/// An interface for allocating, freeing and locating page tables.
pub trait Translation<A: PagingAttributes> {
    /// Allocates a zeroed page table, returning a pointer to it and its physical address.
    fn allocate_table(&mut self) -> (NonNull<PageTable<A>>, PhysicalAddress);
    /// Frees a table from `allocate_table`.
    /// Safety: the table must no longer be referenced by any entry.
    unsafe fn deallocate_table(&mut self, page_table: NonNull<PageTable<A>>);
    /// Converts the physical address of a page table back into a usable pointer.
    fn physical_to_virtual(&self, pa: PhysicalAddress) -> NonNull<PageTable<A>>;
}
impl MemoryRegion {
pub const fn new(start: usize, end: usize) -> MemoryRegion {
MemoryRegion(
VirtualAddress(align_down(start, PAGE_SIZE))..VirtualAddress(align_up(end, PAGE_SIZE)),
)
}
pub const fn start(&self) -> VirtualAddress {
self.0.start
}
pub const fn end(&self) -> VirtualAddress {
self.0.end
}
pub const fn len(&self) -> usize {
self.0.end.0 - self.0.start.0
}
pub const fn is_empty(&self) -> bool {
self.0.start.0 == self.0.end.0
}
fn split(&self, level: usize) -> ChunkedIterator<'_> {
ChunkedIterator {
range: self,
granularity: granularity_at_level(level),
start: self.0.start.0,
}
}
pub(crate) fn is_block(&self, level: usize) -> bool {
let gran = granularity_at_level(level);
(self.0.start.0 | self.0.end.0) & (gran - 1) == 0
}
}
impl From<Range<VirtualAddress>> for MemoryRegion {
fn from(range: Range<VirtualAddress>) -> Self {
Self::new(range.start.0, range.end.0)
}
}
impl Display for MemoryRegion {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
write!(f, "{}..{}", self.0.start, self.0.end)
}
}
impl Debug for MemoryRegion {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
Display::fmt(self, f)
}
}
bitflags! {
    /// Constraints on how a region may be mapped.
    #[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
    pub struct Constraints: usize {
        /// Never use block mappings; map everything as individual pages.
        const NO_BLOCK_MAPPINGS = 1 << 0;
        /// Never set the contiguous hint bit in descriptors.
        const NO_CONTIGUOUS_HINT = 1 << 1;
    }
}
/// A complete hierarchy of page tables for a particular translation regime,
/// rooted at a table whose physical address can be installed in a TTBR.
pub struct RootTable<R: TranslationRegime, T: Translation<R::Attributes>> {
    table: PageTableWithLevel<T, R::Attributes>,
    translation: T,
    pa: PhysicalAddress,
    va_range: R::VaRange,
    _regime: PhantomData<R>,
}
impl<R: TranslationRegime<VaRange = ()>, T: Translation<R::Attributes>> RootTable<R, T> {
pub fn new(translation: T, level: usize, regime: R) -> Self {
Self::new_impl(translation, level, regime, ())
}
}
impl<R: TranslationRegime<VaRange = VaRange>, T: Translation<R::Attributes>> RootTable<R, T> {
pub fn with_va_range(translation: T, level: usize, regime: R, va_range: VaRange) -> Self {
Self::new_impl(translation, level, regime, va_range)
}
pub fn va_range(&self) -> VaRange {
self.va_range
}
}
impl<R: TranslationRegime, T: Translation<R::Attributes>> RootTable<R, T> {
fn new_impl(mut translation: T, level: usize, _regime: R, va_range: R::VaRange) -> Self {
if level > LEAF_LEVEL {
panic!("Invalid root table level {}.", level);
}
let (table, pa) = PageTableWithLevel::new(&mut translation, level);
RootTable {
table,
translation,
pa,
va_range,
_regime: PhantomData,
}
}
    /// Returns the size in bytes of the virtual address space covered by this root table.
    pub fn size(&self) -> usize {
granularity_at_level(self.table.level) << BITS_PER_LEVEL
}
    /// Maps the given range of virtual addresses to contiguous physical memory
    /// starting at `pa` with the given attributes. `TABLE_OR_PAGE` is managed
    /// internally, so passing it in `flags` is rejected.
    pub fn map_range(
&mut self,
range: &MemoryRegion,
pa: PhysicalAddress,
flags: R::Attributes,
constraints: Constraints,
) -> Result<(), MapError> {
if flags.contains(R::Attributes::TABLE_OR_PAGE) {
return Err(MapError::InvalidFlags(flags.bits()));
}
self.verify_region(range)?;
self.table
.map_range(&mut self.translation, range, pa, flags, constraints);
Ok(())
}
pub fn to_physical(&self) -> PhysicalAddress {
self.pa
}
pub fn translation(&self) -> &T {
&self.translation
}
pub(crate) fn modify_range<F>(
&mut self,
range: &MemoryRegion,
f: &F,
live: bool,
) -> Result<bool, MapError>
where
F: Fn(&MemoryRegion, &mut UpdatableDescriptor<R::Attributes>) -> Result<(), ()> + ?Sized,
{
self.verify_region(range)?;
self.table
.modify_range::<F, R>(&mut self.translation, range, f, live)
}
pub(crate) fn va_range_or_unit(&self) -> R::VaRange {
self.va_range
}
pub fn walk_range<F>(&self, range: &MemoryRegion, f: &mut F) -> Result<(), MapError>
where
F: FnMut(&MemoryRegion, &Descriptor<R::Attributes>, usize) -> Result<(), ()>,
{
self.visit_range(range, &mut |mr, desc, level| {
f(mr, desc, level).map_err(|_| MapError::PteUpdateFault(desc.bits()))
})
}
    /// Walks the hierarchy and frees any subtables that contain only empty entries.
    pub fn compact_subtables(&mut self) {
self.table.compact_subtables(&mut self.translation);
}
pub(crate) fn visit_range<F>(&self, range: &MemoryRegion, f: &mut F) -> Result<(), MapError>
where
F: FnMut(&MemoryRegion, &Descriptor<R::Attributes>, usize) -> Result<(), MapError>,
{
self.verify_region(range)?;
self.table.visit_range(&self.translation, range, f)
}
#[cfg(all(test, feature = "alloc"))]
pub(crate) fn mapping_level(&self, va: VirtualAddress) -> Option<usize> {
self.table.mapping_level(&self.translation, va)
}
fn verify_region(&self, region: &MemoryRegion) -> Result<(), MapError> {
if region.end() < region.start() {
return Err(MapError::RegionBackwards(region.clone()));
}
match self.va_range.into_va_range() {
VaRange::Lower => {
if (region.start().0 as isize) < 0 {
return Err(MapError::AddressRange(region.start()));
} else if region.end().0 > self.size() {
return Err(MapError::AddressRange(region.end()));
}
}
VaRange::Upper => {
if region.start().0 as isize >= 0
|| (region.start().0 as isize).unsigned_abs() > self.size()
{
return Err(MapError::AddressRange(region.start()));
}
}
}
Ok(())
}
}
impl<R: TranslationRegime, T: Translation<R::Attributes>> Debug for RootTable<R, T> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
writeln!(
f,
"RootTable {{ pa: {}, translation_regime: {:?}, va_range: {:?}, level: {}, table:",
self.pa, PhantomData::<R>, self.va_range, self.table.level
)?;
self.table.fmt_indented(f, &self.translation, 0)?;
write!(f, "}}")
}
}
impl<R: TranslationRegime, T: Translation<R::Attributes>> Drop for RootTable<R, T> {
fn drop(&mut self) {
unsafe { self.table.free(&mut self.translation) }
}
}
/// An iterator over the chunks of a [`MemoryRegion`], split so that no chunk
/// crosses an entry boundary at the given level's granularity.
struct ChunkedIterator<'a> {
    range: &'a MemoryRegion,
    granularity: usize,
    start: usize,
}
impl Iterator for ChunkedIterator<'_> {
type Item = MemoryRegion;
fn next(&mut self) -> Option<MemoryRegion> {
if !self.range.0.contains(&VirtualAddress(self.start)) {
return None;
}
let end = self
.range
.0
.end
.0
.min((self.start | (self.granularity - 1)) + 1);
let c = MemoryRegion::new(self.start, end);
self.start = end;
Some(c)
}
}
/// A pointer to a page table at a known level of the hierarchy. This does not
/// own the table; the caller (usually [`RootTable`]) manages its lifetime.
#[derive(Debug)]
pub(crate) struct PageTableWithLevel<T: Translation<A>, A: PagingAttributes> {
    table: NonNull<PageTable<A>>,
    level: usize,
    _translation: PhantomData<T>,
}
// SAFETY: the raw table pointer is treated as owned by the page table
// hierarchy, so thread safety reduces to that of the translation `T`.
unsafe impl<T: Translation<A> + Send, A: PagingAttributes> Send for PageTableWithLevel<T, A> {}
unsafe impl<T: Translation<A> + Sync, A: PagingAttributes> Sync for PageTableWithLevel<T, A> {}
impl<T: Translation<A>, A: PagingAttributes> PageTableWithLevel<T, A> {
    fn new(translation: &mut T, level: usize) -> (Self, PhysicalAddress) {
        assert!(level <= LEAF_LEVEL);
        let (table, pa) = translation.allocate_table();
        (Self::from_pointer(table, level), pa)
    }
pub(crate) fn from_pointer(table: NonNull<PageTable<A>>, level: usize) -> Self {
Self {
table,
level,
_translation: PhantomData,
}
}
fn get_entry(&self, va: VirtualAddress) -> &Descriptor<A> {
let shift = PAGE_SHIFT + (LEAF_LEVEL - self.level) * BITS_PER_LEVEL;
let index = (va.0 >> shift) % (1 << BITS_PER_LEVEL);
let table = unsafe { self.table.as_ref() };
&table.entries[index]
}
fn get_entry_mut(&mut self, va: VirtualAddress) -> &mut Descriptor<A> {
let shift = PAGE_SHIFT + (LEAF_LEVEL - self.level) * BITS_PER_LEVEL;
let index = (va.0 >> shift) % (1 << BITS_PER_LEVEL);
let table = unsafe { self.table.as_mut() };
&mut table.entries[index]
}
fn split_entry(
translation: &mut T,
chunk: &MemoryRegion,
entry: &mut Descriptor<A>,
level: usize,
) -> Self {
let granularity = granularity_at_level(level);
let (mut subtable, subtable_pa) = Self::new(translation, level + 1);
        let old_flags = entry.flags();
        let old_pa = entry.output_address();
        // If the old entry carried a mapping (valid, or invalid but holding
        // software flags or an output address), recreate it across the whole
        // granule in the new subtable so the split preserves it.
        if !old_flags.contains(A::TABLE_OR_PAGE) && (!old_flags.is_empty() || old_pa.0 != 0) {
            let a = align_down(chunk.0.start.0, granularity);
            let b = align_up(chunk.0.end.0, granularity);
            subtable.map_range(
                translation,
                &MemoryRegion::new(a, b),
                old_pa,
                old_flags,
                Constraints::empty(),
            );
        }
        // Point the old entry at the new subtable.
        entry.set(subtable_pa, A::TABLE_OR_PAGE | A::VALID);
subtable
}
fn map_range(
&mut self,
translation: &mut T,
range: &MemoryRegion,
mut pa: PhysicalAddress,
flags: A,
constraints: Constraints,
) {
let level = self.level;
let granularity = granularity_at_level(level);
for chunk in range.split(level) {
let entry = self.get_entry_mut(chunk.0.start);
            if level == LEAF_LEVEL {
                // Base case: level 3 entries are always page mappings.
                if flags.contains(A::VALID) {
                    entry.set(pa, flags | A::TABLE_OR_PAGE);
                } else {
                    entry.set(PhysicalAddress(0), flags);
                }
            } else if !entry.is_table_or_page()
                && entry.flags() == flags
                && entry.output_address().0 == pa.0 - (chunk.0.start.0 % granularity)
            {
                // An existing block entry already covers this chunk with the
                // same flags and a matching output address; leave it alone.
            } else if chunk.is_block(level)
                && !entry.is_table_or_page()
                && is_aligned(pa.0, granularity)
                && !constraints.contains(Constraints::NO_BLOCK_MAPPINGS)
                && level > 0
            {
                // The chunk fills a whole entry and both addresses are suitably
                // aligned, so use a block mapping (or an invalid entry that
                // just carries software flags).
                if flags.contains(A::VALID) {
                    entry.set(pa, flags);
                } else {
                    entry.set(PhysicalAddress(0), flags);
                }
            } else if chunk.is_block(level)
                && let Some(mut subtable) = entry.subtable(translation, level)
                && !flags.contains(A::VALID)
            {
                // An invalid mapping over a whole entry supersedes an existing
                // subtable, which can be freed.
                entry.set(PhysicalAddress(0), flags);
                unsafe {
                    subtable.free(translation);
                }
            } else {
                // Otherwise descend into the subtable, splitting an existing
                // block entry into a new subtable first if necessary.
                let mut subtable = entry
                    .subtable(translation, level)
                    .unwrap_or_else(|| Self::split_entry(translation, &chunk, entry, level));
                subtable.map_range(translation, &chunk, pa, flags, constraints);
            }
pa.0 += chunk.len();
}
}
fn fmt_indented(
&self,
f: &mut Formatter,
translation: &T,
indentation: usize,
) -> Result<(), fmt::Error> {
const WIDTH: usize = 3;
let table = unsafe { self.table.as_ref() };
let mut i = 0;
while i < table.entries.len() {
if let Some(subtable) = table.entries[i].subtable(translation, self.level) {
writeln!(
f,
"{:indentation$}{: <WIDTH$} : {:?}",
"", i, table.entries[i],
)?;
subtable.fmt_indented(f, translation, indentation + 2)?;
i += 1;
} else {
let first_contiguous = i;
let first_entry = table.entries[i].bits();
let granularity = granularity_at_level(self.level);
while i < table.entries.len()
&& (table.entries[i].bits() == first_entry
|| (first_entry != 0
&& table.entries[i].bits()
== first_entry + granularity * (i - first_contiguous)))
{
i += 1;
}
if i - 1 == first_contiguous {
write!(f, "{:indentation$}{: <WIDTH$} : ", "", first_contiguous)?;
} else {
write!(
f,
"{:indentation$}{: <WIDTH$}-{: <WIDTH$}: ",
"",
first_contiguous,
i - 1,
)?;
}
if first_entry == 0 {
writeln!(f, "0")?;
} else {
writeln!(f, "{:?}", Descriptor::<A>::new(first_entry))?;
}
}
}
Ok(())
}
unsafe fn free(&mut self, translation: &mut T) {
let table = unsafe { self.table.as_ref() };
for entry in &table.entries {
if let Some(mut subtable) = entry.subtable(translation, self.level) {
unsafe {
subtable.free(translation);
}
}
}
unsafe {
translation.deallocate_table(self.table);
}
}
fn modify_range<F, R: TranslationRegime<Attributes = A>>(
&mut self,
translation: &mut T,
range: &MemoryRegion,
f: &F,
live: bool,
) -> Result<bool, MapError>
where
F: Fn(&MemoryRegion, &mut UpdatableDescriptor<A>) -> Result<(), ()> + ?Sized,
{
let mut modified = false;
let level = self.level;
for chunk in range.split(level) {
let entry = self.get_entry_mut(chunk.0.start);
if let Some(mut subtable) = entry.subtable(translation, level).or_else(|| {
if !chunk.is_block(level) {
Some(Self::split_entry(translation, &chunk, entry, level))
} else {
None
}
}) {
modified |= subtable.modify_range::<F, R>(translation, &chunk, f, live)?;
} else {
let bits = entry.bits();
let mut desc = UpdatableDescriptor::new(entry, level, live);
f(&chunk, &mut desc).map_err(|_| MapError::PteUpdateFault(bits))?;
if live && desc.updated() {
R::invalidate_va(chunk.start());
modified = true;
}
}
}
Ok(modified)
}
fn visit_range<F, E>(&self, translation: &T, range: &MemoryRegion, f: &mut F) -> Result<(), E>
where
F: FnMut(&MemoryRegion, &Descriptor<A>, usize) -> Result<(), E>,
{
let level = self.level;
for chunk in range.split(level) {
let entry = self.get_entry(chunk.0.start);
if let Some(subtable) = entry.subtable(translation, level) {
subtable.visit_range(translation, &chunk, f)?;
} else {
f(&chunk, entry, level)?;
}
}
Ok(())
}
    /// Frees any subtables whose entries are all empty, and returns whether
    /// every entry in this table is itself empty afterwards.
    pub fn compact_subtables(&mut self, translation: &mut T) -> bool {
        let table = unsafe { self.table.as_mut() };
        let mut all_empty = true;
for entry in &mut table.entries {
if let Some(mut subtable) = entry.subtable(translation, self.level)
&& subtable.compact_subtables(translation)
{
entry.set(PhysicalAddress(0), A::default());
unsafe {
subtable.free(translation);
}
}
if entry.bits() != 0 {
all_empty = false;
}
}
all_empty
}
#[cfg(all(test, feature = "alloc"))]
pub(crate) fn mapping_level(&self, translation: &T, va: VirtualAddress) -> Option<usize> {
let entry = self.get_entry(va);
if let Some(subtable) = entry.subtable(translation, self.level) {
subtable.mapping_level(translation, va)
        } else if entry.is_valid() {
            Some(self.level)
        } else {
            None
        }
}
}
/// A single level of translation table: a page-sized, page-aligned array of
/// 512 descriptors.
#[repr(C, align(4096))]
pub struct PageTable<A: PagingAttributes> {
    entries: [Descriptor<A>; 1 << BITS_PER_LEVEL],
}
impl<A: PagingAttributes> PageTable<A> {
pub const EMPTY: Self = Self {
entries: [Descriptor::EMPTY; 1 << BITS_PER_LEVEL],
};
#[cfg(feature = "alloc")]
pub fn new() -> NonNull<Self> {
unsafe { allocate_zeroed() }
}
pub fn write_to(&self, page: &mut [u8]) -> Result<(), ()> {
if page.len() != self.entries.len() * size_of::<Descriptor<A>>() {
return Err(());
}
for (chunk, desc) in page
.chunks_exact_mut(size_of::<Descriptor<A>>())
.zip(self.entries.iter())
{
chunk.copy_from_slice(&desc.bits().to_le_bytes());
}
Ok(())
}
}
impl<A: PagingAttributes> Default for PageTable<A> {
fn default() -> Self {
Self::EMPTY
}
}
#[cfg(feature = "alloc")]
/// Heap-allocates a zeroed `T`, aborting via `handle_alloc_error` on failure.
unsafe fn allocate_zeroed<T>() -> NonNull<T> {
let layout = Layout::new::<T>();
assert_ne!(layout.size(), 0);
let pointer = unsafe { alloc_zeroed(layout) };
if pointer.is_null() {
handle_alloc_error(layout);
}
unsafe { NonNull::new_unchecked(pointer as *mut T) }
}
#[cfg(feature = "alloc")]
/// Frees a `T` allocated by `allocate_zeroed`. The pointer must not be used afterwards.
pub(crate) unsafe fn deallocate<T>(ptr: NonNull<T>) {
let layout = Layout::new::<T>();
unsafe {
dealloc(ptr.as_ptr() as *mut u8, layout);
}
}
/// Rounds `value` down to the nearest multiple of `alignment`, which must be a power of two.
const fn align_down(value: usize, alignment: usize) -> usize {
    value & !(alignment - 1)
}
/// Rounds `value` up to the nearest multiple of `alignment` (a power of two); `value` must be non-zero.
const fn align_up(value: usize, alignment: usize) -> usize {
    ((value - 1) | (alignment - 1)) + 1
}
/// Returns whether `value` is a multiple of `alignment`, which must be a power of two.
pub(crate) const fn is_aligned(value: usize, alignment: usize) -> bool {
    value & (alignment - 1) == 0
}
#[cfg(test)]
mod tests {
use super::*;
#[cfg(feature = "alloc")]
use crate::target::TargetAllocator;
#[cfg(feature = "alloc")]
use alloc::{format, string::ToString, vec, vec::Vec};
#[cfg(feature = "alloc")]
#[test]
fn display_memory_region() {
let region = MemoryRegion::new(0x1234, 0x56789);
assert_eq!(
            &region.to_string(),
"0x0000000000001000..0x0000000000057000"
);
assert_eq!(
&format!("{:?}", region),
"0x0000000000001000..0x0000000000057000"
);
}
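    // MemoryRegion::new aligns outwards to page boundaries, so the accessors
    // reflect the aligned bounds rather than the raw arguments.
    #[test]
    fn memory_region_len() {
        let region = MemoryRegion::new(0x1234, 0x56789);
        assert_eq!(region.start(), VirtualAddress(0x1000));
        assert_eq!(region.end(), VirtualAddress(0x57000));
        assert_eq!(region.len(), 0x56000);
        assert!(!region.is_empty());
        assert!(MemoryRegion::new(0x1000, 0x1000).is_empty());
    }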
#[test]
fn subtract_virtual_address() {
let low = VirtualAddress(0x12);
let high = VirtualAddress(0x1234);
assert_eq!(high - low, 0x1222);
}
#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn subtract_virtual_address_overflow() {
let low = VirtualAddress(0x12);
let high = VirtualAddress(0x1234);
let _ = low - high;
}
#[test]
fn add_virtual_address() {
assert_eq!(VirtualAddress(0x1234) + 0x42, VirtualAddress(0x1276));
}
#[test]
fn subtract_physical_address() {
let low = PhysicalAddress(0x12);
let high = PhysicalAddress(0x1234);
assert_eq!(high - low, 0x1222);
}
#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn subtract_physical_address_overflow() {
let low = PhysicalAddress(0x12);
let high = PhysicalAddress(0x1234);
let _ = low - high;
}
#[test]
fn add_physical_address() {
assert_eq!(PhysicalAddress(0x1234) + 0x42, PhysicalAddress(0x1276));
}
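    // Sanity check of the alignment helpers' contract: align_down rounds
    // towards zero and align_up away from it. (align_up assumes a non-zero
    // value, as `value - 1` would otherwise overflow.)
    #[test]
    fn alignment_helpers() {
        assert_eq!(align_down(0x1fff, 0x1000), 0x1000);
        assert_eq!(align_up(0x1001, 0x1000), 0x2000);
        assert_eq!(align_up(0x1000, 0x1000), 0x1000);
        assert!(is_aligned(0x20_0000, 0x20_0000));
        assert!(!is_aligned(0x20_1000, 0x20_0000));
    }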
#[test]
fn invalid_descriptor() {
let desc = Descriptor::<El1Attributes>::new(0usize);
assert!(!desc.is_valid());
assert!(!desc.flags().contains(El1Attributes::VALID));
}
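    // write_to serialises descriptors little-endian into a page-sized buffer
    // and rejects buffers of any other length. This relies on EMPTY having
    // all descriptor bits clear, matching the zeroed allocation path.
    #[test]
    fn write_page_table_to_buffer() {
        let table = PageTable::<El1Attributes>::EMPTY;
        let mut short = [0xffu8; PAGE_SIZE - 1];
        assert_eq!(table.write_to(&mut short), Err(()));
        let mut page = [0xffu8; PAGE_SIZE];
        assert_eq!(table.write_to(&mut page), Ok(()));
        assert!(page.iter().all(|&byte| byte == 0));
    }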
#[test]
fn set_descriptor() {
const PHYSICAL_ADDRESS: usize = 0x12340000;
let mut desc = Descriptor::<El1Attributes>::new(0usize);
assert!(!desc.is_valid());
desc.set(
PhysicalAddress(PHYSICAL_ADDRESS),
El1Attributes::TABLE_OR_PAGE
| El1Attributes::USER
| El1Attributes::SWFLAG_1
| El1Attributes::VALID,
);
assert!(desc.is_valid());
assert_eq!(
desc.flags(),
El1Attributes::TABLE_OR_PAGE
| El1Attributes::USER
| El1Attributes::SWFLAG_1
| El1Attributes::VALID
);
assert_eq!(desc.output_address(), PhysicalAddress(PHYSICAL_ADDRESS));
}
#[test]
fn modify_descriptor_flags() {
let mut desc = Descriptor::<El1Attributes>::new(0usize);
assert!(!desc.is_valid());
desc.set(
PhysicalAddress(0x12340000),
El1Attributes::TABLE_OR_PAGE | El1Attributes::USER | El1Attributes::SWFLAG_1,
);
UpdatableDescriptor::new(&mut desc, 3, true)
.modify_flags(
El1Attributes::DBM | El1Attributes::SWFLAG_3,
El1Attributes::VALID | El1Attributes::SWFLAG_1,
)
.unwrap();
assert!(!desc.is_valid());
assert_eq!(
desc.flags(),
El1Attributes::TABLE_OR_PAGE
| El1Attributes::USER
| El1Attributes::SWFLAG_3
| El1Attributes::DBM
);
}
#[test]
#[should_panic]
fn modify_descriptor_table_or_page_flag() {
let mut desc = Descriptor::<El1Attributes>::new(0usize);
assert!(!desc.is_valid());
desc.set(
PhysicalAddress(0x12340000),
El1Attributes::TABLE_OR_PAGE | El1Attributes::USER | El1Attributes::SWFLAG_1,
);
UpdatableDescriptor::new(&mut desc, 3, false)
.modify_flags(El1Attributes::VALID, El1Attributes::TABLE_OR_PAGE)
.unwrap();
}
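    // map_range refuses attributes that set TABLE_OR_PAGE, since that bit is
    // managed internally by the table walker.
    #[cfg(feature = "alloc")]
    #[test]
    fn map_range_invalid_flags() {
        let mut table =
            RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
        let result = table.map_range(
            &MemoryRegion::new(0, PAGE_SIZE),
            PhysicalAddress(0),
            El1Attributes::VALID | El1Attributes::TABLE_OR_PAGE,
            Constraints::empty(),
        );
        assert!(matches!(result, Err(MapError::InvalidFlags(_))));
    }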
#[cfg(feature = "alloc")]
#[test]
fn unaligned_chunks() {
let region = MemoryRegion::new(0x0000_2000, 0x0020_5000);
let chunks = region.split(LEAF_LEVEL - 1).collect::<Vec<_>>();
assert_eq!(
chunks,
vec![
MemoryRegion::new(0x0000_2000, 0x0020_0000),
MemoryRegion::new(0x0020_0000, 0x0020_5000),
]
);
}
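    // is_block requires both endpoints to be aligned to the granularity of
    // the given level.
    #[test]
    fn block_alignment() {
        let aligned = MemoryRegion::new(0x20_0000, 0x40_0000);
        assert!(aligned.is_block(2)); // both ends on 2 MiB boundaries
        assert!(aligned.is_block(3)); // trivially page-aligned
        let unaligned = MemoryRegion::new(0x1000, 0x20_0000);
        assert!(!unaligned.is_block(2)); // start is not 2 MiB aligned
    }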
#[test]
fn table_or_page() {
assert!(!Descriptor::<El1Attributes>::new(0b00).is_table_or_page());
assert!(!Descriptor::<El1Attributes>::new(0b10).is_table_or_page());
assert!(!Descriptor::<El1Attributes>::new(0b01).is_table_or_page());
assert!(Descriptor::<El1Attributes>::new(0b11).is_table_or_page());
}
#[test]
fn table_or_page_unknown_bits() {
const UNKNOWN: usize = 1 << 50 | 1 << 52;
assert!(!Descriptor::<El1Attributes>::new(UNKNOWN | 0b00).is_table_or_page());
assert!(!Descriptor::<El1Attributes>::new(UNKNOWN | 0b10).is_table_or_page());
assert!(!Descriptor::<El1Attributes>::new(UNKNOWN | 0b01).is_table_or_page());
assert!(Descriptor::<El1Attributes>::new(UNKNOWN | 0b11).is_table_or_page());
}
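    // Sanity check: with a 4 KiB granule and 9 bits per level, each level
    // covers 512 times the level below it.
    #[test]
    fn granularity_per_level() {
        assert_eq!(granularity_at_level(3), 0x1000); // 4 KiB page
        assert_eq!(granularity_at_level(2), 0x20_0000); // 2 MiB block
        assert_eq!(granularity_at_level(1), 0x4000_0000); // 1 GiB block
        assert_eq!(granularity_at_level(0), 0x80_0000_0000); // 512 GiB
    }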
#[cfg(feature = "alloc")]
#[test]
fn debug_roottable_empty() {
let table = RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
assert_eq!(
format!("{table:?}"),
"RootTable { pa: 0x0000000000000000, translation_regime: PhantomData<aarch64_paging::paging::El1And0>, va_range: Lower, level: 1, table:
0 -511: 0
}"
);
}
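    // verify_region bounds-checks against the covered address space: a
    // level-1 root covers 512 GiB, so anything above 1 << 39 is rejected.
    #[cfg(feature = "alloc")]
    #[test]
    fn map_range_out_of_range() {
        let mut table =
            RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
        assert_eq!(table.size(), 1 << 39);
        let result = table.map_range(
            &MemoryRegion::new(1 << 39, (1 << 39) + PAGE_SIZE),
            PhysicalAddress(0),
            El1Attributes::VALID,
            Constraints::empty(),
        );
        assert!(matches!(result, Err(MapError::AddressRange(_))));
    }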
#[cfg(feature = "alloc")]
#[test]
fn debug_roottable_contiguous() {
let mut table =
RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
table
.map_range(
&MemoryRegion::new(PAGE_SIZE * 3, PAGE_SIZE * 6),
PhysicalAddress(PAGE_SIZE * 3),
El1Attributes::VALID | El1Attributes::NON_GLOBAL,
Constraints::empty(),
)
.unwrap();
table
.map_range(
&MemoryRegion::new(PAGE_SIZE * 6, PAGE_SIZE * 7),
PhysicalAddress(PAGE_SIZE * 6),
El1Attributes::VALID | El1Attributes::READ_ONLY,
Constraints::empty(),
)
.unwrap();
table
.map_range(
&MemoryRegion::new(PAGE_SIZE * 8, PAGE_SIZE * 9),
PhysicalAddress(PAGE_SIZE * 8),
El1Attributes::VALID | El1Attributes::READ_ONLY,
Constraints::empty(),
)
.unwrap();
assert_eq!(
format!("{table:?}"),
"RootTable { pa: 0x0000000000000000, translation_regime: PhantomData<aarch64_paging::paging::El1And0>, va_range: Lower, level: 1, table:
0 : 0x00000000001003 (0x0000000000001000, El1Attributes(VALID | TABLE_OR_PAGE))
0 : 0x00000000002003 (0x0000000000002000, El1Attributes(VALID | TABLE_OR_PAGE))
0 -2 : 0\n 3 -5 : 0x00000000003803 (0x0000000000003000, El1Attributes(VALID | TABLE_OR_PAGE | NON_GLOBAL))
6 : 0x00000000006083 (0x0000000000006000, El1Attributes(VALID | TABLE_OR_PAGE | READ_ONLY))
7 : 0
8 : 0x00000000008083 (0x0000000000008000, El1Attributes(VALID | TABLE_OR_PAGE | READ_ONLY))
9 -511: 0
1 -511: 0
1 -511: 0
}"
);
}
#[cfg(feature = "alloc")]
#[test]
fn debug_roottable_contiguous_block() {
let mut table =
RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
const BLOCK_SIZE: usize = PAGE_SIZE * 512;
table
.map_range(
&MemoryRegion::new(BLOCK_SIZE * 3, BLOCK_SIZE * 6),
PhysicalAddress(BLOCK_SIZE * 3),
El1Attributes::VALID | El1Attributes::NON_GLOBAL,
Constraints::empty(),
)
.unwrap();
table
.map_range(
&MemoryRegion::new(BLOCK_SIZE * 6, BLOCK_SIZE * 7),
PhysicalAddress(BLOCK_SIZE * 6),
El1Attributes::VALID | El1Attributes::READ_ONLY,
Constraints::empty(),
)
.unwrap();
table
.map_range(
&MemoryRegion::new(BLOCK_SIZE * 8, BLOCK_SIZE * 9),
PhysicalAddress(BLOCK_SIZE * 8),
El1Attributes::VALID | El1Attributes::READ_ONLY,
Constraints::empty(),
)
.unwrap();
assert_eq!(
format!("{table:?}"),
"RootTable { pa: 0x0000000000000000, translation_regime: PhantomData<aarch64_paging::paging::El1And0>, va_range: Lower, level: 1, table:
0 : 0x00000000001003 (0x0000000000001000, El1Attributes(VALID | TABLE_OR_PAGE))
0 -2 : 0
3 -5 : 0x00000000600801 (0x0000000000600000, El1Attributes(VALID | NON_GLOBAL))
6 : 0x00000000c00081 (0x0000000000c00000, El1Attributes(VALID | READ_ONLY))
7 : 0
8 : 0x00000001000081 (0x0000000001000000, El1Attributes(VALID | READ_ONLY))
9 -511: 0
1 -511: 0
}"
);
}
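    // mapping_level reports the level at which a VA's walk terminates: a
    // 2 MiB aligned range becomes a level-2 block, a single page a level-3
    // leaf, and an unmapped VA yields None.
    #[cfg(feature = "alloc")]
    #[test]
    fn mapping_levels() {
        const BLOCK_SIZE: usize = PAGE_SIZE * 512;
        let mut table =
            RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
        table
            .map_range(
                &MemoryRegion::new(BLOCK_SIZE, BLOCK_SIZE * 2),
                PhysicalAddress(BLOCK_SIZE),
                El1Attributes::VALID,
                Constraints::empty(),
            )
            .unwrap();
        table
            .map_range(
                &MemoryRegion::new(0, PAGE_SIZE),
                PhysicalAddress(0),
                El1Attributes::VALID,
                Constraints::empty(),
            )
            .unwrap();
        assert_eq!(table.mapping_level(VirtualAddress(BLOCK_SIZE)), Some(2));
        assert_eq!(table.mapping_level(VirtualAddress(0)), Some(3));
        assert_eq!(table.mapping_level(VirtualAddress(PAGE_SIZE)), None);
    }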
}