1use crate::MapError;
9use crate::descriptor::{
10 Descriptor, El1Attributes, El23Attributes, PagingAttributes, PhysicalAddress, Stage2Attributes,
11 UpdatableDescriptor, VirtualAddress,
12};
13
14use crate::paging::private::IntoVaRange;
15#[cfg(feature = "alloc")]
16use alloc::alloc::{Layout, alloc_zeroed, dealloc, handle_alloc_error};
17use bitflags::{Flags, bitflags};
18#[cfg(all(not(test), target_arch = "aarch64"))]
19use core::arch::asm;
20use core::fmt::{self, Debug, Display, Formatter};
21use core::marker::PhantomData;
22use core::ops::Range;
23use core::ptr::NonNull;
24
/// Number of VA bits covered by a single 4 KiB page (i.e. log2 of the page size).
const PAGE_SHIFT: usize = 12;

/// The deepest translation table level; entries at this level map individual pages.
pub const LEAF_LEVEL: usize = 3;

/// The size of a translation granule (and of one page table), in bytes: 4 KiB.
pub const PAGE_SIZE: usize = 1 << PAGE_SHIFT;

/// Number of VA bits decoded per translation level: 9, because a 4 KiB table
/// holds 2^9 descriptors of 8 (2^3) bytes each.
pub const BITS_PER_LEVEL: usize = PAGE_SHIFT - 3;
36
/// Selects which half of the virtual address space a page table serves, and hence
/// which translation table base register (TTBR0 or TTBR1) it is programmed into.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum VaRange {
    /// The lower virtual address range, translated via TTBR0.
    Lower,
    /// The upper virtual address range, translated via TTBR1.
    Upper,
}
47
/// An aarch64 translation regime: fixes the descriptor attribute format, the ASID
/// type and the available VA ranges, and knows how to program the corresponding
/// TTBR and perform TLB maintenance for that regime.
pub trait TranslationRegime: Copy + Clone + Debug + Eq + PartialEq + Send + Sync + 'static {
    /// The descriptor attribute type used by page tables for this regime.
    type Attributes: PagingAttributes;

    /// The Address Space ID type: `usize` for regimes with ASIDs, `()` for
    /// regimes without.
    type Asid: Copy + Clone + Debug + Eq + PartialEq + Send + Sync + 'static;

    /// The VA range selector: [`VaRange`] for regimes with two TTBRs, `()` for
    /// regimes with only a lower range.
    type VaRange: private::IntoVaRange
        + Copy
        + Clone
        + Debug
        + Eq
        + PartialEq
        + Send
        + Sync
        + 'static;

    /// Invalidates any TLB entries for the given virtual address in this regime.
    /// (The implementations in this file compile to a no-op outside aarch64,
    /// e.g. when running the host-side tests.)
    fn invalidate_va(va: VirtualAddress);

    /// Activates the page table rooted at `root_pa` by writing it (combined with
    /// `asid` where the regime supports ASIDs) to the appropriate TTBR, and
    /// returns the previous TTBR value so it can later be restored via
    /// [`deactivate`](Self::deactivate).
    ///
    /// # Safety
    /// The caller must ensure the table at `root_pa` is a valid page table that
    /// maps everything needed while it is active.
    unsafe fn activate(
        root_pa: PhysicalAddress,
        asid: Self::Asid,
        va_range: Self::VaRange,
    ) -> usize;

    /// Restores `previous_ttbr` (as returned by [`activate`](Self::activate)) and
    /// performs the TLB maintenance needed so the old mapping is no longer used.
    ///
    /// # Safety
    /// `previous_ttbr` must be a value previously returned by `activate` that is
    /// still safe to switch back to.
    unsafe fn deactivate(previous_ttbr: usize, asid: Self::Asid, va_range: Self::VaRange);
}
89
/// Sealed helper module: converts a regime's `VaRange` associated type into the
/// concrete [`VaRange`] enum so generic code can branch on it, without exposing
/// the conversion trait to users of the crate.
mod private {
    use crate::paging::VaRange;

    /// Conversion from a regime-specific VA-range type to the concrete enum.
    pub trait IntoVaRange {
        fn into_va_range(self) -> VaRange;
    }

    // The concrete enum converts to itself.
    impl IntoVaRange for VaRange {
        fn into_va_range(self) -> VaRange {
            self
        }
    }

    // Regimes with a single address range use `()`; they always behave as the
    // lower range.
    impl IntoVaRange for () {
        fn into_va_range(self) -> VaRange {
            VaRange::Lower
        }
    }
}
109
/// Marker for the EL1&0 stage 1 translation regime (TTBR0_EL1/TTBR1_EL1), which
/// supports ASIDs and both VA ranges.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El1And0;

impl TranslationRegime for El1And0 {
    type Attributes = El1Attributes;

    // ASIDs are supported; the ASID is written into TTBRn_EL1 bits [63:48] below.
    type Asid = usize;
    type VaRange = VaRange;

    fn invalidate_va(va: VirtualAddress) {
        // The TLBI operand is the VA shifted right by the page shift (12 bits).
        #[allow(unused)]
        let va = va.0 >> 12;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        // SAFETY (review note): `tlbi vaae1is` invalidates by VA across all
        // ASIDs for the EL1&0 regime; it has no memory operands and, per the
        // stated options, clobbers neither flags nor the stack.
        unsafe {
            asm!(
                "tlbi vaae1is, {va}",
                va = in(reg) va,
                options(preserves_flags, nostack),
            );
        }
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn activate(root_pa: PhysicalAddress, asid: usize, va_range: VaRange) -> usize {
        // Sentinel returned on non-aarch64 builds, where no register is read.
        let mut previous_ttbr = usize::MAX;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Program the TTBR selected by `va_range` with the root PA plus the
            // ASID in bits [63:48], reading back the old value for later restore.
            match va_range {
                VaRange::Lower => asm!(
                    "mrs {previous_ttbr}, ttbr0_el1",
                    "msr ttbr0_el1, {ttbrval}",
                    "isb",
                    ttbrval = in(reg) root_pa.0 | (asid << 48),
                    previous_ttbr = out(reg) previous_ttbr,
                    options(preserves_flags),
                ),
                VaRange::Upper => asm!(
                    "mrs {previous_ttbr}, ttbr1_el1",
                    "msr ttbr1_el1, {ttbrval}",
                    "isb",
                    ttbrval = in(reg) root_pa.0 | (asid << 48),
                    previous_ttbr = out(reg) previous_ttbr,
                    options(preserves_flags),
                ),
            }
        }
        previous_ttbr
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn deactivate(previous_ttbr: usize, asid: usize, va_range: VaRange) {
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Restore the previous TTBR, then invalidate all TLB entries for the
            // ASID that was just switched away from (`tlbi aside1` takes the
            // ASID in bits [63:48] of its operand).
            match va_range {
                VaRange::Lower => asm!(
                    "msr ttbr0_el1, {ttbrval}",
                    "isb",
                    "tlbi aside1, {asid}",
                    "dsb nsh",
                    "isb",
                    asid = in(reg) asid << 48,
                    ttbrval = in(reg) previous_ttbr,
                    options(preserves_flags),
                ),
                VaRange::Upper => asm!(
                    "msr ttbr1_el1, {ttbrval}",
                    "isb",
                    "tlbi aside1, {asid}",
                    "dsb nsh",
                    "isb",
                    asid = in(reg) asid << 48,
                    ttbrval = in(reg) previous_ttbr,
                    options(preserves_flags),
                ),
            }
        }
    }
}
205
/// Marker for the EL2&0 stage 1 translation regime (VHE), with ASIDs and both
/// VA ranges (TTBR0_EL2/TTBR1_EL2).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El2And0;

impl TranslationRegime for El2And0 {
    // Uses the same attribute format as EL1&0 (two privilege levels).
    type Attributes = El1Attributes;

    type Asid = usize;
    type VaRange = VaRange;

    fn invalidate_va(va: VirtualAddress) {
        // The TLBI operand is the VA shifted right by the page shift (12 bits).
        #[allow(unused)]
        let va = va.0 >> 12;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        // NOTE(review): `tlbi vae2is` matches by VA for the current ASID only,
        // while the EL1&0 impl uses the all-ASID form (`vaae1is`) — confirm
        // this asymmetry is intentional for the EL2&0 regime.
        unsafe {
            asm!(
                "tlbi vae2is, {va}",
                va = in(reg) va,
                options(preserves_flags, nostack),
            );
        }
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn activate(root_pa: PhysicalAddress, asid: usize, va_range: VaRange) -> usize {
        // Sentinel returned on non-aarch64 builds, where no register is read.
        let mut previous_ttbr = usize::MAX;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Program the selected TTBR with the root PA plus the ASID in bits
            // [63:48], reading back the old value for later restore.
            match va_range {
                VaRange::Lower => asm!(
                    "mrs {previous_ttbr}, ttbr0_el2",
                    "msr ttbr0_el2, {ttbrval}",
                    "isb",
                    ttbrval = in(reg) root_pa.0 | (asid << 48),
                    previous_ttbr = out(reg) previous_ttbr,
                    options(preserves_flags),
                ),
                // `s3_4_c2_c0_1` is presumably the raw system-register encoding
                // of TTBR1_EL2, spelled numerically for assemblers that lack the
                // VHE register name — TODO confirm the encoding.
                VaRange::Upper => asm!(
                    "mrs {previous_ttbr}, s3_4_c2_c0_1",
                    "msr s3_4_c2_c0_1, {ttbrval}",
                    "isb",
                    ttbrval = in(reg) root_pa.0 | (asid << 48),
                    previous_ttbr = out(reg) previous_ttbr,
                    options(preserves_flags),
                ),
            }
        }
        previous_ttbr
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn deactivate(previous_ttbr: usize, asid: usize, va_range: VaRange) {
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Restore the previous TTBR, then invalidate the outgoing ASID's
            // TLB entries (ASID in bits [63:48] of the operand).
            match va_range {
                VaRange::Lower => asm!(
                    "msr ttbr0_el2, {ttbrval}",
                    "isb",
                    "tlbi aside1, {asid}",
                    "dsb nsh",
                    "isb",
                    asid = in(reg) asid << 48,
                    ttbrval = in(reg) previous_ttbr,
                    options(preserves_flags),
                ),
                VaRange::Upper => asm!(
                    "msr s3_4_c2_c0_1, {ttbrval}",
                    "isb",
                    "tlbi aside1, {asid}",
                    "dsb nsh",
                    "isb",
                    asid = in(reg) asid << 48,
                    ttbrval = in(reg) previous_ttbr,
                    options(preserves_flags),
                ),
            }
        }
    }
}
301
/// Marker for the EL2 stage 1 translation regime (non-VHE hypervisor): a single
/// lower VA range via TTBR0_EL2, no ASIDs.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El2;

impl TranslationRegime for El2 {
    type Attributes = El23Attributes;

    // No ASIDs and only a lower VA range in this regime.
    type Asid = ();
    type VaRange = ();

    fn invalidate_va(va: VirtualAddress) {
        // The TLBI operand is the VA shifted right by the page shift (12 bits).
        #[allow(unused)]
        let va = va.0 >> 12;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        // SAFETY (review note): TLB maintenance only; no memory operands, flags
        // and stack untouched per the options.
        unsafe {
            asm!(
                "tlbi vae2is, {va}",
                va = in(reg) va,
                options(preserves_flags, nostack),
            );
        }
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn activate(root_pa: PhysicalAddress, asid: (), va_range: ()) -> usize {
        // Sentinel returned on non-aarch64 builds, where no register is read.
        let mut previous_ttbr = usize::MAX;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Swap TTBR0_EL2 to the new root, returning the old value.
            asm!(
                "mrs {previous_ttbr}, ttbr0_el2",
                "msr ttbr0_el2, {ttbrval}",
                "isb",
                ttbrval = in(reg) root_pa.0,
                previous_ttbr = out(reg) previous_ttbr,
                options(preserves_flags),
            );
        }
        previous_ttbr
    }

    // Deliberately unsupported: with no ASID there is no way to invalidate the
    // old mapping's TLB entries safely here, so this always panics.
    unsafe fn deactivate(_previous_ttbr: usize, _asid: (), _va_range: ()) {
        panic!("EL2 page table can't safely be deactivated.");
    }
}
355
/// Marker for the EL3 stage 1 translation regime (secure monitor): a single
/// lower VA range via TTBR0_EL3, no ASIDs.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct El3;

impl TranslationRegime for El3 {
    type Attributes = El23Attributes;

    // No ASIDs and only a lower VA range in this regime.
    type Asid = ();
    type VaRange = ();

    fn invalidate_va(va: VirtualAddress) {
        // The TLBI operand is the VA shifted right by the page shift (12 bits).
        #[allow(unused)]
        let va = va.0 >> 12;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        // SAFETY (review note): TLB maintenance only; no memory operands, flags
        // and stack untouched per the options.
        unsafe {
            asm!(
                "tlbi vae3is, {va}",
                va = in(reg) va,
                options(preserves_flags, nostack),
            );
        }
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn activate(root_pa: PhysicalAddress, asid: (), va_range: ()) -> usize {
        // Sentinel returned on non-aarch64 builds, where no register is read.
        let mut previous_ttbr = usize::MAX;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Swap TTBR0_EL3 to the new root, returning the old value.
            asm!(
                "mrs {previous_ttbr}, ttbr0_el3",
                "msr ttbr0_el3, {ttbrval}",
                "isb",
                ttbrval = in(reg) root_pa.0,
                previous_ttbr = out(reg) previous_ttbr,
                options(preserves_flags),
            );
        }
        previous_ttbr
    }

    // Deliberately unsupported: with no ASID there is no way to invalidate the
    // old mapping's TLB entries safely here, so this always panics.
    unsafe fn deactivate(_previous_ttbr: usize, _asid: (), _va_range: ()) {
        panic!("EL3 page table can't safely be deactivated.");
    }
}
409
/// Marker for the stage 2 translation regime (guest IPA → PA, via VTTBR_EL2).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Stage2;

impl TranslationRegime for Stage2 {
    type Attributes = Stage2Attributes;

    // NOTE(review): VTTBR_EL2 carries a VMID in its upper bits, but no VMID is
    // modelled here (`Asid = ()`); the root PA is written as-is below.
    type Asid = ();
    type VaRange = ();

    fn invalidate_va(va: VirtualAddress) {
        // For stage 2 the "VA" is an IPA; shifted right by the page shift.
        #[allow(unused)]
        let va = va.0 >> 12;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        // SAFETY (review note): `tlbi ipas2e1is` invalidates stage 2 entries by
        // IPA; no memory operands, flags and stack untouched per the options.
        unsafe {
            asm!(
                "tlbi ipas2e1is, {va}",
                va = in(reg) va,
                options(preserves_flags, nostack),
            );
        }
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn activate(root_pa: PhysicalAddress, asid: (), va_range: ()) -> usize {
        // Sentinel returned on non-aarch64 builds, where no register is read.
        let mut previous_ttbr = usize::MAX;
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Swap VTTBR_EL2 to the new stage 2 root, returning the old value.
            asm!(
                "mrs {previous_ttbr}, vttbr_el2",
                "msr vttbr_el2, {ttbrval}",
                "isb",
                ttbrval = in(reg) root_pa.0,
                previous_ttbr = out(reg) previous_ttbr,
                options(preserves_flags),
            );
        }
        previous_ttbr
    }

    #[allow(
        unused_mut,
        unused_assignments,
        unused_variables,
        reason = "used only on aarch64"
    )]
    unsafe fn deactivate(previous_ttbr: usize, asid: (), va_range: ()) {
        #[cfg(all(not(test), target_arch = "aarch64"))]
        unsafe {
            // Invalidate all stage 1 & 2 entries for the current VMID, then
            // restore the previous VTTBR.
            asm!(
                "tlbi vmalls12e1",
                "dsb nsh",
                "isb",
                "msr vttbr_el2, {ttbrval}",
                "isb",
                ttbrval = in(reg) previous_ttbr,
                options(preserves_flags),
            );
        }
    }
}
484
/// A page-aligned, half-open range of virtual addresses `start..end`.
#[derive(Clone, Eq, PartialEq)]
pub struct MemoryRegion(Range<VirtualAddress>);
488
489pub(crate) fn granularity_at_level(level: usize) -> usize {
492 PAGE_SIZE << ((LEAF_LEVEL - level) * BITS_PER_LEVEL)
493}
494
/// Supplies page-table memory for descriptors with attribute type `A`:
/// allocation, deallocation, and the PA→pointer conversion needed to walk
/// tables from the CPU's point of view.
pub trait Translation<A: PagingAttributes> {
    /// Allocates a zeroed page table, returning both a pointer for CPU access
    /// and the physical address to store in parent descriptors.
    fn allocate_table(&mut self) -> (NonNull<PageTable<A>>, PhysicalAddress);

    /// Deallocates a table previously returned by
    /// [`allocate_table`](Self::allocate_table).
    ///
    /// # Safety
    /// `page_table` must have been returned by `allocate_table` on this same
    /// translation, and must not be used after this call.
    unsafe fn deallocate_table(&mut self, page_table: NonNull<PageTable<A>>);

    /// Converts the physical address of a page table (as stored in a
    /// descriptor) back into a pointer through which the CPU can access it.
    fn physical_to_virtual(&self, pa: PhysicalAddress) -> NonNull<PageTable<A>>;
}
514
515impl MemoryRegion {
516 pub const fn new(start: usize, end: usize) -> MemoryRegion {
521 MemoryRegion(
522 VirtualAddress(align_down(start, PAGE_SIZE))..VirtualAddress(align_up(end, PAGE_SIZE)),
523 )
524 }
525
526 pub const fn start(&self) -> VirtualAddress {
528 self.0.start
529 }
530
531 pub const fn end(&self) -> VirtualAddress {
533 self.0.end
534 }
535
536 pub const fn len(&self) -> usize {
538 self.0.end.0 - self.0.start.0
539 }
540
541 pub const fn is_empty(&self) -> bool {
543 self.0.start.0 == self.0.end.0
544 }
545
546 fn split(&self, level: usize) -> ChunkedIterator<'_> {
547 ChunkedIterator {
548 range: self,
549 granularity: granularity_at_level(level),
550 start: self.0.start.0,
551 }
552 }
553
554 pub(crate) fn is_block(&self, level: usize) -> bool {
556 let gran = granularity_at_level(level);
557 (self.0.start.0 | self.0.end.0) & (gran - 1) == 0
558 }
559}
560
561impl From<Range<VirtualAddress>> for MemoryRegion {
562 fn from(range: Range<VirtualAddress>) -> Self {
563 Self::new(range.start.0, range.end.0)
564 }
565}
566
567impl Display for MemoryRegion {
568 fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
569 write!(f, "{}..{}", self.0.start, self.0.end)
570 }
571}
572
573impl Debug for MemoryRegion {
574 fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
575 Display::fmt(self, f)
576 }
577}
578
bitflags! {
    /// Restrictions on how `map_range` may construct a mapping.
    #[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
    pub struct Constraints: usize {
        /// Never use block (huge) mappings; always descend to leaf pages.
        const NO_BLOCK_MAPPINGS = 1 << 0;
        /// Do not use the contiguous hint. (Not consulted in this module —
        /// presumably checked by descriptor/attribute code; verify at callers.)
        const NO_CONTIGUOUS_HINT = 1 << 1;
    }
}
589
/// A complete page-table hierarchy for translation regime `R`, owning its root
/// table and the [`Translation`] used to allocate and free subtables.
pub struct RootTable<R: TranslationRegime, T: Translation<R::Attributes>> {
    /// The root table together with its level.
    table: PageTableWithLevel<T, R::Attributes>,
    /// Allocator/mapper for the tables in this hierarchy.
    translation: T,
    /// Physical address of the root table (the value to program into a TTBR).
    pa: PhysicalAddress,
    /// Which VA range this hierarchy maps (`()` for single-range regimes).
    va_range: R::VaRange,
    _regime: PhantomData<R>,
}
598
impl<R: TranslationRegime<VaRange = ()>, T: Translation<R::Attributes>> RootTable<R, T> {
    /// Creates a new root table at the given level for a regime with a single
    /// VA range (the range parameter is implicitly `()`).
    pub fn new(translation: T, level: usize, regime: R) -> Self {
        Self::new_impl(translation, level, regime, ())
    }
}
609
impl<R: TranslationRegime<VaRange = VaRange>, T: Translation<R::Attributes>> RootTable<R, T> {
    /// Creates a new root table at the given level for the chosen VA range, for
    /// regimes that support both ranges.
    pub fn with_va_range(translation: T, level: usize, regime: R, va_range: VaRange) -> Self {
        Self::new_impl(translation, level, regime, va_range)
    }

    /// Returns which VA range this root table was created for.
    pub fn va_range(&self) -> VaRange {
        self.va_range
    }
}
627
impl<R: TranslationRegime, T: Translation<R::Attributes>> RootTable<R, T> {
    /// Shared constructor: allocates an empty root table at `level`.
    ///
    /// Panics if `level` is deeper than the leaf level.
    fn new_impl(mut translation: T, level: usize, _regime: R, va_range: R::VaRange) -> Self {
        if level > LEAF_LEVEL {
            panic!("Invalid root table level {}.", level);
        }
        let (table, pa) = PageTableWithLevel::new(&mut translation, level);
        RootTable {
            table,
            translation,
            pa,
            va_range,
            _regime: PhantomData,
        }
    }

    /// Returns the size in bytes of the VA span covered by this root table: one
    /// full table of entries at the root level.
    pub fn size(&self) -> usize {
        granularity_at_level(self.table.level) << BITS_PER_LEVEL
    }

    /// Maps `range` to physical memory starting at `pa` with the given flags,
    /// subject to `constraints`.
    ///
    /// Returns an error if `flags` misuses the table/page bit or if `range` is
    /// invalid for this root table's VA range.
    pub fn map_range(
        &mut self,
        range: &MemoryRegion,
        pa: PhysicalAddress,
        flags: R::Attributes,
        constraints: Constraints,
    ) -> Result<(), MapError> {
        // TABLE_OR_PAGE is managed internally (it distinguishes tables/pages
        // from blocks), so callers may not set it themselves.
        if flags.contains(R::Attributes::TABLE_OR_PAGE) {
            return Err(MapError::InvalidFlags(flags.bits()));
        }
        self.verify_region(range)?;
        self.table
            .map_range(&mut self.translation, range, pa, flags, constraints);
        Ok(())
    }

    /// Returns the physical address of the root table, suitable for a TTBR.
    pub fn to_physical(&self) -> PhysicalAddress {
        self.pa
    }

    /// Returns a reference to the translation backing this hierarchy.
    pub fn translation(&self) -> &T {
        &self.translation
    }

    /// Applies `f` to every descriptor covering `range`, splitting block
    /// entries into subtables where the range does not cover a whole block.
    /// `live` indicates the tables may be in active use, in which case updated
    /// entries have their TLB entries invalidated.
    ///
    /// Returns whether any live descriptor was actually changed.
    pub(crate) fn modify_range<F>(
        &mut self,
        range: &MemoryRegion,
        f: &F,
        live: bool,
    ) -> Result<bool, MapError>
    where
        F: Fn(&MemoryRegion, &mut UpdatableDescriptor<R::Attributes>) -> Result<(), ()> + ?Sized,
    {
        self.verify_region(range)?;
        self.table
            .modify_range::<F, R>(&mut self.translation, range, f, live)
    }

    /// Returns the stored VA range value (`()` for single-range regimes).
    pub(crate) fn va_range_or_unit(&self) -> R::VaRange {
        self.va_range
    }

    /// Walks `range` read-only, calling `f` on each non-table descriptor; an
    /// error from `f` is reported as a PTE update fault for that descriptor.
    pub fn walk_range<F>(&self, range: &MemoryRegion, f: &mut F) -> Result<(), MapError>
    where
        F: FnMut(&MemoryRegion, &Descriptor<R::Attributes>, usize) -> Result<(), ()>,
    {
        self.visit_range(range, &mut |mr, desc, level| {
            f(mr, desc, level).map_err(|_| MapError::PteUpdateFault(desc.bits()))
        })
    }

    /// Frees any subtable whose entries are all zero, replacing its parent
    /// entry with an empty one.
    pub fn compact_subtables(&mut self) {
        self.table.compact_subtables(&mut self.translation);
    }

    /// Walks `range` read-only, calling `f` on each non-table descriptor with
    /// the sub-region it covers and its level.
    pub(crate) fn visit_range<F>(&self, range: &MemoryRegion, f: &mut F) -> Result<(), MapError>
    where
        F: FnMut(&MemoryRegion, &Descriptor<R::Attributes>, usize) -> Result<(), MapError>,
    {
        self.verify_region(range)?;
        self.table.visit_range(&self.translation, range, f)
    }

    /// Test helper: returns the level at which `va` is mapped, if any.
    #[cfg(all(test, feature = "alloc"))]
    pub(crate) fn mapping_level(&self, va: VirtualAddress) -> Option<usize> {
        self.table.mapping_level(&self.translation, va)
    }

    /// Checks that `region` is well-formed and lies inside the address span
    /// covered by this root table's VA range.
    fn verify_region(&self, region: &MemoryRegion) -> Result<(), MapError> {
        if region.end() < region.start() {
            return Err(MapError::RegionBackwards(region.clone()));
        }
        match self.va_range.into_va_range() {
            VaRange::Lower => {
                // Lower-range VAs must have the top bit clear (non-negative as
                // a signed value) and end within the table's span.
                if (region.start().0 as isize) < 0 {
                    return Err(MapError::AddressRange(region.start()));
                } else if region.end().0 > self.size() {
                    return Err(MapError::AddressRange(region.end()));
                }
            }
            VaRange::Upper => {
                // Upper-range VAs are sign-extended: they must be negative as a
                // signed value and within `size()` bytes of the top of the
                // address space.
                if region.start().0 as isize >= 0
                    || (region.start().0 as isize).unsigned_abs() > self.size()
                {
                    return Err(MapError::AddressRange(region.start()));
                }
            }
        }
        Ok(())
    }
}
826
impl<R: TranslationRegime, T: Translation<R::Attributes>> Debug for RootTable<R, T> {
    /// Prints the root PA, regime, VA range and level, followed by an indented
    /// dump of the whole table hierarchy.
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        writeln!(
            f,
            "RootTable {{ pa: {}, translation_regime: {:?}, va_range: {:?}, level: {}, table:",
            self.pa, PhantomData::<R>, self.va_range, self.table.level
        )?;
        self.table.fmt_indented(f, &self.translation, 0)?;
        write!(f, "}}")
    }
}
838
impl<R: TranslationRegime, T: Translation<R::Attributes>> Drop for RootTable<R, T> {
    fn drop(&mut self) {
        // SAFETY (review note): the hierarchy is exclusively owned by this
        // RootTable and `free` is called exactly once, here. Note this does not
        // verify the table is inactive on all CPUs — deactivating before drop
        // is the caller's responsibility.
        unsafe { self.table.free(&mut self.translation) }
    }
}
848
/// Iterator state for [`MemoryRegion::split`]: walks `range` in chunks that
/// never cross a `granularity`-aligned boundary.
struct ChunkedIterator<'a> {
    /// The region being split.
    range: &'a MemoryRegion,
    /// Chunk alignment in bytes (a power of two from `granularity_at_level`).
    granularity: usize,
    /// Start address of the next chunk to yield.
    start: usize,
}
854
855impl Iterator for ChunkedIterator<'_> {
856 type Item = MemoryRegion;
857
858 fn next(&mut self) -> Option<MemoryRegion> {
859 if !self.range.0.contains(&VirtualAddress(self.start)) {
860 return None;
861 }
862 let end = self
863 .range
864 .0
865 .end
866 .0
867 .min((self.start | (self.granularity - 1)) + 1);
868 let c = MemoryRegion::new(self.start, end);
869 self.start = end;
870 Some(c)
871 }
872}
873
/// An owned pointer to a single page table, tagged with the translation level it
/// sits at (which determines how VAs are indexed into it).
#[derive(Debug)]
pub(crate) struct PageTableWithLevel<T: Translation<A>, A: PagingAttributes> {
    /// Pointer to the backing table memory, obtained from the translation.
    table: NonNull<PageTable<A>>,
    /// Translation level of this table (0..=LEAF_LEVEL).
    level: usize,
    _translation: PhantomData<T>,
}
882
// SAFETY (review note): the struct owns the table it points to (freed exactly
// once via `free`), so moving it to another thread is sound when `T` is `Send`
// — confirm no aliasing copies of `table` escape elsewhere.
unsafe impl<T: Translation<A> + Send, A: PagingAttributes> Send for PageTableWithLevel<T, A> {}

// SAFETY (review note): shared references only read the table (`as_ref`), so
// sharing across threads is sound when `T` is `Sync` — confirm descriptors have
// no interior mutability reachable through `&self`.
unsafe impl<T: Translation<A> + Sync, A: PagingAttributes> Sync for PageTableWithLevel<T, A> {}
889
impl<T: Translation<A>, A: PagingAttributes> PageTableWithLevel<T, A> {
    /// Allocates a fresh table at `level`, returning it with its physical
    /// address. Panics if `level` is deeper than the leaf level.
    fn new(translation: &mut T, level: usize) -> (Self, PhysicalAddress) {
        assert!(level <= LEAF_LEVEL);
        let (table, pa) = translation.allocate_table();
        (
            Self::from_pointer(table, level),
            pa,
        )
    }

    /// Wraps an existing table pointer together with its level.
    pub(crate) fn from_pointer(table: NonNull<PageTable<A>>, level: usize) -> Self {
        Self {
            table,
            level,
            _translation: PhantomData,
        }
    }

    /// Returns a shared reference to the descriptor of this table that covers
    /// `va`.
    fn get_entry(&self, va: VirtualAddress) -> &Descriptor<A> {
        // Select the BITS_PER_LEVEL bits of the VA that this level decodes.
        let shift = PAGE_SHIFT + (LEAF_LEVEL - self.level) * BITS_PER_LEVEL;
        let index = (va.0 >> shift) % (1 << BITS_PER_LEVEL);
        // SAFETY (review note): `table` is owned by `self` and stays valid for
        // the lifetime of the returned reference.
        let table = unsafe { self.table.as_ref() };
        &table.entries[index]
    }

    /// Returns a mutable reference to the descriptor of this table that covers
    /// `va`.
    fn get_entry_mut(&mut self, va: VirtualAddress) -> &mut Descriptor<A> {
        let shift = PAGE_SHIFT + (LEAF_LEVEL - self.level) * BITS_PER_LEVEL;
        let index = (va.0 >> shift) % (1 << BITS_PER_LEVEL);
        // SAFETY (review note): exclusive access follows from `&mut self` and
        // the table being owned by `self`.
        let table = unsafe { self.table.as_mut() };
        &mut table.entries[index]
    }

    /// Replaces `entry` (an entry of a `level` table) with a new subtable,
    /// copying any existing block mapping of `entry` down into it, and returns
    /// the subtable.
    fn split_entry(
        translation: &mut T,
        chunk: &MemoryRegion,
        entry: &mut Descriptor<A>,
        level: usize,
    ) -> Self {
        let granularity = granularity_at_level(level);
        let (mut subtable, subtable_pa) = Self::new(translation, level + 1);
        let old_flags = entry.flags();
        let old_pa = entry.output_address();
        // If the old entry was a (possibly invalid but non-empty) block
        // mapping, re-create it across the whole span it covered, one level
        // down, so splitting is observationally a no-op.
        if !old_flags.contains(A::TABLE_OR_PAGE) && (!old_flags.is_empty() || old_pa.0 != 0) {
            let a = align_down(chunk.0.start.0, granularity);
            let b = align_up(chunk.0.end.0, granularity);
            subtable.map_range(
                translation,
                &MemoryRegion::new(a, b),
                old_pa,
                old_flags,
                Constraints::empty(),
            );
        }
        entry.set(subtable_pa, A::TABLE_OR_PAGE | A::VALID);
        subtable
    }

    /// Maps `range` starting at `pa` with `flags`, recursing into (and creating
    /// or splitting) subtables as needed, honouring `constraints`.
    fn map_range(
        &mut self,
        translation: &mut T,
        range: &MemoryRegion,
        mut pa: PhysicalAddress,
        flags: A,
        constraints: Constraints,
    ) {
        let level = self.level;
        let granularity = granularity_at_level(level);

        for chunk in range.split(level) {
            let entry = self.get_entry_mut(chunk.0.start);

            if level == LEAF_LEVEL {
                // Leaf level: page descriptors need TABLE_OR_PAGE set when
                // valid; invalid entries carry no output address.
                if flags.contains(A::VALID) {
                    entry.set(pa, flags | A::TABLE_OR_PAGE);
                } else {
                    entry.set(PhysicalAddress(0), flags);
                }
            } else if !entry.is_table_or_page()
                && entry.flags() == flags
                // `%` binds tighter than `-`: this is the existing block's base
                // PA, i.e. pa minus the chunk's offset within the block.
                && entry.output_address().0 == pa.0 - chunk.0.start.0 % granularity
            {
                // The existing block mapping already covers this chunk with the
                // requested flags and PA — nothing to do.
            } else if chunk.is_block(level)
                && !entry.is_table_or_page()
                && is_aligned(pa.0, granularity)
                && !constraints.contains(Constraints::NO_BLOCK_MAPPINGS)
                && level > 0
            {
                // Whole block, suitably aligned, blocks allowed: map it with a
                // single block descriptor at this level.
                if flags.contains(A::VALID) {
                    entry.set(pa, flags);
                } else {
                    entry.set(PhysicalAddress(0), flags);
                }
            } else if chunk.is_block(level)
                && let Some(mut subtable) = entry.subtable(translation, level)
                && !flags.contains(A::VALID)
            {
                // Unmapping a whole block that is currently a subtable: replace
                // the table entry, then free the now-unreferenced subtable.
                entry.set(PhysicalAddress(0), flags);

                // SAFETY (review note): the entry pointing at the subtable was
                // just overwritten, so nothing references it any more.
                unsafe {
                    subtable.free(translation);
                }
            } else {
                // Otherwise recurse, splitting an existing block entry into a
                // subtable first if necessary.
                let mut subtable = entry
                    .subtable(translation, level)
                    .unwrap_or_else(|| Self::split_entry(translation, &chunk, entry, level));
                subtable.map_range(translation, &chunk, pa, flags, constraints);
            }
            // Physical addresses advance in lockstep with the VA chunks.
            pa.0 += chunk.len();
        }
    }

    /// Writes a human-readable dump of this table (and, recursively, its
    /// subtables), coalescing runs of identical or PA-contiguous entries.
    fn fmt_indented(
        &self,
        f: &mut Formatter,
        translation: &T,
        indentation: usize,
    ) -> Result<(), fmt::Error> {
        const WIDTH: usize = 3;
        // SAFETY (review note): `table` is owned by `self` and valid here.
        let table = unsafe { self.table.as_ref() };

        let mut i = 0;
        while i < table.entries.len() {
            if let Some(subtable) = table.entries[i].subtable(translation, self.level) {
                // Table entry: print it, then recurse with extra indentation.
                writeln!(
                    f,
                    "{:indentation$}{: <WIDTH$} : {:?}",
                    "", i, table.entries[i],
                )?;
                subtable.fmt_indented(f, translation, indentation + 2)?;
                i += 1;
            } else {
                // Coalesce a run of entries that are either all identical (e.g.
                // all zero) or whose bits advance by exactly one granule of PA
                // per index (a contiguous mapping).
                let first_contiguous = i;
                let first_entry = table.entries[i].bits();
                let granularity = granularity_at_level(self.level);
                while i < table.entries.len()
                    && (table.entries[i].bits() == first_entry
                        || (first_entry != 0
                            && table.entries[i].bits()
                                == first_entry + granularity * (i - first_contiguous)))
                {
                    i += 1;
                }
                if i - 1 == first_contiguous {
                    // Run of length one: print a single index.
                    write!(f, "{:indentation$}{: <WIDTH$} : ", "", first_contiguous)?;
                } else {
                    write!(
                        f,
                        "{:indentation$}{: <WIDTH$}-{: <WIDTH$}: ",
                        "",
                        first_contiguous,
                        i - 1,
                    )?;
                }
                if first_entry == 0 {
                    writeln!(f, "0")?;
                } else {
                    writeln!(f, "{:?}", Descriptor::<A>::new(first_entry))?;
                }
            }
        }
        Ok(())
    }

    /// Recursively frees this table and every subtable it references.
    ///
    /// # Safety
    /// After this call the table (and any descriptors pointing into it) must
    /// not be used again; the hierarchy must not be live on any CPU.
    unsafe fn free(&mut self, translation: &mut T) {
        // SAFETY (review note): `table` is still owned and valid at this point.
        let table = unsafe { self.table.as_ref() };
        for entry in &table.entries {
            if let Some(mut subtable) = entry.subtable(translation, self.level) {
                // SAFETY: subtables are freed before their parent below.
                unsafe {
                    subtable.free(translation);
                }
            }
        }
        // SAFETY: `self.table` came from `translation.allocate_table` and is
        // not referenced again after this.
        unsafe {
            translation.deallocate_table(self.table);
        }
    }

    /// Applies `f` to every descriptor covering `range`, splitting block
    /// entries whose span is only partially covered. When `live`, changed
    /// descriptors get their VA invalidated via the regime `R`.
    ///
    /// Returns whether any live descriptor was changed.
    fn modify_range<F, R: TranslationRegime<Attributes = A>>(
        &mut self,
        translation: &mut T,
        range: &MemoryRegion,
        f: &F,
        live: bool,
    ) -> Result<bool, MapError>
    where
        F: Fn(&MemoryRegion, &mut UpdatableDescriptor<A>) -> Result<(), ()> + ?Sized,
    {
        let mut modified = false;
        let level = self.level;
        for chunk in range.split(level) {
            let entry = self.get_entry_mut(chunk.0.start);
            // Recurse into an existing subtable; if the chunk covers only part
            // of a block, split the block into a subtable first.
            if let Some(mut subtable) = entry.subtable(translation, level).or_else(|| {
                if !chunk.is_block(level) {
                    Some(Self::split_entry(translation, &chunk, entry, level))
                } else {
                    None
                }
            }) {
                modified |= subtable.modify_range::<F, R>(translation, &chunk, f, live)?;
            } else {
                // Snapshot the bits first so the error can report the original
                // descriptor value.
                let bits = entry.bits();
                let mut desc = UpdatableDescriptor::new(entry, level, live);
                f(&chunk, &mut desc).map_err(|_| MapError::PteUpdateFault(bits))?;

                if live && desc.updated() {
                    // The table is in use: stale TLB entries must go.
                    R::invalidate_va(chunk.start());
                    modified = true;
                }
            }
        }
        Ok(modified)
    }

    /// Read-only walk: calls `f` on every non-table descriptor covering
    /// `range`, with the sub-region it covers and its level.
    fn visit_range<F, E>(&self, translation: &T, range: &MemoryRegion, f: &mut F) -> Result<(), E>
    where
        F: FnMut(&MemoryRegion, &Descriptor<A>, usize) -> Result<(), E>,
    {
        let level = self.level;
        for chunk in range.split(level) {
            let entry = self.get_entry(chunk.0.start);
            if let Some(subtable) = entry.subtable(translation, level) {
                subtable.visit_range(translation, &chunk, f)?;
            } else {
                f(&chunk, entry, level)?;
            }
        }
        Ok(())
    }

    /// Frees any subtable whose entries are all zero, clearing its parent
    /// entry. Returns whether this table itself ended up entirely empty.
    pub fn compact_subtables(&mut self, translation: &mut T) -> bool {
        // SAFETY (review note): exclusive access via `&mut self`; the table is
        // owned by `self`.
        let table = unsafe { self.table.as_mut() };

        let mut all_empty = true;
        for entry in &mut table.entries {
            if let Some(mut subtable) = entry.subtable(translation, self.level)
                && subtable.compact_subtables(translation)
            {
                // The subtable holds nothing: drop the reference to it...
                entry.set(PhysicalAddress(0), A::default());

                // SAFETY: ...so it can be freed without dangling references.
                unsafe {
                    subtable.free(translation);
                }
            }
            if entry.bits() != 0 {
                all_empty = false;
            }
        }
        all_empty
    }

    /// Test helper: returns the level at which `va` is mapped by a valid
    /// descriptor, or `None` if it is unmapped.
    #[cfg(all(test, feature = "alloc"))]
    pub(crate) fn mapping_level(&self, translation: &T, va: VirtualAddress) -> Option<usize> {
        let entry = self.get_entry(va);
        if let Some(subtable) = entry.subtable(translation, self.level) {
            subtable.mapping_level(translation, va)
        } else {
            if entry.is_valid() {
                Some(self.level)
            } else {
                None
            }
        }
    }
}
1233
/// One level of page table: a page-aligned, page-sized array of 512 descriptors,
/// laid out exactly as the MMU expects.
#[repr(C, align(4096))]
pub struct PageTable<A: PagingAttributes> {
    entries: [Descriptor<A>; 1 << BITS_PER_LEVEL],
}
1239
impl<A: PagingAttributes> PageTable<A> {
    /// A table with every descriptor empty (nothing mapped).
    pub const EMPTY: Self = Self {
        entries: [Descriptor::EMPTY; 1 << BITS_PER_LEVEL],
    };

    /// Heap-allocates a zeroed page table.
    #[cfg(feature = "alloc")]
    pub fn new() -> NonNull<Self> {
        // SAFETY (review note): all-zero bytes form a valid `PageTable` —
        // `EMPTY` above is an array of `Descriptor::EMPTY`, presumably the
        // zero descriptor; confirm against the `Descriptor` definition.
        unsafe { allocate_zeroed() }
    }

    /// Serializes the descriptors little-endian into `page`, which must be
    /// exactly one table (entry count × descriptor size) long.
    ///
    /// Returns `Err(())` if `page` has the wrong length.
    pub fn write_to(&self, page: &mut [u8]) -> Result<(), ()> {
        if page.len() != self.entries.len() * size_of::<Descriptor<A>>() {
            return Err(());
        }
        // Write each descriptor's raw bits into its slot in the output page.
        for (chunk, desc) in page
            .chunks_exact_mut(size_of::<Descriptor<A>>())
            .zip(self.entries.iter())
        {
            chunk.copy_from_slice(&desc.bits().to_le_bytes());
        }
        Ok(())
    }
}
1271
impl<A: PagingAttributes> Default for PageTable<A> {
    /// The default table is completely empty (same as [`PageTable::EMPTY`]).
    fn default() -> Self {
        Self::EMPTY
    }
}
1277
/// Allocates zero-initialized heap memory for a `T`, aborting via
/// `handle_alloc_error` if allocation fails.
///
/// # Safety
/// The caller must ensure that all-zero bytes are a valid `T` before using the
/// result as one.
#[cfg(feature = "alloc")]
unsafe fn allocate_zeroed<T>() -> NonNull<T> {
    let layout = Layout::new::<T>();
    // `alloc_zeroed` must not be called with a zero-sized layout.
    assert_ne!(layout.size(), 0);
    // SAFETY: the layout has non-zero size, checked just above.
    let pointer = unsafe { alloc_zeroed(layout) };
    if pointer.is_null() {
        handle_alloc_error(layout);
    }
    // SAFETY: `pointer` was checked to be non-null above.
    unsafe { NonNull::new_unchecked(pointer as *mut T) }
}
1295
/// Deallocates heap memory previously allocated for a `T` (e.g. by
/// `allocate_zeroed`).
///
/// # Safety
/// `ptr` must have been allocated with the layout of `T` by the global
/// allocator, and must not be used after this call.
#[cfg(feature = "alloc")]
pub(crate) unsafe fn deallocate<T>(ptr: NonNull<T>) {
    let layout = Layout::new::<T>();
    // SAFETY: per this function's contract, `ptr` was allocated with `layout`.
    unsafe {
        dealloc(ptr.as_ptr() as *mut u8, layout);
    }
}
1310
/// Rounds `value` down to a multiple of `alignment`, which must be a power of
/// two.
const fn align_down(value: usize, alignment: usize) -> usize {
    // Subtract the low bits instead of masking them off — same result.
    value - (value & (alignment - 1))
}
1314
/// Rounds `value` up to a multiple of `alignment`, which must be a power of
/// two.
///
/// The previous formula `((value - 1) | (alignment - 1)) + 1` underflowed for
/// `value == 0` (a debug-build panic, reachable via `MemoryRegion::new(0, 0)`);
/// this form returns 0 for 0 without any intermediate underflow.
const fn align_up(value: usize, alignment: usize) -> usize {
    let mask = alignment - 1;
    (value + mask) & !mask
}
1318
/// Returns whether `value` is a multiple of `alignment`, which must be a power
/// of two.
pub(crate) const fn is_aligned(value: usize, alignment: usize) -> bool {
    let mask = alignment - 1;
    value & mask == 0
}
1322
1323#[cfg(test)]
1324mod tests {
1325 use super::*;
1326 #[cfg(feature = "alloc")]
1327 use crate::target::TargetAllocator;
1328 #[cfg(feature = "alloc")]
1329 use alloc::{format, string::ToString, vec, vec::Vec};
1330
1331 #[cfg(feature = "alloc")]
1332 #[test]
1333 fn display_memory_region() {
1334 let region = MemoryRegion::new(0x1234, 0x56789);
1335 assert_eq!(
1336 ®ion.to_string(),
1337 "0x0000000000001000..0x0000000000057000"
1338 );
1339 assert_eq!(
1340 &format!("{:?}", region),
1341 "0x0000000000001000..0x0000000000057000"
1342 );
1343 }
1344
    /// `VirtualAddress` subtraction yields the distance in bytes.
    #[test]
    fn subtract_virtual_address() {
        let low = VirtualAddress(0x12);
        let high = VirtualAddress(0x1234);
        assert_eq!(high - low, 0x1222);
    }

    /// Subtracting a larger VA from a smaller one underflows, which panics in
    /// debug builds (hence the `debug_assertions` gate).
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn subtract_virtual_address_overflow() {
        let low = VirtualAddress(0x12);
        let high = VirtualAddress(0x1234);

        let _ = low - high;
    }

    /// Adding a byte offset to a `VirtualAddress` advances it by that amount.
    #[test]
    fn add_virtual_address() {
        assert_eq!(VirtualAddress(0x1234) + 0x42, VirtualAddress(0x1276));
    }
1367
    /// `PhysicalAddress` subtraction yields the distance in bytes.
    #[test]
    fn subtract_physical_address() {
        let low = PhysicalAddress(0x12);
        let high = PhysicalAddress(0x1234);
        assert_eq!(high - low, 0x1222);
    }

    /// Subtracting a larger PA from a smaller one underflows, which panics in
    /// debug builds (hence the `debug_assertions` gate).
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn subtract_physical_address_overflow() {
        let low = PhysicalAddress(0x12);
        let high = PhysicalAddress(0x1234);

        let _ = low - high;
    }

    /// Adding a byte offset to a `PhysicalAddress` advances it by that amount.
    #[test]
    fn add_physical_address() {
        assert_eq!(PhysicalAddress(0x1234) + 0x42, PhysicalAddress(0x1276));
    }
1390
    /// An all-zero descriptor is invalid and carries no flags.
    #[test]
    fn invalid_descriptor() {
        let desc = Descriptor::<El1Attributes>::new(0usize);
        assert!(!desc.is_valid());
        assert!(!desc.flags().contains(El1Attributes::VALID));
    }

    /// `set` stores both the output address and the flags, and they round-trip
    /// through the accessors.
    #[test]
    fn set_descriptor() {
        const PHYSICAL_ADDRESS: usize = 0x12340000;
        let mut desc = Descriptor::<El1Attributes>::new(0usize);
        assert!(!desc.is_valid());
        desc.set(
            PhysicalAddress(PHYSICAL_ADDRESS),
            El1Attributes::TABLE_OR_PAGE
                | El1Attributes::USER
                | El1Attributes::SWFLAG_1
                | El1Attributes::VALID,
        );
        assert!(desc.is_valid());
        assert_eq!(
            desc.flags(),
            El1Attributes::TABLE_OR_PAGE
                | El1Attributes::USER
                | El1Attributes::SWFLAG_1
                | El1Attributes::VALID
        );
        assert_eq!(desc.output_address(), PhysicalAddress(PHYSICAL_ADDRESS));
    }

    /// `modify_flags` adds the first set and removes the second, leaving other
    /// flags untouched.
    #[test]
    fn modify_descriptor_flags() {
        let mut desc = Descriptor::<El1Attributes>::new(0usize);
        assert!(!desc.is_valid());
        desc.set(
            PhysicalAddress(0x12340000),
            El1Attributes::TABLE_OR_PAGE | El1Attributes::USER | El1Attributes::SWFLAG_1,
        );
        UpdatableDescriptor::new(&mut desc, 3, true)
            .modify_flags(
                El1Attributes::DBM | El1Attributes::SWFLAG_3,
                El1Attributes::VALID | El1Attributes::SWFLAG_1,
            )
            .unwrap();
        assert!(!desc.is_valid());
        assert_eq!(
            desc.flags(),
            El1Attributes::TABLE_OR_PAGE
                | El1Attributes::USER
                | El1Attributes::SWFLAG_3
                | El1Attributes::DBM
        );
    }

    /// Attempting to clear TABLE_OR_PAGE via `modify_flags` is rejected (it is
    /// managed internally), so this panics via the `unwrap`.
    #[test]
    #[should_panic]
    fn modify_descriptor_table_or_page_flag() {
        let mut desc = Descriptor::<El1Attributes>::new(0usize);
        assert!(!desc.is_valid());
        desc.set(
            PhysicalAddress(0x12340000),
            El1Attributes::TABLE_OR_PAGE | El1Attributes::USER | El1Attributes::SWFLAG_1,
        );
        UpdatableDescriptor::new(&mut desc, 3, false)
            .modify_flags(El1Attributes::VALID, El1Attributes::TABLE_OR_PAGE)
            .unwrap();
    }
1458
    /// Splitting at level 2 breaks the region at 2 MiB boundaries, with the
    /// unaligned head and tail kept as partial chunks.
    #[cfg(feature = "alloc")]
    #[test]
    fn unaligned_chunks() {
        let region = MemoryRegion::new(0x0000_2000, 0x0020_5000);
        let chunks = region.split(LEAF_LEVEL - 1).collect::<Vec<_>>();
        assert_eq!(
            chunks,
            vec![
                MemoryRegion::new(0x0000_2000, 0x0020_0000),
                MemoryRegion::new(0x0020_0000, 0x0020_5000),
            ]
        );
    }

    /// Only descriptors with both low bits set (0b11) count as table/page.
    #[test]
    fn table_or_page() {
        assert!(!Descriptor::<El1Attributes>::new(0b00).is_table_or_page());
        assert!(!Descriptor::<El1Attributes>::new(0b10).is_table_or_page());

        assert!(!Descriptor::<El1Attributes>::new(0b01).is_table_or_page());

        assert!(Descriptor::<El1Attributes>::new(0b11).is_table_or_page());
    }

    /// Unrelated high bits must not affect the table/page classification.
    #[test]
    fn table_or_page_unknown_bits() {
        const UNKNOWN: usize = 1 << 50 | 1 << 52;

        assert!(!Descriptor::<El1Attributes>::new(UNKNOWN | 0b00).is_table_or_page());
        assert!(!Descriptor::<El1Attributes>::new(UNKNOWN | 0b10).is_table_or_page());

        assert!(!Descriptor::<El1Attributes>::new(UNKNOWN | 0b01).is_table_or_page());

        assert!(Descriptor::<El1Attributes>::new(UNKNOWN | 0b11).is_table_or_page());
    }
1501
    /// An empty root table debug-prints as a single coalesced run of zeroes.
    #[cfg(feature = "alloc")]
    #[test]
    fn debug_roottable_empty() {
        let table = RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
        assert_eq!(
            format!("{table:?}"),
"RootTable { pa: 0x0000000000000000, translation_regime: PhantomData<aarch64_paging::paging::El1And0>, va_range: Lower, level: 1, table:
0 -511: 0
}"
        );
    }

    /// Page-granularity mappings: contiguous identical runs are coalesced in
    /// the debug dump; distinct flags break the run.
    #[cfg(feature = "alloc")]
    #[test]
    fn debug_roottable_contiguous() {
        let mut table =
            RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
        table
            .map_range(
                &MemoryRegion::new(PAGE_SIZE * 3, PAGE_SIZE * 6),
                PhysicalAddress(PAGE_SIZE * 3),
                El1Attributes::VALID | El1Attributes::NON_GLOBAL,
                Constraints::empty(),
            )
            .unwrap();
        table
            .map_range(
                &MemoryRegion::new(PAGE_SIZE * 6, PAGE_SIZE * 7),
                PhysicalAddress(PAGE_SIZE * 6),
                El1Attributes::VALID | El1Attributes::READ_ONLY,
                Constraints::empty(),
            )
            .unwrap();
        table
            .map_range(
                &MemoryRegion::new(PAGE_SIZE * 8, PAGE_SIZE * 9),
                PhysicalAddress(PAGE_SIZE * 8),
                El1Attributes::VALID | El1Attributes::READ_ONLY,
                Constraints::empty(),
            )
            .unwrap();
        assert_eq!(
            format!("{table:?}"),
"RootTable { pa: 0x0000000000000000, translation_regime: PhantomData<aarch64_paging::paging::El1And0>, va_range: Lower, level: 1, table:
0 : 0x00000000001003 (0x0000000000001000, El1Attributes(VALID | TABLE_OR_PAGE))
 0 : 0x00000000002003 (0x0000000000002000, El1Attributes(VALID | TABLE_OR_PAGE))
 0 -2 : 0\n 3 -5 : 0x00000000003803 (0x0000000000003000, El1Attributes(VALID | TABLE_OR_PAGE | NON_GLOBAL))
 6 : 0x00000000006083 (0x0000000000006000, El1Attributes(VALID | TABLE_OR_PAGE | READ_ONLY))
 7 : 0
 8 : 0x00000000008083 (0x0000000000008000, El1Attributes(VALID | TABLE_OR_PAGE | READ_ONLY))
 9 -511: 0
 1 -511: 0
1 -511: 0
}"
        );
    }

    /// Block-granularity (2 MiB) mappings stay at level 2 as block descriptors
    /// (no TABLE_OR_PAGE bit) and coalesce the same way.
    #[cfg(feature = "alloc")]
    #[test]
    fn debug_roottable_contiguous_block() {
        let mut table =
            RootTable::with_va_range(TargetAllocator::new(0), 1, El1And0, VaRange::Lower);
        const BLOCK_SIZE: usize = PAGE_SIZE * 512;
        table
            .map_range(
                &MemoryRegion::new(BLOCK_SIZE * 3, BLOCK_SIZE * 6),
                PhysicalAddress(BLOCK_SIZE * 3),
                El1Attributes::VALID | El1Attributes::NON_GLOBAL,
                Constraints::empty(),
            )
            .unwrap();
        table
            .map_range(
                &MemoryRegion::new(BLOCK_SIZE * 6, BLOCK_SIZE * 7),
                PhysicalAddress(BLOCK_SIZE * 6),
                El1Attributes::VALID | El1Attributes::READ_ONLY,
                Constraints::empty(),
            )
            .unwrap();
        table
            .map_range(
                &MemoryRegion::new(BLOCK_SIZE * 8, BLOCK_SIZE * 9),
                PhysicalAddress(BLOCK_SIZE * 8),
                El1Attributes::VALID | El1Attributes::READ_ONLY,
                Constraints::empty(),
            )
            .unwrap();
        assert_eq!(
            format!("{table:?}"),
"RootTable { pa: 0x0000000000000000, translation_regime: PhantomData<aarch64_paging::paging::El1And0>, va_range: Lower, level: 1, table:
0 : 0x00000000001003 (0x0000000000001000, El1Attributes(VALID | TABLE_OR_PAGE))
 0 -2 : 0
 3 -5 : 0x00000000600801 (0x0000000000600000, El1Attributes(VALID | NON_GLOBAL))
 6 : 0x00000000c00081 (0x0000000000c00000, El1Attributes(VALID | READ_ONLY))
 7 : 0
 8 : 0x00000001000081 (0x0000000001000000, El1Attributes(VALID | READ_ONLY))
 9 -511: 0
1 -511: 0
}"
        );
    }
1603}