// page_table_generic/frame.rs

use crate::{
    FrameAllocator, PageTableEntry, PagingError, PagingResult, PhysAddr, PteConfig, TableMeta,
    VirtAddr,
};

/// One physical frame interpreted as a page-table node: an array of
/// `Self::LEN` entries of type `T::P`, reached through the allocator's
/// phys-to-virt mapping.
#[derive(Clone, Copy)]
pub struct Frame<T: TableMeta, A: FrameAllocator> {
    pub paddr: PhysAddr,
    pub allocator: A,
    _marker: core::marker::PhantomData<T>,
}

impl<T: TableMeta, A: FrameAllocator> core::fmt::Debug for Frame<T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("Frame")
            .field("paddr", &format_args!("{:#x}", self.paddr.raw()))
            .finish()
    }
}

impl<T, A> Frame<T, A>
where
    T: TableMeta,
    A: FrameAllocator,
{
    /// Number of low address bits covered by the page offset.
    pub(crate) const PT_INDEX_SHIFT: usize = T::PAGE_SIZE.trailing_zeros() as usize;
    /// Total index bits across all table levels.
    pub(crate) const PT_INDEX_BITS: usize = cal_index_bits::<T>();
    /// Total translated bits: all index bits plus the page offset.
    pub(crate) const PT_VALID_BITS: usize = Self::PT_INDEX_BITS + Self::PT_INDEX_SHIFT;
    /// Number of entries per table frame.
    pub(crate) const LEN: usize = T::PAGE_SIZE / core::mem::size_of::<T::P>();
    /// Number of levels in the paging hierarchy.
    pub(crate) const PT_LEVEL: usize = T::LEVEL_BITS.len();
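
    // Illustration (an assumed layout, not fixed by this crate): for an
    // x86-64/Sv48-style configuration with 4 KiB pages, four 9-bit levels
    // (LEVEL_BITS = [9, 9, 9, 9]), and 8-byte entries, these constants are:
    //   PT_INDEX_SHIFT = 12   (4096 == 1 << 12)
    //   PT_INDEX_BITS  = 36   (9 + 9 + 9 + 9)
    //   PT_VALID_BITS  = 48   (36 + 12)
    //   LEN            = 512  (4096 / 8)
    //   PT_LEVEL       = 4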

    /// Allocates a frame for a new page table and zero-fills it.
    pub fn new(allocator: A) -> PagingResult<Self> {
        let paddr = allocator.alloc_frame().ok_or(PagingError::NoMemory)?;
        unsafe {
            // Zero the whole frame through the allocator's linear mapping.
            let vaddr = allocator.phys_to_virt(paddr);
            core::ptr::write_bytes(vaddr, 0, T::PAGE_SIZE);
        }

        Ok(Self {
            paddr,
            allocator,
            _marker: core::marker::PhantomData,
        })
    }
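
    // Illustrative construction, where `Meta` and `Alloc` stand in for a
    // target's `TableMeta` and `FrameAllocator` implementations:
    //
    //     let root: Frame<Meta, Alloc> = Frame::new(alloc)?;
    //
    // The returned frame is zero-filled, i.e. a table with no valid
    // entries yet.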

    /// Wraps an existing, already-initialized table frame at `paddr`.
    pub fn from_paddr(paddr: PhysAddr, allocator: A) -> Self {
        Self {
            paddr,
            allocator,
            _marker: core::marker::PhantomData,
        }
    }

    /// Builds a `Frame` for the child table that `pte` points to.
    pub fn from_pte(pte: &T::P, level: usize, allocator: A) -> Self {
        let config = pte.to_config(level > 1);
        Self::from_paddr(config.paddr, allocator)
    }

    /// Views the frame as a mutable slice of page-table entries.
    pub fn as_slice_mut(&mut self) -> &mut [T::P] {
        let vaddr = self.allocator.phys_to_virt(self.paddr);
        unsafe { core::slice::from_raw_parts_mut(vaddr as *mut T::P, Self::LEN) }
    }

    /// Views the frame as a shared slice of page-table entries.
    pub fn as_slice(&self) -> &[T::P] {
        let vaddr = self.allocator.phys_to_virt(self.paddr);
        unsafe { core::slice::from_raw_parts(vaddr as *const T::P, Self::LEN) }
    }

    /// Size of the virtual region covered by a single entry at `level`
    /// (level 1 is the leaf level, where an entry covers one page).
    pub fn level_size(level: usize) -> usize {
        if level == 1 {
            return T::PAGE_SIZE;
        }
        // Shift by the index bits of every level below `level`; those are
        // the last `level - 1` entries of `LEVEL_BITS`.
        let total_levels = T::LEVEL_BITS.len();
        let shift = T::LEVEL_BITS
            .iter()
            .skip(total_levels - level + 1)
            .sum::<usize>();
        T::PAGE_SIZE << shift
    }
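
    // Illustration (same assumed 4 KiB / [9, 9, 9, 9] layout as above):
    //   level 1 -> 4 KiB   (one page)
    //   level 2 -> 2 MiB   (4096 << 9)
    //   level 3 -> 1 GiB   (4096 << 18)
    //   level 4 -> 512 GiB (4096 << 27)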

    /// Index of the entry that `vaddr` selects in a table at `level`.
    ///
    /// `LEVEL_BITS[0]` describes the top level, so a table at `level`
    /// uses `LEVEL_BITS[total_levels - level]` index bits.
    pub fn virt_to_index(vaddr: VirtAddr, level: usize) -> usize {
        if level == 0 || level > Self::PT_LEVEL {
            panic!("Invalid level: {} (valid: 1..={})", level, Self::PT_LEVEL);
        }

        let page_shift = T::PAGE_SIZE.trailing_zeros() as usize;
        let total_levels = T::LEVEL_BITS.len();

        // The index field for `level` sits above the page offset plus the
        // index bits of every level below it.
        let shift = if level == 1 {
            page_shift
        } else {
            page_shift
                + T::LEVEL_BITS
                    .iter()
                    .skip(total_levels - level + 1)
                    .sum::<usize>()
        };

        let level_index_bits = T::LEVEL_BITS[total_levels - level];
        let mask = (1 << level_index_bits) - 1;

        (vaddr.raw() >> shift) & mask
    }
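
    // Illustration (assumed 4 KiB / [9, 9, 9, 9] layout): for
    // vaddr = 0x4000_3000,
    //   level 1: (0x4000_3000 >> 12) & 0x1ff = 3
    //   level 2: (0x4000_3000 >> 21) & 0x1ff = 0
    //   level 3: (0x4000_3000 >> 30) & 0x1ff = 1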

    /// Inverse of `virt_to_index`: the virtual address of entry `index`
    /// in a table at `level` whose coverage starts at `base_vaddr`.
    pub fn reconstruct_vaddr(index: usize, level: usize, base_vaddr: VirtAddr) -> VirtAddr {
        let entry_size = Self::level_size(level);
        base_vaddr + index * entry_size
    }
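
    // Illustration (assumed 4 KiB / 9-bit levels): if `base_vaddr` is the
    // start of the region this table covers, entry 3 of a level-2 table
    // begins at `base_vaddr + 3 * 2 MiB`, matching the index that
    // `virt_to_index` returns for any address inside that 2 MiB span.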

    /// Frees this table and everything below it: first all child tables
    /// (depth-first via `deallocate_children`), then this frame itself.
    pub fn deallocate_recursive(&mut self, level: usize) {
        self.deallocate_children(level);

        // All children are gone; release this table's own frame.
        self.allocator.dealloc_frame(self.paddr);
    }

    /// Frees every child table referenced by this table and clears those
    /// entries. Leaf and huge mappings are skipped: the table only frees
    /// its own intermediate table frames, not the frames it maps.
    pub fn deallocate_children(&mut self, level: usize) {
        for i in (0..Self::LEN).rev() {
            // Copy the entry's state out first so the shared borrow of
            // `self` ends before we mutate the table below.
            let entry_info = {
                let entries = self.as_slice();
                if i < entries.len() {
                    let config = entries[i].to_config(level > 1);
                    (config.valid, config.huge, config.paddr)
                } else {
                    (false, false, crate::PhysAddr::new(0))
                }
            };

            let (is_valid, is_huge, paddr) = entry_info;

            if !is_valid {
                continue;
            }

            // Leaf and huge entries carry no child table; nothing to free.
            if is_huge || level == 1 {
                continue;
            }

            // Intermediate entry: free the child table it points to, then
            // overwrite the entry with an invalid one.
            let mut child_frame = Frame::<T, A>::from_paddr(paddr, self.allocator.clone());
            child_frame.deallocate_recursive(level - 1);

            let entries_mut = self.as_slice_mut();
            let invalid_config = PteConfig {
                valid: false,
                ..Default::default()
            };
            entries_mut[i] = T::P::from_config(invalid_config);
        }
    }
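
    // Teardown usage (illustrative): a whole tree rooted at `root` is
    // released with `root.deallocate_recursive(PT_LEVEL)`, which frees
    // child tables bottom-up and then the root frame itself. To keep the
    // root frame for reuse, call `deallocate_children` alone instead.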

    /// Walks the table tree from this node and returns the entry that
    /// maps `vaddr`, discarding the level it was found at.
    pub fn translate_recursive(&self, vaddr: VirtAddr, level: usize) -> PagingResult<T::P> {
        let (pte, _) = self.translate_recursive_with_level(vaddr, level)?;
        Ok(pte)
    }

    /// Walks the table tree from this node (a table at `level`) and
    /// returns the mapping entry for `vaddr` together with the level at
    /// which the walk terminated (1 for a normal page, higher for a huge
    /// mapping).
    pub fn translate_recursive_with_level(
        &self,
        vaddr: VirtAddr,
        level: usize,
    ) -> PagingResult<(T::P, usize)> {
        let index = Self::virt_to_index(vaddr, level);

        let entries = self.as_slice();
        let pte = entries[index];

        let config = pte.to_config(level > 1);
        if !config.valid {
            return Err(PagingError::not_mapped());
        }

        // A huge entry, or any entry at the leaf level, ends the walk.
        if config.huge || level == 1 {
            return Ok((pte, level));
        }

        // Otherwise descend into the child table.
        if level > 1 {
            let child_frame: Frame<T, A> = Frame::from_pte(&pte, level, self.allocator.clone());
            return child_frame.translate_recursive_with_level(vaddr, level - 1);
        }

        // Unreachable given the checks above; kept as a defensive error.
        Err(PagingError::hierarchy_error(
            "Invalid page table level during translation",
        ))
    }
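
    // Completing a translation (a sketch, not an API of this type): from
    // the returned `(pte, hit_level)`, a caller can derive the physical
    // address along these lines:
    //
    //     let config = pte.to_config(hit_level > 1);
    //     let offset = vaddr.raw() & (Self::level_size(hit_level) - 1);
    //     let paddr = config.paddr.raw() + offset;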

    /// Frees the subtree behind entry `index` of this table (a table at
    /// `level`) and clears the entry. Returns `true` if a child table was
    /// actually freed; leaf tables, huge mappings, invalid entries, and
    /// out-of-range indices return `false`.
    pub fn dealloc_entry_recursive(&mut self, index: usize, level: usize) -> bool {
        if index >= Self::LEN || level <= 1 {
            return false;
        }

        let entries = self.as_slice();
        let entry = &entries[index];
        let config = entry.to_config(level > 1);

        if config.valid && !config.huge {
            // Free the child table, then overwrite the entry with an
            // invalid one.
            let mut child_frame = Frame::<T, A>::from_pte(entry, level, self.allocator.clone());
            child_frame.deallocate_recursive(level - 1);

            let entries_mut = self.as_slice_mut();
            let invalid_config = PteConfig {
                valid: false,
                ..Default::default()
            };
            entries_mut[index] = T::P::from_config(invalid_config);

            true
        } else {
            false
        }
    }
}

/// Sums the index bits of every level at compile time.
const fn cal_index_bits<T: TableMeta>() -> usize {
    let mut bits = 0;
    let len = T::LEVEL_BITS.len();
    let mut i = 0;
    // `const fn` cannot use iterator adapters, so sum with a `while` loop.
    while i < len {
        bits += T::LEVEL_BITS[i];
        i += 1;
    }
    bits
}
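
// A hedged sketch of how a target might instantiate `TableMeta` for a
// RISC-V Sv39 layout (three 9-bit levels over 4 KiB pages). The real
// trait surface is defined elsewhere in this crate; the item names below
// are inferred from how `Frame` uses them and may not match it exactly:
//
//     struct Sv39;
//
//     impl TableMeta for Sv39 {
//         const PAGE_SIZE: usize = 4096;            // 12-bit page offset
//         const LEVEL_BITS: &'static [usize] = &[9, 9, 9];
//         type P = Sv39Entry;                       // hypothetical PTE type
//     }
//
// With this metadata, PT_VALID_BITS works out to 39 (27 index bits plus
// the 12-bit page offset) and `level_size` yields 4 KiB, 2 MiB, and
// 1 GiB for levels 1..=3.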