1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
// Axel '0vercl0k' Souchet - November 9 2025
//! Everything related to virtual memory.
use core::slice;
use std::cmp::min;
use std::mem::MaybeUninit;
use crate::error::{Error, PageReadError, PxeKind, Result};
use crate::gxa::{Gpa, Gva, Gxa};
use crate::parse::KernelDumpParser;
use crate::phys;
use crate::pxe::{Pfn, Pxe};
use crate::structs::{PageKind, Pod};
/// The details related to a virtual to physical address translation.
///
/// If you are wondering why there is no 'readable' field, it is because
/// [`Reader::translate`] returns an error if one of the PXE is
/// marked as not present. In other words, if the translation succeeds, the page
/// is at least readable.
#[derive(Debug)]
pub struct Translation {
    /// The physical address backing the virtual address that was requested.
    pub pfn: Pfn,
    /// The byte offset in that physical page. Note that for large / huge pages
    /// this is an offset into the whole mapped region, so it can exceed the
    /// size of a normal page.
    pub offset: u64,
    /// The kind of physical page (normal, large or huge) the translation
    /// resolved to.
    pub page_kind: PageKind,
    /// Is the page writable? Only `true` if every PXE level of the walk allows
    /// it.
    pub writable: bool,
    /// Is the page executable? Only `true` if every PXE level of the walk
    /// allows it.
    pub executable: bool,
    /// Is the page user accessible? Only `true` if every PXE level of the walk
    /// allows it.
    pub user_accessible: bool,
}
impl Translation {
    /// Build a [`Translation`] for a huge page (the walk stopped at the
    /// PDPTE level).
    #[must_use]
    pub fn huge_page(pxes: &[Pxe; 2], gva: Gva) -> Self {
        Self::inner_new(pxes, gva)
    }
    /// Build a [`Translation`] for a large page (the walk stopped at the PDE
    /// level).
    #[must_use]
    pub fn large_page(pxes: &[Pxe; 3], gva: Gva) -> Self {
        Self::inner_new(pxes, gva)
    }
    /// Build a [`Translation`] for a normal page (all four PXE levels were
    /// walked).
    #[must_use]
    pub fn new(pxes: &[Pxe; 4], gva: Gva) -> Self {
        Self::inner_new(pxes, gva)
    }
    /// Shared constructor: derive the page kind from how deep the walk went,
    /// the permissions from the whole PXE chain, and the in-page offset from
    /// `gva`.
    fn inner_new(pxes: &[Pxe], gva: Gva) -> Self {
        // How many levels were walked tells us the size of the final page.
        let page_kind = match pxes.len() {
            2 => PageKind::Huge,
            3 => PageKind::Large,
            4 => PageKind::Normal,
            _ => unreachable!("pxes len should be between 2 and 4"),
        };
        // A permission is granted only if *every* level of the walk grants it.
        let granted_by_all = |f: fn(&Pxe) -> bool| pxes.iter().all(f);
        let last = pxes.last().expect("at least one pxe");
        Self {
            pfn: last.pfn,
            offset: page_kind.page_offset(gva.u64()),
            page_kind,
            writable: granted_by_all(Pxe::writable),
            executable: granted_by_all(Pxe::executable),
            user_accessible: granted_by_all(Pxe::user_accessible),
        }
    }
    /// The [`Gpa`] this translation resolved to (physical page base plus the
    /// in-page offset).
    #[must_use]
    pub fn gpa(&self) -> Gpa {
        self.pfn.gpa_with_offset(self.offset)
    }
}
/// Demote non-fatal read failures ([`Error::PageRead`] and
/// [`Error::PartialRead`]) to `Ok(None)`; successes become `Ok(Some(..))` and
/// any other error is passed through untouched.
pub(crate) fn ignore_non_fatal<T>(r: Result<T>) -> Result<Option<T>> {
    match r {
        Err(Error::PageRead(_) | Error::PartialRead { .. }) => Ok(None),
        other => other.map(Some),
    }
}
/// A reader lets you translate & read virtual memory from a dump file.
///
/// It walks the page tables rooted at `dtb` to turn virtual addresses into
/// physical ones, then reads the backing pages out of the dump's physical
/// memory.
pub struct Reader<'parser> {
    /// The dump parser used to access physical memory.
    parser: &'parser KernelDumpParser,
    /// The directory table base the page-table walk starts from (defaults to
    /// the one found in the dump headers, see [`Reader::new`]).
    dtb: Gpa,
}
impl<'parser> Reader<'parser> {
    /// Create a [`Reader`] using the directory table base found in the dump
    /// headers.
    pub fn new(parser: &'parser KernelDumpParser) -> Self {
        Self::with_dtb(parser, Gpa::new(parser.headers().directory_table_base))
    }
    /// Create a [`Reader`] with a caller-provided directory table base; useful
    /// to walk a different address space than the dump headers' default one.
    pub fn with_dtb(parser: &'parser KernelDumpParser, dtb: Gpa) -> Self {
        Self { parser, dtb }
    }
    /// Translate a [`Gva`] into a [`Gpa`].
    ///
    /// Walks the four paging levels (PML4E -> PDPTE -> PDE -> PTE), stopping
    /// early when a PDPTE / PDE maps a huge / large page. Fails if a page
    /// table page is missing from the dump or if a PXE is not present — with
    /// the exception of a non-present PTE in 'transition' state, which is
    /// tolerated.
    #[expect(clippy::similar_names)]
    pub fn translate(&self, gva: Gva) -> Result<Translation> {
        // Read a single page table entry from physical memory; on failure,
        // the error carries the gva being translated and the PXE level.
        let read_pxe = |gpa: Gpa, pxe: PxeKind| -> Result<Pxe> {
            let r = phys::Reader::new(self.parser);
            let Ok(pxe) = r.read_struct::<u64>(gpa).map(Pxe::from) else {
                // If the physical page isn't in the dump, enrich the error by adding the gva
                // that was getting translated as well as the pxe level we were at.
                return Err(PageReadError::NotInDump {
                    gva: Some((gva, Some(pxe))),
                    gpa,
                }
                .into());
            };
            Ok(pxe)
        };
        // Aligning in case PCID bits are set (bits 11:0)
        let pml4_base = self.dtb.page_align();
        // Each PXE is a u64, hence the `* 8` used to index the tables below.
        let pml4e_gpa = Gpa::new(pml4_base.u64() + (gva.pml4e_idx() * 8));
        let pml4e = read_pxe(pml4e_gpa, PxeKind::Pml4e)?;
        if !pml4e.present() {
            return Err(PageReadError::NotPresent {
                gva,
                which_pxe: PxeKind::Pml4e,
            }
            .into());
        }
        // Level 2: the PML4E's pfn points at the PDPT.
        let pdpt_base = pml4e.pfn.gpa();
        let pdpte_gpa = Gpa::new(pdpt_base.u64() + (gva.pdpe_idx() * 8));
        let pdpte = read_pxe(pdpte_gpa, PxeKind::Pdpte)?;
        if !pdpte.present() {
            return Err(PageReadError::NotPresent {
                gva,
                which_pxe: PxeKind::Pdpte,
            }
            .into());
        }
        // huge pages:
        // 7 (PS) - Page size; must be 1 (otherwise, this entry references a page
        // directory; see Table 4-1.
        let pd_base = pdpte.pfn.gpa();
        if pdpte.large_page() {
            return Ok(Translation::huge_page(&[pml4e, pdpte], gva));
        }
        // Level 3: the PDPTE's pfn points at the PD.
        let pde_gpa = Gpa::new(pd_base.u64() + (gva.pde_idx() * 8));
        let pde = read_pxe(pde_gpa, PxeKind::Pde)?;
        if !pde.present() {
            return Err(PageReadError::NotPresent {
                gva,
                which_pxe: PxeKind::Pde,
            }
            .into());
        }
        // large pages:
        // 7 (PS) - Page size; must be 1 (otherwise, this entry references a page
        // table; see Table 4-18.
        let pt_base = pde.pfn.gpa();
        if pde.large_page() {
            return Ok(Translation::large_page(&[pml4e, pdpte, pde], gva));
        }
        // Level 4: the PDE's pfn points at the PT.
        let pte_gpa = Gpa::new(pt_base.u64() + (gva.pte_idx() * 8));
        let pte = read_pxe(pte_gpa, PxeKind::Pte)?;
        if !pte.present() {
            // We'll allow reading from a transition PTE, so return an error only if it's
            // not one, otherwise we'll carry on.
            if !pte.transition() {
                return Err(PageReadError::NotPresent {
                    gva,
                    which_pxe: PxeKind::Pte,
                }
                .into());
            }
        }
        Ok(Translation::new(&[pml4e, pdpte, pde, pte], gva))
    }
    /// Read the exact amount of bytes asked by the user & return a
    /// [`Error::PartialRead`] error if it couldn't read as much as wanted.
    ///
    /// Reads that straddle several virtual pages are handled by translating
    /// and reading one page at a time; a partial-read error reports how many
    /// bytes were successfully read before the failure.
    pub fn read_exact(&self, gva: Gva, buf: &mut [u8]) -> Result<()> {
        // Amount of bytes left to read.
        let mut amount_left = buf.len();
        // Total amount of bytes that we have successfully read.
        let mut total_read = 0;
        // The current gva we are reading from.
        let mut addr = gva;
        // Let's try to read as much as the user wants.
        while amount_left > 0 {
            // Translate the gva into a gpa.
            let translation = match self.translate(addr) {
                Ok(t) => t,
                // A failed translation turns into a partial read that reports
                // how far we got..
                Err(Error::PageRead(reason)) => {
                    return Err(Error::PartialRead {
                        expected_amount: buf.len(),
                        actual_amount: total_read,
                        reason,
                    });
                }
                // ..otherwise this is an error.
                Err(e) => return Err(e),
            };
            // We need to take care of reads that straddle different virtual memory pages.
            // First, figure out the maximum amount of bytes we can read off this page.
            let left_in_page =
                usize::try_from(translation.page_kind.size() - translation.offset).unwrap();
            // Then, either we read it until its end, or we stop before if we can get by
            // with less.
            let amount_wanted = min(amount_left, left_in_page);
            // Figure out where we should read into.
            let slice = &mut buf[total_read..total_read + amount_wanted];
            // Read the physical memory!
            let gpa = translation.gpa();
            match phys::Reader::new(self.parser).read_exact(gpa, slice) {
                Ok(()) => {}
                Err(Error::PartialRead {
                    actual_amount,
                    reason: PageReadError::NotInDump { gva: None, gpa },
                    ..
                }) => {
                    // Augment `NotInDump` with the `gva` as `phys::Reader::read_exact` doesn't know
                    // anything about it.
                    let reason = PageReadError::NotInDump {
                        gva: Some((addr, None)),
                        gpa,
                    };
                    return Err(Error::PartialRead {
                        expected_amount: buf.len(),
                        // Account for the bytes read off previous pages as
                        // well as the ones read off this page.
                        actual_amount: total_read + actual_amount,
                        reason,
                    });
                }
                Err(Error::PartialRead { .. }) => {
                    // We should never get there; `phys::Reader::read_exact` can only return a
                    // [`PageReadError::NotInDump`] error if it cannot read the gpa because it
                    // isn't in the dump.
                    unreachable!();
                }
                Err(e) => return Err(e),
            }
            // Update the total amount of read bytes and how much work we have left.
            total_read += amount_wanted;
            amount_left -= amount_wanted;
            // We have more work to do, so let's move to the next page.
            addr = addr.next_aligned_page();
        }
        // Yay, we read as much bytes as the user wanted!
        Ok(())
    }
    /// Read the virtual memory starting at `gva` into `buf`. If it cannot read
    /// as much as asked by the user because the dump file is missing a physical
    /// memory page or because one of the PXE is non present, the function
    /// doesn't error out and return the amount of bytes that was
    /// successfully read.
    pub fn read(&self, gva: Gva, buf: &mut [u8]) -> Result<usize> {
        match self.read_exact(gva, buf) {
            Ok(()) => Ok(buf.len()),
            Err(Error::PartialRead { actual_amount, .. }) => Ok(actual_amount),
            Err(e) => Err(e),
        }
    }
    /// Read a `T` from virtual memory.
    ///
    /// `T` must be [`Pod`] because its storage is filled byte-by-byte from
    /// the dump, bypassing any constructor.
    pub fn read_struct<T: Pod>(&self, gva: Gva) -> Result<T> {
        let mut t: MaybeUninit<T> = MaybeUninit::uninit();
        let size_of_t = size_of_val(&t);
        // SAFETY: the pointer covers exactly the `MaybeUninit<T>` storage, so
        // it is valid for `size_of_t` writable bytes, and `u8` has no
        // alignment or validity requirements.
        let slice_over_t =
            unsafe { slice::from_raw_parts_mut(t.as_mut_ptr().cast::<u8>(), size_of_t) };
        self.read_exact(gva, slice_over_t)?;
        // SAFETY: `read_exact` succeeded so every byte of `t` was initialized,
        // and the `Pod` bound is the promise that any bit pattern is a valid
        // `T`.
        Ok(unsafe { t.assume_init() })
    }
    /// Try to translate `gva` into [`Gpa`], turning non-fatal errors (missing
    /// page / non-present PXE) into `Ok(None)`.
    pub fn try_translate(&self, gva: Gva) -> Result<Option<Translation>> {
        ignore_non_fatal(self.translate(gva))
    }
    /// Try to read the exact amount of bytes asked by the user, turning
    /// non-fatal errors into `Ok(None)`.
    pub fn try_read_exact(&self, gva: Gva, buf: &mut [u8]) -> Result<Option<()>> {
        ignore_non_fatal(self.read_exact(gva, buf))
    }
    /// Try to read a `T` from virtual memory, turning non-fatal errors into
    /// `Ok(None)`.
    pub fn try_read_struct<T: Pod>(&self, gva: Gva) -> Result<Option<T>> {
        ignore_non_fatal(self.read_struct(gva))
    }
}