use std::{
    ffi::CStr,
    fmt,
    hash::{Hash, Hasher},
    marker::PhantomData,
    mem,
};

use bytemuck::pod_read_unaligned;

use crate::{MappedAddressView, Offset};
6
/// Raw 64-bit address value (`Va` presumably abbreviates "virtual address" —
/// not referenced in this file; confirm usage against the rest of the crate).
pub type Va = u64;
9
/// Local alias for [`bytemuck::Pod`], so bounds in this module can be spelled
/// `T: Pod` without naming the `bytemuck` crate at every use site.
pub trait Pod: bytemuck::Pod {}

/// Blanket impl: every `bytemuck::Pod` type automatically satisfies [`Pod`].
impl<T: bytemuck::Pod> Pod for T {}
14
15#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
17#[repr(transparent)]
18pub struct Ptr<T: ?Sized> {
19 mapped: Offset,
20 _marker: PhantomData<fn() -> T>,
21}
22
23impl<T: ?Sized> Copy for Ptr<T> {}
24
25impl<T: ?Sized> Clone for Ptr<T> {
26 fn clone(&self) -> Self {
27 *self
28 }
29}
30
31impl<T: ?Sized> Ptr<T> {
32 #[inline]
34 pub const fn null() -> Self {
35 Self {
36 mapped: 0,
37 _marker: PhantomData,
38 }
39 }
40
41 #[inline]
43 pub const fn from_mapped(mapped: Offset) -> Self {
44 Self {
45 mapped,
46 _marker: PhantomData,
47 }
48 }
49
50 #[inline]
52 pub const fn is_null(self) -> bool {
53 self.mapped == 0
54 }
55
56 #[inline]
58 pub const fn addr(self) -> Offset {
59 self.mapped
60 }
61
62 #[inline]
64 pub const fn cast<U: ?Sized>(self) -> Ptr<U> {
65 Ptr::from_mapped(self.mapped)
66 }
67
68 #[inline]
70 pub fn offset<U: ?Sized>(self, bytes: Offset) -> Option<Ptr<U>> {
71 self.mapped.checked_add(bytes).map(Ptr::from_mapped)
72 }
73}
74
/// Typed read helpers layered on every [`MappedAddressView`].
///
/// All addresses are "mapped" offsets ([`Offset`]) that the underlying view
/// translates to file offsets via `mapped_to_file_offset`.
pub trait TypedView: MappedAddressView {
    /// Reads a `T` at `mapped` by copying its bytes out of the image.
    ///
    /// Uses `pod_read_unaligned`, so no alignment is required of the source
    /// bytes. Returns `None` when `size_of::<T>()` bytes starting at `mapped`
    /// are not contiguously mapped (see [`Self::mapped_slice_strict`]).
    fn read_pod_copy<T: Pod>(&self, mapped: Offset) -> Option<T> {
        let bytes = self.mapped_slice_strict(mapped, mem::size_of::<T>())?;
        Some(pod_read_unaligned(bytes))
    }

    /// Copy-dereferences `ptr`; alignment-free convenience over
    /// [`Self::read_pod_copy`].
    #[inline]
    fn deref_copy<T: Pod>(&self, ptr: Ptr<T>) -> Option<T> {
        self.read_pod_copy(ptr.addr())
    }

    /// Borrow-dereferences `ptr` as `&T` directly into the image.
    ///
    /// Unlike the copy path, this requires the backing bytes to be properly
    /// aligned for `T` (`bytemuck::try_from_bytes` enforces size + alignment).
    /// NOTE(review): no explicit null check here, unlike `deref_c_str` — a
    /// null pointer is only rejected if offset 0 is unmapped; confirm intent.
    fn deref<T: Pod>(&self, ptr: Ptr<T>) -> Option<&T> {
        let bytes = self.mapped_slice_strict(ptr.addr(), mem::size_of::<T>())?;
        bytemuck::try_from_bytes::<T>(bytes).ok()
    }

    /// Reads a NUL-terminated string at `ptr`, treating a null pointer as
    /// absent (`None`) rather than as an address to translate.
    #[inline]
    fn deref_c_str(&self, ptr: Ptr<CStr>) -> Option<&CStr> {
        if ptr.is_null() {
            return None;
        }
        self.mapped_c_str(ptr.addr())
    }

    /// Returns `size` image bytes for the mapped span `[mapped, mapped+size)`,
    /// or `None` unless the whole span translates to one contiguous file range.
    fn mapped_slice_strict(&self, mapped: Offset, size: usize) -> Option<&[u8]> {
        let file_start = self.mapped_to_file_offset(mapped)?;
        if size == 0 {
            // Empty read: still require that `mapped` itself translates, then
            // hand back an empty slice anchored at that file offset.
            return self.image().get(file_start..file_start);
        }

        // Translate the *last* byte of the span as well, and insist it lands
        // exactly `size - 1` bytes after the first. This rejects spans that
        // cross an unmapped hole or jump between differently-based regions.
        // NOTE(review): only the two endpoints are checked, which assumes the
        // view's translation is piecewise contiguous — confirm that holds for
        // all implementors.
        let width_minus_one = size.checked_sub(1)?;
        let mapped_span = Offset::try_from(width_minus_one).ok()?;
        let mapped_end = mapped.checked_add(mapped_span)?;
        let file_end = self.mapped_to_file_offset(mapped_end)?;
        let expected_end = file_start.checked_add(width_minus_one)?;
        if file_end != expected_end {
            return None;
        }

        // Bounds-checked slice of the backing image (`get`, never indexing).
        let file_end_exclusive = file_start.checked_add(size)?;
        self.image().get(file_start..file_end_exclusive)
    }
}
126
// Blanket impl: every `MappedAddressView` (sized or not) gets the typed
// helpers for free; `TypedView` is never implemented by hand.
impl<T: MappedAddressView + ?Sized> TypedView for T {}
128
#[cfg(test)]
mod tests {
    use std::ffi::CStr;

    use super::{Ptr, TypedView};
    use crate::{MappedAddressView, Offset};

    /// Two-field POD with `align_of == 2`, used to exercise both the
    /// alignment-free copy path and the alignment-checked borrow path.
    #[repr(C)]
    #[derive(Clone, Copy, Debug, PartialEq, Eq, bytemuck::Pod, bytemuck::Zeroable)]
    struct Pair {
        a: u16,
        b: u16,
    }

    /// Fixture view over a 12-byte image with two mapped regions and a hole:
    ///   mapped 100..108 -> file 0..8
    ///   mapped 200..204 -> file 8..12
    /// Everything else (notably mapped 108..200) is unmapped.
    #[derive(Debug)]
    struct TestView {
        bytes: Vec<u8>,
    }

    impl MappedAddressView for TestView {
        fn image(&self) -> &[u8] {
            &self.bytes
        }

        fn mapped_to_file_offset(&self, mapped_offset: Offset) -> Option<usize> {
            // First region: identity minus 100 (mapped 100 -> file 0).
            if (100..108).contains(&mapped_offset) {
                return usize::try_from(mapped_offset - 100).ok();
            }
            // Second region: identity minus 192 (mapped 200 -> file 8).
            if (200..204).contains(&mapped_offset) {
                return usize::try_from(mapped_offset - 192).ok();
            }
            None
        }

        fn file_offset_to_mapped(&self, file_offset: usize) -> Option<Offset> {
            // Exact inverse of `mapped_to_file_offset` over the same regions.
            if file_offset < 8 {
                return Offset::try_from(file_offset).ok().map(|value| value + 100);
            }
            if (8..12).contains(&file_offset) {
                return Offset::try_from(file_offset).ok().map(|value| value + 192);
            }
            None
        }
    }

    #[test]
    fn ptr_helpers_behave_as_expected() {
        let ptr = Ptr::<u32>::from_mapped(0x1000);
        assert!(!ptr.is_null());
        assert_eq!(ptr.addr(), 0x1000);
        assert_eq!(Ptr::<u32>::null().addr(), 0);
        assert!(Ptr::<u32>::null().is_null());

        // `cast` changes only the type parameter, never the offset.
        let cast = ptr.cast::<u8>();
        assert_eq!(cast.addr(), 0x1000);

        // `offset` is checked: normal advance succeeds, overflow yields None.
        let next = ptr.offset::<u32>(8).expect("offset should not overflow");
        assert_eq!(next.addr(), 0x1008);
        assert!(ptr.offset::<u32>(u64::MAX).is_none());
    }

    #[test]
    fn read_pod_copy_accepts_unaligned_data() {
        // Mapped 101 -> file offset 1 (odd), which is misaligned for `Pair`;
        // the copy path must still succeed. Expected field values assume a
        // little-endian host.
        let view = TestView {
            bytes: vec![0xFF, 0x22, 0x11, 0x44, 0x33, 0, 0, 0, 0, 0, 0, 0],
        };
        let value = view
            .read_pod_copy::<Pair>(101)
            .expect("unaligned POD reads should use copy path");
        assert_eq!(
            value,
            Pair {
                a: 0x1122,
                b: 0x3344
            }
        );
    }

    #[test]
    fn deref_requires_alignment() {
        // File offset 1 is misaligned for `Pair` (align 2), so the borrowing
        // path refuses; file offset 0 is aligned and succeeds. This relies on
        // the Vec allocation base being at least 2-aligned, which holds for
        // the global allocator in practice.
        let view = TestView {
            bytes: vec![0, 0x22, 0x11, 0x44, 0x33, 0, 0, 0, 0, 0, 0, 0],
        };
        assert!(view.deref::<Pair>(Ptr::from_mapped(101)).is_none());
        assert!(view.deref::<Pair>(Ptr::from_mapped(100)).is_some());
    }

    #[test]
    fn strict_slice_rejects_mapped_holes() {
        // A u32 at mapped 106 would span 106..110, crossing the unmapped hole
        // that starts at 108 — the strict slice must reject it.
        let view = TestView {
            bytes: vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0],
        };
        assert!(view.read_pod_copy::<u32>(106).is_none());
    }

    #[test]
    fn deref_c_str_works_for_non_null_ptrs() {
        // Mapped 200 -> file offset 8, where b"foo\0" lives.
        let view = TestView {
            bytes: vec![0, 0, 0, 0, 0, 0, 0, 0, b'f', b'o', b'o', 0],
        };
        let ptr = Ptr::<u8>::from_mapped(200).cast::<CStr>();
        let value = view
            .deref_c_str(ptr)
            .expect("valid mapped C string should decode");
        assert_eq!(value.to_str().expect("ASCII fixture"), "foo");

        // Null pointers short-circuit to None without translation.
        assert!(view.deref_c_str(Ptr::<CStr>::null()).is_none());
    }

    #[test]
    fn deref_c_str_rejects_unterminated_bytes() {
        // Mapped 203 -> file offset 11: a lone b'x' with no NUL terminator
        // before the image ends, so decoding must fail.
        let view = TestView {
            bytes: vec![0, 0, 0, 0, 0, 0, 0, 0, b'f', b'o', b'o', b'x'],
        };
        let ptr = Ptr::<u8>::from_mapped(203).cast::<CStr>();
        assert!(view.deref_c_str(ptr).is_none());
    }
}