wraith/manipulation/manual_map/mod.rs

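//! Manual mapping of PE images via a type-state pipeline: each stage
//! (`parse`, `allocate`, `map_sections`, `relocate`, `resolve_imports`,
//! `process_tls`, `finalize`, in that order) returns a `ManualMapper` in the
//! next state, so steps cannot be reordered or skipped by accident. Several
//! stages also offer `skip_*` / `*_without_*` variants for callers that
//! handle that step themselves.
//!
//! A minimal usage sketch of the step-by-step API; `dll_bytes` is assumed to
//! hold a valid PE image for the current architecture:
//!
//! ```ignore
//! let mapped = ManualMapper::parse(&dll_bytes)?
//!     .allocate()?          // reserve memory, preferring the image base
//!     .map_sections()?      // copy section data into place
//!     .relocate()?          // apply base relocations if the base moved
//!     .resolve_imports()?   // resolve the import table
//!     .process_tls()?       // process TLS data
//!     .finalize()?;         // apply per-section protections
//! mapped.call_entry_point()?;
//! ```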
#[cfg(all(not(feature = "std"), feature = "alloc"))]
use alloc::{format, string::String};

#[cfg(feature = "std")]
use std::{format, string::String};

mod allocator;
mod entry;
mod mapper;
mod parser;
mod relocator;
mod resolver;
mod tls;

pub use allocator::MappedMemory;
pub use entry::reason;
pub use parser::ParsedPe;

use crate::error::{Result, WraithError};
use core::marker::PhantomData;

/// Marker types encoding how far the mapping pipeline has progressed.
pub mod state {
    /// PE image parsed.
    pub struct Parsed;
    /// Image memory allocated.
    pub struct Allocated;
    /// Section contents copied into the allocation.
    pub struct SectionsMapped;
    /// Base relocations applied (or explicitly skipped).
    pub struct Relocated;
    /// Import table resolved (or explicitly skipped).
    pub struct ImportsResolved;
    /// TLS data processed (or explicitly skipped).
    pub struct TlsProcessed;
    /// Section protections set; the image is ready to use.
    pub struct Ready;
}

/// Type-state manual mapper: `S` records the pipeline stage, so each method is
/// only available once its prerequisites have run.
pub struct ManualMapper<S> {
    pe: ParsedPe,
    memory: Option<MappedMemory>,
    _state: PhantomData<S>,
}

impl ManualMapper<state::Parsed> {
    /// Parses a PE image from a byte slice without mapping anything yet.
    pub fn parse(data: &[u8]) -> Result<Self> {
        let pe = ParsedPe::parse(data)?;
        Ok(Self {
            pe,
            memory: None,
            _state: PhantomData,
        })
    }

    /// Reads a PE image from disk and parses it.
    #[cfg(feature = "std")]
    pub fn from_file(path: &str) -> Result<Self> {
        let data = std::fs::read(path).map_err(|e| WraithError::InvalidPeFormat {
            reason: format!("failed to read file: {e}"),
        })?;
        Self::parse(&data)
    }

    /// Stub for `no_std` builds, where file I/O is unavailable.
    #[cfg(not(feature = "std"))]
    pub fn from_file(_path: &str) -> Result<Self> {
        Err(WraithError::InvalidPeFormat {
            reason: "file operations not available in no_std".into(),
        })
    }

    /// Returns the parsed PE metadata.
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Allocates image memory, using the image's preferred base as a hint.
    pub fn allocate(self) -> Result<ManualMapper<state::Allocated>> {
        let size = self.pe.size_of_image();
        let preferred_base = self.pe.preferred_base();

        let memory = allocator::allocate_image(size, preferred_base)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: Some(memory),
            _state: PhantomData,
        })
    }

    /// Allocates image memory at a caller-chosen base address.
    pub fn allocate_at(self, base: usize) -> Result<ManualMapper<state::Allocated>> {
        let size = self.pe.size_of_image();
        let memory = allocator::allocate_at(base, size)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: Some(memory),
            _state: PhantomData,
        })
    }

    /// Allocates image memory wherever the allocator chooses.
    pub fn allocate_anywhere(self) -> Result<ManualMapper<state::Allocated>> {
        let size = self.pe.size_of_image();
        let memory = allocator::allocate_anywhere(size)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: Some(memory),
            _state: PhantomData,
        })
    }
}

impl ManualMapper<state::Allocated> {
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Copies section data from the parsed image into the allocation.
    pub fn map_sections(mut self) -> Result<ManualMapper<state::SectionsMapped>> {
        let memory = self.memory.as_mut().unwrap();
        mapper::map_sections(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }
}

impl ManualMapper<state::SectionsMapped> {
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Applies base relocations if the image did not land at its preferred base.
    pub fn relocate(mut self) -> Result<ManualMapper<state::Relocated>> {
        let memory = self.memory.as_mut().unwrap();
        let delta = memory.base() as i64 - self.pe.preferred_base() as i64;

        // A zero delta means the image sits at its preferred base; no fix-ups needed.
        if delta != 0 {
            relocator::apply_relocations(&self.pe, memory, delta)?;
        }

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// Advances past the relocation step without touching the image.
    pub fn skip_relocations(self) -> ManualMapper<state::Relocated> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::Relocated> {
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Resolves the import table using the default resolver.
    pub fn resolve_imports(mut self) -> Result<ManualMapper<state::ImportsResolved>> {
        let memory = self.memory.as_mut().unwrap();
        resolver::resolve_imports(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

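    /// Resolves imports using a caller-supplied lookup instead of the default
    /// resolver. The `Fn(&str, &str) -> Option<usize>` closure maps an import
    /// (its two name strings) to an address, or returns `None` if it cannot.
    ///
    /// A minimal sketch; `find_module` and `find_export` are hypothetical
    /// helpers standing in for whatever lookup the caller already has, and the
    /// argument order shown (module, then symbol) is assumed:
    ///
    /// ```ignore
    /// let mapper = mapper.resolve_imports_with(|module, symbol| {
    ///     let base = find_module(module)?;
    ///     find_export(base, symbol)
    /// })?;
    /// ```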
    pub fn resolve_imports_with<F>(
        mut self,
        resolver_fn: F,
    ) -> Result<ManualMapper<state::ImportsResolved>>
    where
        F: Fn(&str, &str) -> Option<usize>,
    {
        let memory = self.memory.as_mut().unwrap();
        resolver::resolve_imports_custom(&self.pe, memory, resolver_fn)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// Advances past import resolution without modifying the image.
    pub fn skip_imports(self) -> ManualMapper<state::ImportsResolved> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::ImportsResolved> {
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Processes the image's TLS data.
    pub fn process_tls(mut self) -> Result<ManualMapper<state::TlsProcessed>> {
        let memory = self.memory.as_mut().unwrap();
        tls::process_tls(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// Advances past TLS processing without modifying the image.
    pub fn skip_tls(self) -> ManualMapper<state::TlsProcessed> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::TlsProcessed> {
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Applies the final per-section memory protections.
    pub fn finalize(mut self) -> Result<ManualMapper<state::Ready>> {
        let memory = self.memory.as_mut().unwrap();
        mapper::set_section_protections(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// Marks the image ready without changing section protections.
    pub fn finalize_without_protections(self) -> ManualMapper<state::Ready> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::Ready> {
    /// Calls the module entry point with the attach reason.
    pub fn call_entry_point(&self) -> Result<bool> {
        let memory = self.memory.as_ref().unwrap();
        entry::call_dll_attach(&self.pe, memory)
    }

    /// Calls the module entry point with a caller-supplied reason (see `reason`).
    pub fn call_entry_point_with_reason(&self, call_reason: u32) -> Result<bool> {
        let memory = self.memory.as_ref().unwrap();
        entry::call_entry_point(&self.pe, memory, call_reason)
    }

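    /// Looks up a named export in the mapped image and returns its address.
    ///
    /// A sketch of turning that address into a callable pointer; the export
    /// name and signature below are hypothetical and must match whatever the
    /// mapped module actually exports:
    ///
    /// ```ignore
    /// let addr = mapper.get_export("Initialize")?;
    /// // Safety: the transmuted signature must match the real export exactly.
    /// let init: unsafe extern "system" fn() -> u32 = unsafe { core::mem::transmute(addr) };
    /// let status = unsafe { init() };
    /// ```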
    pub fn get_export(&self, name: &str) -> Result<usize> {
        let memory = self.memory.as_ref().unwrap();
        resolver::get_mapped_export(&self.pe, memory, name)
    }

    /// Looks up an export by ordinal and returns its address.
    pub fn get_export_by_ordinal(&self, ordinal: u16) -> Result<usize> {
        let memory = self.memory.as_ref().unwrap();
        resolver::get_mapped_export_by_ordinal(&self.pe, memory, ordinal)
    }

    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    pub fn size(&self) -> usize {
        self.memory.as_ref().unwrap().size()
    }

    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// Releases ownership of the mapped memory to the caller.
    pub fn into_memory(mut self) -> MappedMemory {
        self.memory.take().unwrap()
    }

    /// Returns a raw pointer to `base + offset` inside the mapped image.
    pub fn ptr_at(&self, offset: usize) -> *mut u8 {
        self.memory.as_ref().unwrap().ptr_at(offset)
    }

    /// Calls the detach entry point (best effort), then frees the mapped memory.
    pub fn unmap(mut self) -> Result<()> {
        if let Some(memory) = self.memory.take() {
            let _ = entry::call_dll_detach(&self.pe, &memory);
            memory.free()?;
        }
        Ok(())
    }
}

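/// One-shot convenience wrapper that runs the full pipeline with default
/// behavior at every stage: parse, allocate, map sections, relocate, resolve
/// imports, process TLS, then finalize.
///
/// ```ignore
/// // `dll_bytes` is assumed to hold a valid PE image for this architecture.
/// let mapped = map_pe(&dll_bytes)?;
/// mapped.call_entry_point()?;
/// ```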
pub fn map_pe(data: &[u8]) -> Result<ManualMapper<state::Ready>> {
    ManualMapper::parse(data)?
        .allocate()?
        .map_sections()?
        .relocate()?
        .resolve_imports()?
        .process_tls()?
        .finalize()
}

/// Like `map_pe`, but reads the image from `path` first.
pub fn map_file(path: &str) -> Result<ManualMapper<state::Ready>> {
    ManualMapper::from_file(path)?
        .allocate()?
        .map_sections()?
        .relocate()?
        .resolve_imports()?
        .process_tls()?
        .finalize()
}

/// Maps the image and immediately calls its entry point.
pub fn map_and_call(data: &[u8]) -> Result<ManualMapper<state::Ready>> {
    let mapper = map_pe(data)?;
    mapper.call_entry_point()?;
    Ok(mapper)
}

/// Maps the image from `path` and immediately calls its entry point.
pub fn map_file_and_call(path: &str) -> Result<ManualMapper<state::Ready>> {
    let mapper = map_file(path)?;
    mapper.call_entry_point()?;
    Ok(mapper)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_and_allocate() {
        let exe_path = std::env::current_exe().unwrap();
        let data = std::fs::read(&exe_path).unwrap();

        let mapper = ManualMapper::parse(&data).unwrap();
        assert!(mapper.pe().size_of_image() > 0);

        let mapper = mapper.allocate().unwrap();
        assert!(mapper.base() != 0);
    }

    #[test]
    fn test_map_sections() {
        let exe_path = std::env::current_exe().unwrap();
        let data = std::fs::read(&exe_path).unwrap();

        let mapper = ManualMapper::parse(&data)
            .unwrap()
            .allocate()
            .unwrap()
            .map_sections()
            .unwrap();

        assert!(mapper.base() != 0);
    }
}