wraith/manipulation/manual_map/mod.rs

//! Manual PE mapping - LoadLibrary bypass
//!
//! This module provides a complete PE loader that maps DLLs without using
//! the Windows loader, creating "ghost DLLs" invisible to GetModuleHandle.
//!
//! # Example
//!
//! ```no_run
//! use wraith::manipulation::manual_map::{ManualMapper, map_file};
//!
//! // convenience function for quick mapping
//! let mapper = map_file(r"C:\path\to\module.dll")?;
//! println!("Mapped at {:#x}", mapper.base());
//!
//! // or step-by-step for more control
//! let mapper = ManualMapper::from_file(r"C:\path\to\module.dll")?
//!     .allocate()?
//!     .map_sections()?
//!     .relocate()?
//!     .resolve_imports()?
//!     .process_tls()?
//!     .finalize()?;
//!
//! mapper.call_entry_point()?;
//! # Ok::<(), wraith::WraithError>(())
//! ```

#[cfg(all(not(feature = "std"), feature = "alloc"))]
use alloc::{format, string::String};

#[cfg(feature = "std")]
use std::{format, string::String};

mod allocator;
mod entry;
mod mapper;
mod parser;
mod relocator;
mod resolver;
mod tls;

pub use allocator::MappedMemory;
pub use entry::reason;
pub use parser::ParsedPe;

use crate::error::{Result, WraithError};
use core::marker::PhantomData;

/// type-state markers for manual mapping stages
pub mod state {
    /// PE has been parsed but no memory allocated
    pub struct Parsed;
    /// Memory has been allocated for the image
    pub struct Allocated;
    /// PE sections have been mapped to memory
    pub struct SectionsMapped;
    /// Base relocations have been applied
    pub struct Relocated;
    /// Import Address Table has been resolved
    pub struct ImportsResolved;
    /// TLS callbacks have been processed
    pub struct TlsProcessed;
    /// Image is ready for execution
    pub struct Ready;
}

/// manual mapper with type-state progression
///
/// the type parameter ensures mapping steps happen in the correct order:
/// Parsed -> Allocated -> SectionsMapped -> Relocated -> ImportsResolved -> TlsProcessed -> Ready
pub struct ManualMapper<S> {
    pe: ParsedPe,
    memory: Option<MappedMemory>,
    _state: PhantomData<S>,
}

impl ManualMapper<state::Parsed> {
    /// parse PE from bytes
    pub fn parse(data: &[u8]) -> Result<Self> {
        let pe = ParsedPe::parse(data)?;
        Ok(Self {
            pe,
            memory: None,
            _state: PhantomData,
        })
    }

    /// parse PE from file
    #[cfg(feature = "std")]
    pub fn from_file(path: &str) -> Result<Self> {
        let data = std::fs::read(path).map_err(|e| WraithError::InvalidPeFormat {
            reason: format!("failed to read file: {e}"),
        })?;
        Self::parse(&data)
    }

    /// parse PE from file (no_std stub)
    #[cfg(not(feature = "std"))]
    pub fn from_file(_path: &str) -> Result<Self> {
        Err(WraithError::InvalidPeFormat {
            reason: "file operations not available in no_std".into(),
        })
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// allocate memory for the PE image
    ///
    /// tries preferred base first, falls back to any available address
    pub fn allocate(self) -> Result<ManualMapper<state::Allocated>> {
        let size = self.pe.size_of_image();
        let preferred_base = self.pe.preferred_base();

        let memory = allocator::allocate_image(size, preferred_base)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: Some(memory),
            _state: PhantomData,
        })
    }

    /// allocate at specific address
    ///
    /// fails if address is not available
    pub fn allocate_at(self, base: usize) -> Result<ManualMapper<state::Allocated>> {
        let size = self.pe.size_of_image();
        let memory = allocator::allocate_at(base, size)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: Some(memory),
            _state: PhantomData,
        })
    }

    /// allocate anywhere (no preference)
    pub fn allocate_anywhere(self) -> Result<ManualMapper<state::Allocated>> {
        let size = self.pe.size_of_image();
        let memory = allocator::allocate_anywhere(size)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: Some(memory),
            _state: PhantomData,
        })
    }
}

impl ManualMapper<state::Allocated> {
    /// get allocated base address
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// map PE sections to allocated memory
    pub fn map_sections(mut self) -> Result<ManualMapper<state::SectionsMapped>> {
        let memory = self.memory.as_mut().unwrap();
        mapper::map_sections(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }
}

impl ManualMapper<state::SectionsMapped> {
    /// get base address
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// apply base relocations
    pub fn relocate(mut self) -> Result<ManualMapper<state::Relocated>> {
        let memory = self.memory.as_mut().unwrap();
        let delta = memory.base() as i64 - self.pe.preferred_base() as i64;

        if delta != 0 {
            relocator::apply_relocations(&self.pe, memory, delta)?;
        }

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// skip relocations (use if loaded at preferred base)
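    ///
    /// A sketch of a pipeline that pins the image at its preferred base and can
    /// therefore skip relocation entirely (assumes `ParsedPe::preferred_base` is
    /// callable from user code; kept as `ignore` for that reason):
    ///
    /// ```ignore
    /// // `data` holds the raw PE image bytes
    /// let parsed = ManualMapper::parse(&data)?;
    /// let preferred = parsed.pe().preferred_base();
    /// let mapper = parsed
    ///     .allocate_at(preferred)?
    ///     .map_sections()?
    ///     .skip_relocations()
    ///     .resolve_imports()?
    ///     .process_tls()?
    ///     .finalize()?;
    /// ```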
    pub fn skip_relocations(self) -> ManualMapper<state::Relocated> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::Relocated> {
    /// get base address
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// resolve import address table
    pub fn resolve_imports(mut self) -> Result<ManualMapper<state::ImportsResolved>> {
        let memory = self.memory.as_mut().unwrap();
        resolver::resolve_imports(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// resolve imports with custom resolver function
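    ///
    /// the closure is consulted for each import and returns an optional address
    /// to patch into the IAT. A rough sketch (assuming the two arguments are the
    /// module and symbol names; `my_lookup` is a hypothetical helper):
    ///
    /// ```ignore
    /// let mapper = mapper.resolve_imports_with(|module, symbol| {
    ///     // look the import up however you like and return Some(address)
    ///     my_lookup(module, symbol)
    /// })?;
    /// ```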
    pub fn resolve_imports_with<F>(
        mut self,
        resolver_fn: F,
    ) -> Result<ManualMapper<state::ImportsResolved>>
    where
        F: Fn(&str, &str) -> Option<usize>,
    {
        let memory = self.memory.as_mut().unwrap();
        resolver::resolve_imports_custom(&self.pe, memory, resolver_fn)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// skip import resolution (use if the PE has no imports or they were resolved manually)
    pub fn skip_imports(self) -> ManualMapper<state::ImportsResolved> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::ImportsResolved> {
    /// get base address
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// process TLS callbacks
    pub fn process_tls(mut self) -> Result<ManualMapper<state::TlsProcessed>> {
        let memory = self.memory.as_mut().unwrap();
        tls::process_tls(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// skip TLS processing
    pub fn skip_tls(self) -> ManualMapper<state::TlsProcessed> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::TlsProcessed> {
    /// get base address
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// finalize mapping with proper memory protections
    pub fn finalize(mut self) -> Result<ManualMapper<state::Ready>> {
        let memory = self.memory.as_mut().unwrap();
        mapper::set_section_protections(&self.pe, memory)?;

        Ok(ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        })
    }

    /// finalize without setting protections (keeps RW everywhere)
    pub fn finalize_without_protections(self) -> ManualMapper<state::Ready> {
        ManualMapper {
            pe: self.pe,
            memory: self.memory,
            _state: PhantomData,
        }
    }
}

impl ManualMapper<state::Ready> {
    /// call DllMain with DLL_PROCESS_ATTACH
    pub fn call_entry_point(&self) -> Result<bool> {
        let memory = self.memory.as_ref().unwrap();
        entry::call_dll_attach(&self.pe, memory)
    }

    /// call DllMain with custom reason
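    ///
    /// A sketch using the raw Windows reason codes (1 = DLL_PROCESS_ATTACH,
    /// 2 = DLL_THREAD_ATTACH, 3 = DLL_THREAD_DETACH, 0 = DLL_PROCESS_DETACH);
    /// the re-exported [`reason`] module is the natural place to look for
    /// named constants:
    ///
    /// ```ignore
    /// // notify the mapped module of a new thread
    /// mapper.call_entry_point_with_reason(2)?;
    /// ```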
    pub fn call_entry_point_with_reason(&self, call_reason: u32) -> Result<bool> {
        let memory = self.memory.as_ref().unwrap();
        entry::call_entry_point(&self.pe, memory, call_reason)
    }

    /// get export address by name
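    ///
    /// A minimal sketch of calling into a mapped export (the export name and
    /// signature are illustrative, not part of this API):
    ///
    /// ```ignore
    /// let addr = mapper.get_export("Initialize")?;
    /// // the caller must know the export's real signature
    /// let init: unsafe extern "system" fn() -> u32 = unsafe { core::mem::transmute(addr) };
    /// let status = unsafe { init() };
    /// ```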
    pub fn get_export(&self, name: &str) -> Result<usize> {
        let memory = self.memory.as_ref().unwrap();
        resolver::get_mapped_export(&self.pe, memory, name)
    }

    /// get export address by ordinal
    pub fn get_export_by_ordinal(&self, ordinal: u16) -> Result<usize> {
        let memory = self.memory.as_ref().unwrap();
        resolver::get_mapped_export_by_ordinal(&self.pe, memory, ordinal)
    }

    /// get base address of mapped image
    pub fn base(&self) -> usize {
        self.memory.as_ref().unwrap().base()
    }

    /// get size of mapped image
    pub fn size(&self) -> usize {
        self.memory.as_ref().unwrap().size()
    }

    /// get reference to parsed PE
    pub fn pe(&self) -> &ParsedPe {
        &self.pe
    }

    /// consume and return raw memory handle
    pub fn into_memory(mut self) -> MappedMemory {
        self.memory.take().unwrap()
    }

    /// get pointer to specific offset in mapped image
    pub fn ptr_at(&self, offset: usize) -> *mut u8 {
        self.memory.as_ref().unwrap().ptr_at(offset)
    }

    /// unmap and free memory
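    ///
    /// calls DllMain with DLL_PROCESS_DETACH before freeing the image. A
    /// minimal lifecycle sketch (the path is illustrative):
    ///
    /// ```no_run
    /// # use wraith::manipulation::manual_map::map_file;
    /// let mapper = map_file(r"C:\path\to\module.dll")?;
    /// mapper.call_entry_point()?;
    /// // ... use the mapped module ...
    /// mapper.unmap()?;
    /// # Ok::<(), wraith::WraithError>(())
    /// ```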
    pub fn unmap(mut self) -> Result<()> {
        if let Some(memory) = self.memory.take() {
            // call DllMain with DLL_PROCESS_DETACH first (ignore errors)
            let _ = entry::call_dll_detach(&self.pe, &memory);
            memory.free()?;
        }
        Ok(())
    }
}

/// convenience function: map PE from bytes with all default steps
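///
/// A minimal sketch with an embedded payload (the include path is illustrative):
///
/// ```ignore
/// static PAYLOAD: &[u8] = include_bytes!("payload.dll");
///
/// let mapper = wraith::manipulation::manual_map::map_pe(PAYLOAD)?;
/// println!("mapped {:#x} bytes at {:#x}", mapper.size(), mapper.base());
/// ```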
pub fn map_pe(data: &[u8]) -> Result<ManualMapper<state::Ready>> {
    ManualMapper::parse(data)?
        .allocate()?
        .map_sections()?
        .relocate()?
        .resolve_imports()?
        .process_tls()?
        .finalize()
}

/// convenience function: map PE from file with all default steps
pub fn map_file(path: &str) -> Result<ManualMapper<state::Ready>> {
    ManualMapper::from_file(path)?
        .allocate()?
        .map_sections()?
        .relocate()?
        .resolve_imports()?
        .process_tls()?
        .finalize()
}

/// convenience function: map PE from bytes and call entry point
pub fn map_and_call(data: &[u8]) -> Result<ManualMapper<state::Ready>> {
    let mapper = map_pe(data)?;
    mapper.call_entry_point()?;
    Ok(mapper)
}

/// convenience function: map PE from file and call entry point
pub fn map_file_and_call(path: &str) -> Result<ManualMapper<state::Ready>> {
    let mapper = map_file(path)?;
    mapper.call_entry_point()?;
    Ok(mapper)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_and_allocate() {
        let exe_path = std::env::current_exe().unwrap();
        let data = std::fs::read(&exe_path).unwrap();

        let mapper = ManualMapper::parse(&data).unwrap();
        assert!(mapper.pe().size_of_image() > 0);

        let mapper = mapper.allocate().unwrap();
        assert!(mapper.base() != 0);
    }

    #[test]
    fn test_map_sections() {
        let exe_path = std::env::current_exe().unwrap();
        let data = std::fs::read(&exe_path).unwrap();

        let mapper = ManualMapper::parse(&data)
            .unwrap()
            .allocate()
            .unwrap()
            .map_sections()
            .unwrap();

        // sections mapped; base address should be non-zero
        assert!(mapper.base() != 0);
    }

    // note: full integration tests that call entry points should be done
    // with actual test DLLs, not the running executable
}