// wraith/manipulation/inline_hook/hook/chain.rs

use crate::error::{Result, WraithError};
use crate::util::memory::ProtectionGuard;
use crate::manipulation::inline_hook::arch::Architecture;
use crate::manipulation::inline_hook::trampoline::{ExecutableMemory, TrampolineBuilder};
use core::marker::PhantomData;

const PAGE_EXECUTE_READWRITE: u32 = 0x40;

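/// One registered detour plus the trampoline it should call to reach the next
/// hook in the chain (or the original function, for the last entry).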
struct ChainEntry {
    detour: usize,
    trampoline: ExecutableMemory,
    priority: i32,
}

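/// An ordered chain of inline hooks installed on a single target function.
///
/// Detours are kept sorted by `priority` (lowest value first): the target's
/// prologue jumps to the first detour, each detour's trampoline forwards to the
/// next one, and the last trampoline re-executes the relocated prologue before
/// jumping back into the original function.
///
/// A minimal usage sketch, assuming a concrete `Architecture` implementation
/// such as a hypothetical `X86_64` type exported elsewhere in the crate:
///
/// ```ignore
/// let mut chain: HookChain<X86_64> = HookChain::new(target_fn as usize)?;
/// // `add` returns the trampoline address; the detour can cast it to the
/// // target's signature to continue into the original function.
/// let trampoline = chain.add(my_detour as usize, 0)?;
/// chain.remove(my_detour as usize)?;
/// ```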
pub struct HookChain<A: Architecture> {
    target: usize,
    entries: Vec<ChainEntry>,
    original_bytes: Vec<u8>,
    prologue_size: usize,
    current_hook: Vec<u8>,
    _arch: PhantomData<A>,
}

impl<A: Architecture> HookChain<A> {
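    /// Creates an empty chain for the function at `target`, recording the
    /// relocatable prologue bytes that every hook on this chain will overwrite.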
    pub fn new(target: usize) -> Result<Self> {
        let min_size = A::MIN_HOOK_SIZE;

        let target_bytes = unsafe {
            core::slice::from_raw_parts(target as *const u8, 64)
        };

        let boundary = A::find_instruction_boundary(target_bytes, min_size)
            .ok_or_else(|| WraithError::HookDetectionFailed {
                function: format!("{:#x}", target),
                reason: "failed to find instruction boundary".into(),
            })?;

        let original_bytes = target_bytes[..boundary].to_vec();

        Ok(Self {
            target,
            entries: Vec::new(),
            original_bytes,
            prologue_size: boundary,
            current_hook: Vec::new(),
            _arch: PhantomData,
        })
    }

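    /// Registers `detour` at the given `priority` (lower values run first) and
    /// returns the address of the trampoline the detour should call to continue
    /// down the chain.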
    pub fn add(&mut self, detour: usize, priority: i32) -> Result<usize> {
        let pos = self.entries
            .iter()
            .position(|e| e.priority > priority)
            .unwrap_or(self.entries.len());

        let next_target = if pos < self.entries.len() {
            self.entries[pos].detour
        } else {
            self.target
        };

        let trampoline = self.build_trampoline_to(next_target)?;
        let trampoline_addr = trampoline.base();

        self.entries.insert(pos, ChainEntry {
            detour,
            trampoline,
            priority,
        });

        self.rebuild_trampolines_before(pos)?;

        self.update_target_hook()?;

        Ok(trampoline_addr)
    }

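    /// Unregisters `detour`. Returns `Ok(false)` if the detour was never part of
    /// this chain; restores the original prologue when the last hook is removed.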
    pub fn remove(&mut self, detour: usize) -> Result<bool> {
        let pos = match self.entries.iter().position(|e| e.detour == detour) {
            Some(p) => p,
            None => return Ok(false),
        };

        self.entries.remove(pos);

        if self.entries.is_empty() {
            self.restore_original()?;
        } else {
            self.rebuild_all_trampolines()?;
            self.update_target_hook()?;
        }

        Ok(true)
    }

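    /// Address of the trampoline that re-executes the relocated prologue and
    /// jumps back into the original function, or `None` if the chain is empty.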
    pub fn original(&self) -> Option<usize> {
        self.entries.last().map(|e| e.trampoline.base())
    }

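    /// Number of detours currently registered on this chain.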
    pub fn len(&self) -> usize {
        self.entries.len()
    }

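    /// Returns `true` if no detours are registered.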
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }

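    /// Address of the hooked function.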
    pub fn target(&self) -> usize {
        self.target
    }

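    /// Consumes the chain and restores the original prologue bytes immediately.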
    pub fn restore(mut self) -> Result<()> {
        self.restore_original()?;
        Ok(())
    }

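    /// Builds a trampoline near the target. If `target` is the hooked function
    /// itself, the trampoline re-executes the relocated prologue and jumps past
    /// the overwritten bytes; otherwise it is a plain jump to the next detour.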
    fn build_trampoline_to(&self, target: usize) -> Result<ExecutableMemory> {
        let mut memory = ExecutableMemory::allocate_near(self.target, 64)?;

        if target == self.target {
            // Last link in the chain: replay the displaced prologue, then jump
            // back to the first byte after it in the original function.
            let mut code = Vec::with_capacity(self.prologue_size + A::JMP_ABS_SIZE);

            // Copy the saved prologue one instruction at a time, relocating
            // position-dependent instructions to their new address.
            let mut src_offset = 0;
            while src_offset < self.prologue_size {
                let remaining = &self.original_bytes[src_offset..];
                let insn_len = A::find_instruction_boundary(remaining, 1).unwrap_or(1);
                let instruction = &self.original_bytes[src_offset..src_offset + insn_len];

                if A::needs_relocation(instruction) {
                    let old_addr = self.target + src_offset;
                    let new_addr = memory.base() + code.len();
                    if let Some(relocated) = A::relocate_instruction(instruction, old_addr, new_addr) {
                        code.extend_from_slice(&relocated);
                    } else {
                        // No relocation available; fall back to copying verbatim.
                        code.extend_from_slice(instruction);
                    }
                } else {
                    code.extend_from_slice(instruction);
                }

                src_offset += insn_len;
            }

            // Continue at the first original instruction that was not displaced.
            let continuation = self.target + self.prologue_size;
            let jmp_location = memory.base() + code.len();

            if let Some(jmp) = A::encode_jmp_rel(jmp_location, continuation) {
                code.extend_from_slice(&jmp);
            } else {
                code.extend_from_slice(&A::encode_jmp_abs(continuation));
            }

            memory.write(&code)?;
        } else {
            // Intermediate link: a bare jump to the next detour in the chain.
            let jmp = A::encode_jmp_rel(memory.base(), target)
                .unwrap_or_else(|| A::encode_jmp_abs(target));
            memory.write(&jmp)?;
        }

        memory.flush_icache()?;

        Ok(memory)
    }

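    /// After an insertion at `pos`, re-points the trampoline of the preceding
    /// entry (if any) at the newly inserted detour.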
    fn rebuild_trampolines_before(&mut self, pos: usize) -> Result<()> {
        if pos > 0 {
            let new_target = self.entries[pos].detour;
            let prev = &mut self.entries[pos - 1];

            let mut new_tramp = ExecutableMemory::allocate_near(self.target, 64)?;
            let jmp = A::encode_jmp_rel(new_tramp.base(), new_target)
                .unwrap_or_else(|| A::encode_jmp_abs(new_target));
            new_tramp.write(&jmp)?;
            new_tramp.flush_icache()?;

            prev.trampoline = new_tramp;
        }

        Ok(())
    }

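    /// Rebuilds every trampoline so that each entry forwards to the next detour
    /// in priority order, with the last entry forwarding to the original function.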
    fn rebuild_all_trampolines(&mut self) -> Result<()> {
        let len = self.entries.len();

        for i in 0..len {
            let next_target = if i + 1 < len {
                self.entries[i + 1].detour
            } else {
                self.target
            };

            let new_tramp = self.build_trampoline_to(next_target)?;
            self.entries[i].trampoline = new_tramp;
        }

        Ok(())
    }

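    /// Writes (or rewrites) the jump at the target's prologue so that execution
    /// enters the first detour in the chain.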
    fn update_target_hook(&mut self) -> Result<()> {
        if self.entries.is_empty() {
            return self.restore_original();
        }

        let first_detour = self.entries[0].detour;

        let hook_stub = A::encode_jmp_rel(self.target, first_detour)
            .unwrap_or_else(|| A::encode_jmp_abs(first_detour));

        // Guard against a stub larger than the bytes we are allowed to overwrite;
        // copying a truncated jump would corrupt the target.
        if hook_stub.len() > self.prologue_size {
            return Err(WraithError::HookDetectionFailed {
                function: format!("{:#x}", self.target),
                reason: "hook stub does not fit in the relocatable prologue".into(),
            });
        }

        let mut padded = hook_stub;
        if padded.len() < self.prologue_size {
            let padding = A::encode_nop_sled(self.prologue_size - padded.len());
            padded.extend_from_slice(&padding);
        }

        {
            let _guard = ProtectionGuard::new(
                self.target,
                self.prologue_size,
                PAGE_EXECUTE_READWRITE,
            )?;

            unsafe {
                core::ptr::copy_nonoverlapping(
                    padded.as_ptr(),
                    self.target as *mut u8,
                    self.prologue_size,
                );
            }
        }

        flush_icache(self.target, self.prologue_size)?;
        self.current_hook = padded;

        Ok(())
    }

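    /// Copies the saved prologue bytes back over the target function and clears
    /// the recorded hook stub.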
    fn restore_original(&mut self) -> Result<()> {
        let _guard = ProtectionGuard::new(
            self.target,
            self.prologue_size,
            PAGE_EXECUTE_READWRITE,
        )?;

        unsafe {
            core::ptr::copy_nonoverlapping(
                self.original_bytes.as_ptr(),
                self.target as *mut u8,
                self.prologue_size,
            );
        }

        flush_icache(self.target, self.prologue_size)?;
        self.current_hook.clear();

        Ok(())
    }
}

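// Dropping the chain restores the original prologue on a best-effort basis;
// failures are ignored because `drop` cannot return an error.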
impl<A: Architecture> Drop for HookChain<A> {
    fn drop(&mut self) {
        let _ = self.restore_original();
    }
}

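/// Invalidates the instruction cache for the patched range via
/// `FlushInstructionCache` so the CPU does not execute stale bytes.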
fn flush_icache(address: usize, size: usize) -> Result<()> {
    let result = unsafe {
        FlushInstructionCache(
            GetCurrentProcess(),
            address as *const _,
            size,
        )
    };

    if result == 0 {
        Err(WraithError::from_last_error("FlushInstructionCache"))
    } else {
        Ok(())
    }
}

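// Minimal kernel32 bindings used by `flush_icache`.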
#[link(name = "kernel32")]
extern "system" {
    fn FlushInstructionCache(
        hProcess: *mut core::ffi::c_void,
        lpBaseAddress: *const core::ffi::c_void,
        dwSize: usize,
    ) -> i32;

    fn GetCurrentProcess() -> *mut core::ffi::c_void;
}