// wraith/manipulation/inline_hook/hook/chain.rs

#[cfg(all(not(feature = "std"), feature = "alloc"))]
use alloc::{format, string::String, vec::Vec};

#[cfg(feature = "std")]
use std::{format, string::String, vec::Vec};

use crate::error::{Result, WraithError};
use crate::util::memory::ProtectionGuard;
use crate::manipulation::inline_hook::arch::Architecture;
use crate::manipulation::inline_hook::trampoline::ExecutableMemory;
use core::marker::PhantomData;

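/// Windows page-protection constant used while patching code in place.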
const PAGE_EXECUTE_READWRITE: u32 = 0x40;

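/// A single detour registered in a [`HookChain`].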
struct ChainEntry {
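    /// Address of the detour function.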
    detour: usize,
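    /// Trampoline this detour calls to reach the next entry, or the original function for the last entry.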
    trampoline: ExecutableMemory,
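    /// Ordering key; lower values run earlier in the chain.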
    priority: i32,
}

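/// A priority-ordered chain of inline hooks installed on a single target function.
///
/// The target's prologue is overwritten with a jump to the first detour; each
/// detour continues through its trampoline to the next one, and the final
/// trampoline re-executes the saved prologue before returning to the original
/// function body.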
pub struct HookChain<A: Architecture> {
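    /// Address of the hooked target function.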
    target: usize,
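    /// Registered detours, sorted by ascending priority.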
    entries: Vec<ChainEntry>,
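    /// Prologue bytes saved from the target before any hook was written.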
    original_bytes: Vec<u8>,
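    /// Number of prologue bytes the hook overwrites.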
    prologue_size: usize,
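    /// Bytes currently patched over the target prologue; empty when no hook is installed.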
    current_hook: Vec<u8>,
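    /// Binds the chain to a concrete [`Architecture`] implementation.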
    _arch: PhantomData<A>,
}

impl<A: Architecture> HookChain<A> {
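    /// Prepares a hook chain for the function at `target`.
    ///
    /// This only analyses the prologue and saves the bytes that will be
    /// overwritten; no hook is written until the first detour is added.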
    pub fn new(target: usize) -> Result<Self> {
        let min_size = A::MIN_HOOK_SIZE;

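        // Read a window of the target prologue so instruction boundaries can be scanned.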
        let target_bytes = unsafe {
            core::slice::from_raw_parts(target as *const u8, 64)
        };

        let boundary = A::find_instruction_boundary(target_bytes, min_size)
            .ok_or_else(|| WraithError::HookDetectionFailed {
                function: format!("{:#x}", target),
                reason: "failed to find instruction boundary".into(),
            })?;

        let original_bytes = target_bytes[..boundary].to_vec();

        Ok(Self {
            target,
            entries: Vec::new(),
            original_bytes,
            prologue_size: boundary,
            current_hook: Vec::new(),
            _arch: PhantomData,
        })
    }

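    /// Adds a detour with the given priority and installs (or reinstalls) the hook.
    ///
    /// Lower priority values are placed earlier in the chain. Returns the
    /// address the detour should call to continue to the next hook or the
    /// original function.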
    pub fn add(&mut self, detour: usize, priority: i32) -> Result<usize> {
        let pos = self.entries
            .iter()
            .position(|e| e.priority > priority)
            .unwrap_or(self.entries.len());

        let next_target = if pos < self.entries.len() {
            self.entries[pos].detour
        } else {
            self.target
        };

        let trampoline = self.build_trampoline_to(next_target)?;
        let trampoline_addr = trampoline.base();

        self.entries.insert(pos, ChainEntry {
            detour,
            trampoline,
            priority,
        });

        self.rebuild_trampolines_before(pos)?;

        self.update_target_hook()?;

        Ok(trampoline_addr)
    }

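    /// Removes the entry whose detour address matches `detour`.
    ///
    /// Returns `Ok(false)` if no such detour is registered. When the last
    /// entry is removed, the original prologue is restored.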
    pub fn remove(&mut self, detour: usize) -> Result<bool> {
        let pos = match self.entries.iter().position(|e| e.detour == detour) {
            Some(p) => p,
            None => return Ok(false),
        };

        self.entries.remove(pos);

        if self.entries.is_empty() {
            self.restore_original()?;
        } else {
            self.rebuild_all_trampolines()?;
            self.update_target_hook()?;
        }

        Ok(true)
    }

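    /// Returns the address of the trampoline that executes the original
    /// function, or `None` if the chain is empty.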
    pub fn original(&self) -> Option<usize> {
        self.entries.last().map(|e| e.trampoline.base())
    }

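    /// Number of detours currently in the chain.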
    pub fn len(&self) -> usize {
        self.entries.len()
    }

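    /// Returns `true` if no detours are registered.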
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }

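    /// Address of the hooked target function.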
    pub fn target(&self) -> usize {
        self.target
    }

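    /// Consumes the chain and restores the original prologue bytes.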
    pub fn restore(mut self) -> Result<()> {
        self.restore_original()?;
        Ok(())
    }

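    /// Builds a trampoline that jumps to `target`.
    ///
    /// When `target` is the hooked function itself, the saved prologue is
    /// copied (relocating position-dependent instructions) followed by a jump
    /// past the overwritten bytes; otherwise the trampoline is a plain jump.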
    fn build_trampoline_to(&self, target: usize) -> Result<ExecutableMemory> {
        let mut memory = ExecutableMemory::allocate_near(self.target, 64)?;

        if target == self.target {
            let mut code = Vec::with_capacity(self.prologue_size + A::JMP_ABS_SIZE);

            let mut src_offset = 0;
            while src_offset < self.prologue_size {
                let remaining = &self.original_bytes[src_offset..];
                let insn_len = A::find_instruction_boundary(remaining, 1).unwrap_or(1);
                let instruction = &self.original_bytes[src_offset..src_offset + insn_len];

                if A::needs_relocation(instruction) {
                    let old_addr = self.target + src_offset;
                    let new_addr = memory.base() + code.len();
                    if let Some(relocated) = A::relocate_instruction(instruction, old_addr, new_addr) {
                        code.extend_from_slice(&relocated);
                    } else {
                        code.extend_from_slice(instruction);
                    }
                } else {
                    code.extend_from_slice(instruction);
                }

                src_offset += insn_len;
            }

            let continuation = self.target + self.prologue_size;
            let jmp_location = memory.base() + code.len();

            if let Some(jmp) = A::encode_jmp_rel(jmp_location, continuation) {
                code.extend_from_slice(&jmp);
            } else {
                code.extend_from_slice(&A::encode_jmp_abs(continuation));
            }

            memory.write(&code)?;
        } else {
            let jmp = A::encode_jmp_rel(memory.base(), target)
                .unwrap_or_else(|| A::encode_jmp_abs(target));
            memory.write(&jmp)?;
        }

        memory.flush_icache()?;

        Ok(memory)
    }

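    /// After inserting at `pos`, re-points the previous entry's trampoline at
    /// the newly inserted detour.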
    fn rebuild_trampolines_before(&mut self, pos: usize) -> Result<()> {
        if pos > 0 {
            let new_target = self.entries[pos].detour;
            let prev = &mut self.entries[pos - 1];

            let mut new_tramp = ExecutableMemory::allocate_near(self.target, 64)?;
            let jmp = A::encode_jmp_rel(new_tramp.base(), new_target)
                .unwrap_or_else(|| A::encode_jmp_abs(new_target));
            new_tramp.write(&jmp)?;
            new_tramp.flush_icache()?;

            prev.trampoline = new_tramp;
        }

        Ok(())
    }

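    /// Rebuilds every trampoline so each entry forwards to its successor and
    /// the last entry forwards to the original function.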
    fn rebuild_all_trampolines(&mut self) -> Result<()> {
        let len = self.entries.len();

        for i in 0..len {
            let next_target = if i + 1 < len {
                self.entries[i + 1].detour
            } else {
                self.target
            };

            let new_tramp = self.build_trampoline_to(next_target)?;
            self.entries[i].trampoline = new_tramp;
        }

        Ok(())
    }

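    /// Rewrites the target prologue with a jump to the first detour in the
    /// chain, NOP-padding up to the saved prologue size.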
    fn update_target_hook(&mut self) -> Result<()> {
        if self.entries.is_empty() {
            return self.restore_original();
        }

        let first_detour = self.entries[0].detour;

        let hook_stub = A::encode_jmp_rel(self.target, first_detour)
            .unwrap_or_else(|| A::encode_jmp_abs(first_detour));

        let mut padded = hook_stub.clone();
        if padded.len() < self.prologue_size {
            let padding = A::encode_nop_sled(self.prologue_size - padded.len());
            padded.extend_from_slice(&padding);
        }

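        // Temporarily make the prologue writable, then patch it in place.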
        {
            let _guard = ProtectionGuard::new(
                self.target,
                self.prologue_size,
                PAGE_EXECUTE_READWRITE,
            )?;

            unsafe {
                core::ptr::copy_nonoverlapping(
                    padded.as_ptr(),
                    self.target as *mut u8,
                    self.prologue_size,
                );
            }
        }

        flush_icache(self.target, self.prologue_size)?;
        self.current_hook = padded;

        Ok(())
    }

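    /// Writes the saved prologue bytes back over the target.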
    fn restore_original(&mut self) -> Result<()> {
        let _guard = ProtectionGuard::new(
            self.target,
            self.prologue_size,
            PAGE_EXECUTE_READWRITE,
        )?;

        unsafe {
            core::ptr::copy_nonoverlapping(
                self.original_bytes.as_ptr(),
                self.target as *mut u8,
                self.prologue_size,
            );
        }

        flush_icache(self.target, self.prologue_size)?;
        self.current_hook.clear();

        Ok(())
    }
}

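// Dropping the chain restores the original prologue so the target is left unhooked.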
impl<A: Architecture> Drop for HookChain<A> {
    fn drop(&mut self) {
        let _ = self.restore_original();
    }
}

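/// Flushes the instruction cache for a patched region via `FlushInstructionCache`.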
fn flush_icache(address: usize, size: usize) -> Result<()> {
    let result = unsafe {
        FlushInstructionCache(
            GetCurrentProcess(),
            address as *const _,
            size,
        )
    };

    if result == 0 {
        Err(WraithError::from_last_error("FlushInstructionCache"))
    } else {
        Ok(())
    }
}

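// Minimal kernel32 bindings used by `flush_icache`.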
#[link(name = "kernel32")]
extern "system" {
    fn FlushInstructionCache(
        hProcess: *mut core::ffi::c_void,
        lpBaseAddress: *const core::ffi::c_void,
        dwSize: usize,
    ) -> i32;

    fn GetCurrentProcess() -> *mut core::ffi::c_void;
}