//! x86/x86_64 instruction encoder (wraith/manipulation/inline_hook/asm/encoder.rs).
#[cfg(all(not(feature = "std"), feature = "alloc"))]
use alloc::vec::Vec;

#[cfg(feature = "std")]
use std::vec::Vec;

/// Incremental x86/x86_64 machine-code encoder backed by a growable
/// byte buffer. Instructions are appended through the builder-style
/// methods on `impl Encoder`; the finished bytes are read back via
/// `bytes()` / `into_bytes()`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Encoder {
    // Raw encoded instruction bytes, in emission order.
    buffer: Vec<u8>,
}
15
16impl Encoder {
17 pub fn new() -> Self {
19 Self {
20 buffer: Vec::with_capacity(64),
21 }
22 }
23
24 pub fn with_capacity(capacity: usize) -> Self {
26 Self {
27 buffer: Vec::with_capacity(capacity),
28 }
29 }
30
31 pub fn bytes(&self) -> &[u8] {
33 &self.buffer
34 }
35
36 pub fn into_bytes(self) -> Vec<u8> {
38 self.buffer
39 }
40
41 pub fn len(&self) -> usize {
43 self.buffer.len()
44 }
45
46 pub fn is_empty(&self) -> bool {
48 self.buffer.is_empty()
49 }
50
51 pub fn clear(&mut self) {
53 self.buffer.clear();
54 }
55
56 pub fn raw(&mut self, bytes: &[u8]) -> &mut Self {
58 self.buffer.extend_from_slice(bytes);
59 self
60 }
61
62 pub fn byte(&mut self, b: u8) -> &mut Self {
64 self.buffer.push(b);
65 self
66 }
67
68 pub fn nop(&mut self) -> &mut Self {
72 self.buffer.push(0x90);
73 self
74 }
75
76 pub fn nop_sled(&mut self, count: usize) -> &mut Self {
78 let mut remaining = count;
79 while remaining > 0 {
80 match remaining {
81 1 => {
82 self.buffer.push(0x90);
83 remaining -= 1;
84 }
85 2 => {
86 self.buffer.extend_from_slice(&[0x66, 0x90]);
87 remaining -= 2;
88 }
89 3 => {
90 self.buffer.extend_from_slice(&[0x0F, 0x1F, 0x00]);
91 remaining -= 3;
92 }
93 4 => {
94 self.buffer.extend_from_slice(&[0x0F, 0x1F, 0x40, 0x00]);
95 remaining -= 4;
96 }
97 5 => {
98 self.buffer
99 .extend_from_slice(&[0x0F, 0x1F, 0x44, 0x00, 0x00]);
100 remaining -= 5;
101 }
102 6 => {
103 self.buffer
104 .extend_from_slice(&[0x66, 0x0F, 0x1F, 0x44, 0x00, 0x00]);
105 remaining -= 6;
106 }
107 7 => {
108 self.buffer
109 .extend_from_slice(&[0x0F, 0x1F, 0x80, 0x00, 0x00, 0x00, 0x00]);
110 remaining -= 7;
111 }
112 _ => {
113 self.buffer
114 .extend_from_slice(&[0x0F, 0x1F, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00]);
115 remaining -= 8;
116 }
117 }
118 }
119 self
120 }
121
122 pub fn int3(&mut self) -> &mut Self {
124 self.buffer.push(0xCC);
125 self
126 }
127
128 pub fn ret(&mut self) -> &mut Self {
130 self.buffer.push(0xC3);
131 self
132 }
133
134 pub fn push_imm32(&mut self, value: u32) -> &mut Self {
136 self.buffer.push(0x68);
137 self.buffer.extend_from_slice(&value.to_le_bytes());
138 self
139 }
140
141 pub fn push_imm8(&mut self, value: i8) -> &mut Self {
143 self.buffer.push(0x6A);
144 self.buffer.push(value as u8);
145 self
146 }
147
148 pub fn jmp_rel32(&mut self, from: usize, to: usize) -> &mut Self {
150 let offset = (to as i64 - from as i64 - 5) as i32;
151 self.buffer.push(0xE9);
152 self.buffer.extend_from_slice(&offset.to_le_bytes());
153 self
154 }
155
156 pub fn jmp_rel8(&mut self, offset: i8) -> &mut Self {
158 self.buffer.push(0xEB);
159 self.buffer.push(offset as u8);
160 self
161 }
162
163 pub fn call_rel32(&mut self, from: usize, to: usize) -> &mut Self {
165 let offset = (to as i64 - from as i64 - 5) as i32;
166 self.buffer.push(0xE8);
167 self.buffer.extend_from_slice(&offset.to_le_bytes());
168 self
169 }
170
171 #[cfg(target_arch = "x86_64")]
175 pub fn jmp_abs64(&mut self, target: u64) -> &mut Self {
176 self.buffer
178 .extend_from_slice(&[0xFF, 0x25, 0x00, 0x00, 0x00, 0x00]);
179 self.buffer.extend_from_slice(&target.to_le_bytes());
180 self
181 }
182
183 #[cfg(target_arch = "x86_64")]
185 pub fn mov_rax_imm64(&mut self, value: u64) -> &mut Self {
186 self.buffer.extend_from_slice(&[0x48, 0xB8]);
188 self.buffer.extend_from_slice(&value.to_le_bytes());
189 self
190 }
191
192 #[cfg(target_arch = "x86_64")]
194 pub fn jmp_rax(&mut self) -> &mut Self {
195 self.buffer.extend_from_slice(&[0xFF, 0xE0]);
197 self
198 }
199
200 #[cfg(target_arch = "x86_64")]
202 pub fn call_rax(&mut self) -> &mut Self {
203 self.buffer.extend_from_slice(&[0xFF, 0xD0]);
205 self
206 }
207
208 #[cfg(target_arch = "x86_64")]
210 pub fn call_abs64(&mut self, target: u64) -> &mut Self {
211 self.buffer
213 .extend_from_slice(&[0xFF, 0x15, 0x00, 0x00, 0x00, 0x00]);
214 self.buffer.extend_from_slice(&target.to_le_bytes());
215 self
216 }
217
218 #[cfg(target_arch = "x86_64")]
220 pub fn push_rax(&mut self) -> &mut Self {
221 self.buffer.push(0x50);
222 self
223 }
224
225 #[cfg(target_arch = "x86_64")]
227 pub fn pop_rax(&mut self) -> &mut Self {
228 self.buffer.push(0x58);
229 self
230 }
231
232 #[cfg(target_arch = "x86_64")]
234 pub fn sub_rsp_imm8(&mut self, value: i8) -> &mut Self {
235 self.buffer.extend_from_slice(&[0x48, 0x83, 0xEC]);
237 self.buffer.push(value as u8);
238 self
239 }
240
241 #[cfg(target_arch = "x86_64")]
243 pub fn add_rsp_imm8(&mut self, value: i8) -> &mut Self {
244 self.buffer.extend_from_slice(&[0x48, 0x83, 0xC4]);
246 self.buffer.push(value as u8);
247 self
248 }
249
250 #[cfg(target_arch = "x86")]
254 pub fn push_eax(&mut self) -> &mut Self {
255 self.buffer.push(0x50);
256 self
257 }
258
259 #[cfg(target_arch = "x86")]
261 pub fn pop_eax(&mut self) -> &mut Self {
262 self.buffer.push(0x58);
263 self
264 }
265
266 #[cfg(target_arch = "x86")]
268 pub fn push_ebp(&mut self) -> &mut Self {
269 self.buffer.push(0x55);
270 self
271 }
272
273 #[cfg(target_arch = "x86")]
275 pub fn mov_ebp_esp(&mut self) -> &mut Self {
276 self.buffer.extend_from_slice(&[0x8B, 0xEC]);
277 self
278 }
279
280 #[cfg(target_arch = "x86")]
282 pub fn jmp_abs32(&mut self, target: u32) -> &mut Self {
283 self.push_imm32(target);
285 self.ret();
286 self
287 }
288}
289
290impl Default for Encoder {
291 fn default() -> Self {
292 Self::new()
293 }
294}
295
296impl AsRef<[u8]> for Encoder {
297 fn as_ref(&self) -> &[u8] {
298 &self.buffer
299 }
300}
301
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_nop() {
        let mut enc = Encoder::new();
        enc.nop();
        assert_eq!(enc.bytes(), &[0x90]);
    }

    #[test]
    fn test_nop_sled() {
        // Every requested size must be filled exactly, regardless of
        // which multi-byte NOP forms are chosen.
        for size in 1..=16 {
            let mut enc = Encoder::new();
            enc.nop_sled(size);
            assert_eq!(enc.len(), size);
        }
    }

    #[test]
    fn test_push_ret() {
        let mut enc = Encoder::new();
        enc.push_imm32(0xDEADBEEF).ret();
        assert_eq!(enc.bytes(), &[0x68, 0xEF, 0xBE, 0xAD, 0xDE, 0xC3]);
    }

    #[test]
    fn test_jmp_rel32() {
        let mut enc = Encoder::new();
        enc.jmp_rel32(0x1000, 0x1100);
        // disp = 0x1100 - 0x1000 - 5 = 0xFB, little-endian.
        assert_eq!(enc.bytes(), &[0xE9, 0xFB, 0x00, 0x00, 0x00]);
    }

    #[test]
    fn test_jmp_rel32_backward() {
        let mut enc = Encoder::new();
        enc.jmp_rel32(0x1100, 0x1000);
        // disp = -0x105 = 0xFFFF_FEFB, little-endian.
        assert_eq!(enc.bytes(), &[0xE9, 0xFB, 0xFE, 0xFF, 0xFF]);
    }

    #[cfg(target_arch = "x86_64")]
    #[test]
    fn test_jmp_abs64() {
        let mut enc = Encoder::new();
        enc.jmp_abs64(0xDEADBEEF12345678);
        assert_eq!(enc.len(), 14);
        assert_eq!(&enc.bytes()[0..6], &[0xFF, 0x25, 0x00, 0x00, 0x00, 0x00]);
    }

    #[cfg(target_arch = "x86_64")]
    #[test]
    fn test_mov_jmp_rax() {
        let mut enc = Encoder::new();
        enc.mov_rax_imm64(0xDEADBEEF12345678).jmp_rax();
        assert_eq!(enc.len(), 12);
    }
}