#[cfg(all(not(feature = "std"), feature = "alloc"))]
use alloc::{format, string::String, vec, vec::Vec};

#[cfg(feature = "std")]
use std::{format, string::String, vec, vec::Vec};

use iced_x86::{
    BlockEncoder, BlockEncoderOptions, Code, Decoder, DecoderOptions,
    FlowControl, Instruction, InstructionBlock, OpKind,
};

/// Result of relocating a single instruction.
#[derive(Debug)]
pub struct RelocationResult {
    /// Relocated instruction bytes.
    pub bytes: Vec<u8>,
    /// Length of the original instruction in bytes.
    pub original_length: usize,
    /// Length of the relocated encoding in bytes.
    pub new_length: usize,
    /// Whether the encoding changed size during relocation.
    pub size_changed: bool,
    /// Whether relocation succeeded.
    pub success: bool,
    /// Error message when relocation failed.
    pub error: Option<String>,
}

impl RelocationResult {
    /// Builds a successful result from the relocated bytes.
    fn success(bytes: Vec<u8>, original_length: usize) -> Self {
        let new_length = bytes.len();
        Self {
            bytes,
            original_length,
            new_length,
            size_changed: new_length != original_length,
            success: true,
            error: None,
        }
    }

    /// Builds a failed result carrying an error message.
    fn failure(original_length: usize, error: impl Into<String>) -> Self {
        Self {
            bytes: Vec::new(),
            original_length,
            new_length: 0,
            size_changed: false,
            success: false,
            error: Some(error.into()),
        }
    }
}

/// Relocates x86/x64 instructions from one address to another, rewriting
/// relative branches, calls, and RIP-relative memory operands as needed.
pub struct InstructionRelocator {
    /// Target bitness: 32 or 64.
    bitness: u32,
}

impl InstructionRelocator {
    /// Creates a relocator for the architecture this code is compiled for.
    #[cfg(target_arch = "x86_64")]
    pub fn native() -> Self {
        Self { bitness: 64 }
    }

    /// Creates a relocator for the architecture this code is compiled for.
    #[cfg(target_arch = "x86")]
    pub fn native() -> Self {
        Self { bitness: 32 }
    }

    /// Creates a relocator for 64-bit code.
    pub fn x64() -> Self {
        Self { bitness: 64 }
    }

    /// Creates a relocator for 32-bit code.
    pub fn x86() -> Self {
        Self { bitness: 32 }
    }

    /// Relocates a single instruction from `old_address` to `new_address`,
    /// re-encoding it so that any IP-relative operand still refers to the
    /// same absolute target.
    pub fn relocate_instruction(
        &self,
        bytes: &[u8],
        old_address: u64,
        new_address: u64,
    ) -> RelocationResult {
        if bytes.is_empty() {
            return RelocationResult::failure(0, "empty instruction");
        }

        let mut decoder = Decoder::with_ip(
            self.bitness,
            bytes,
            old_address,
            DecoderOptions::NONE,
        );

        if !decoder.can_decode() {
            return RelocationResult::failure(bytes.len(), "cannot decode instruction");
        }

        let instruction = decoder.decode();
        if instruction.is_invalid() {
            return RelocationResult::failure(bytes.len(), "invalid instruction");
        }

        let original_length = instruction.len();

        // Position-independent instructions can be copied verbatim.
        if !needs_relocation(&instruction, self.bitness) {
            return RelocationResult::success(bytes[..original_length].to_vec(), original_length);
        }

        // Re-encode at the new IP; the encoder recomputes relative operands.
        let mut new_instruction = instruction;
        new_instruction.set_ip(new_address);

        let instructions = [new_instruction];
        let block = InstructionBlock::new(&instructions, new_address);

        let result = BlockEncoder::encode(
            self.bitness,
            block,
            BlockEncoderOptions::DONT_FIX_BRANCHES,
        );

        match result {
            Ok(encoded) => {
                RelocationResult::success(encoded.code_buffer, original_length)
            }
            Err(_) => {
                // The original encoding cannot reach the target from the new
                // address; widen the instruction by hand.
                self.relocate_manually(&instruction, old_address, new_address, bytes)
            }
        }
    }

    /// Relocates a contiguous block of instructions from `old_address` to
    /// `new_address`, re-encoding the whole block at its new location.
    pub fn relocate_block(
        &self,
        bytes: &[u8],
        old_address: u64,
        new_address: u64,
    ) -> Result<Vec<u8>, String> {
        if bytes.is_empty() {
            return Ok(Vec::new());
        }

        // Decode every instruction in the block.
        let mut instructions = Vec::new();
        let mut decoder = Decoder::with_ip(
            self.bitness,
            bytes,
            old_address,
            DecoderOptions::NONE,
        );

        while decoder.can_decode() {
            let instruction = decoder.decode();
            if instruction.is_invalid() {
                break;
            }
            instructions.push(instruction);
        }

        if instructions.is_empty() {
            return Err("no valid instructions found".into());
        }

        // Shift each instruction's IP by the relocation delta, preserving its
        // offset within the block.
        let mut new_instructions = Vec::with_capacity(instructions.len());

        for mut instruction in instructions {
            let old_ip = instruction.ip();
            let new_ip = new_address + (old_ip - old_address);
            instruction.set_ip(new_ip);
            new_instructions.push(instruction);
        }

        let block = InstructionBlock::new(&new_instructions, new_address);

        BlockEncoder::encode(
            self.bitness,
            block,
            BlockEncoderOptions::NONE,
        )
        .map(|result| result.code_buffer)
        .map_err(|e| format!("block encoding failed: {:?}", e))
    }

    /// Fallback used when the block encoder cannot re-encode an instruction,
    /// typically because its relative target moved out of range.
    fn relocate_manually(
        &self,
        instruction: &Instruction,
        old_address: u64,
        new_address: u64,
        bytes: &[u8],
    ) -> RelocationResult {
        let original_length = instruction.len();
        let flow = instruction.flow_control();

        match flow {
            FlowControl::UnconditionalBranch => {
                self.relocate_jump(instruction, old_address, new_address, bytes)
            }
            FlowControl::ConditionalBranch => {
                self.relocate_conditional_jump(instruction, old_address, new_address, bytes)
            }
            FlowControl::Call => {
                self.relocate_call(instruction, old_address, new_address, bytes)
            }
            _ => {
                if self.bitness == 64 && instruction.is_ip_rel_memory_operand() {
                    self.relocate_rip_relative(instruction, old_address, new_address, bytes)
                } else {
                    RelocationResult::success(bytes[..original_length].to_vec(), original_length)
                }
            }
        }
    }

    /// Rewrites a relative `jmp`, widening it when the target is out of range.
    fn relocate_jump(
        &self,
        instruction: &Instruction,
        old_address: u64,
        new_address: u64,
        bytes: &[u8],
    ) -> RelocationResult {
        let original_length = instruction.len();

        let target = match instruction.op0_kind() {
            OpKind::NearBranch16 | OpKind::NearBranch32 | OpKind::NearBranch64 => {
                instruction.near_branch_target()
            }
            _ => {
                // Indirect jumps carry no IP-relative target; copy verbatim.
                return RelocationResult::success(bytes[..original_length].to_vec(), original_length);
            }
        };

        // rel32 is measured from the end of the 5-byte `jmp rel32`.
        let new_offset = (target as i64) - (new_address as i64) - 5;

        if new_offset >= i32::MIN as i64 && new_offset <= i32::MAX as i64 {
            // E9 rel32
            let mut result = vec![0xE9];
            result.extend_from_slice(&(new_offset as i32).to_le_bytes());
            RelocationResult::success(result, original_length)
        } else if self.bitness == 64 {
            // FF 25 00000000: jmp qword ptr [rip+0], absolute target stored inline.
            let mut result = vec![0xFF, 0x25, 0x00, 0x00, 0x00, 0x00];
            result.extend_from_slice(&target.to_le_bytes());
            RelocationResult::success(result, original_length)
        } else {
            // push imm32; ret — absolute jump on x86.
            let mut result = vec![0x68];
            result.extend_from_slice(&(target as u32).to_le_bytes());
            result.push(0xC3);
            RelocationResult::success(result, original_length)
        }
    }

    /// Rewrites a conditional branch, widening it when the target is out of range.
    fn relocate_conditional_jump(
        &self,
        instruction: &Instruction,
        old_address: u64,
        new_address: u64,
        bytes: &[u8],
    ) -> RelocationResult {
        let original_length = instruction.len();

        let target = match instruction.op0_kind() {
            OpKind::NearBranch16 | OpKind::NearBranch32 | OpKind::NearBranch64 => {
                instruction.near_branch_target()
            }
            _ => {
                return RelocationResult::failure(original_length, "unexpected operand kind");
            }
        };

        let code = instruction.code();
        let cc = get_condition_code(code);

        // rel32 is measured from the end of the 6-byte `0F 8x rel32` form.
        let new_offset = (target as i64) - (new_address as i64) - 6;

        if new_offset >= i32::MIN as i64 && new_offset <= i32::MAX as i64 {
            // 0F 8x rel32: Jcc near
            let mut result = vec![0x0F, 0x80 + cc];
            result.extend_from_slice(&(new_offset as i32).to_le_bytes());
            RelocationResult::success(result, original_length)
        } else if self.bitness == 64 {
            // Invert the condition to hop over a 14-byte absolute indirect jump:
            //   7x 0E                 ; Jncc +14
            //   FF 25 00 00 00 00     ; jmp qword ptr [rip+0]
            //   dq target
            let inverted_cc = cc ^ 1;
            let mut result = vec![0x70 + inverted_cc, 14];
            result.extend_from_slice(&[0xFF, 0x25, 0x00, 0x00, 0x00, 0x00]);
            result.extend_from_slice(&target.to_le_bytes());
            RelocationResult::success(result, original_length)
        } else {
            // Invert the condition to hop over a 6-byte push/ret absolute jump.
            let inverted_cc = cc ^ 1;
            let mut result = vec![0x70 + inverted_cc, 6];
            result.push(0x68);
            result.extend_from_slice(&(target as u32).to_le_bytes());
            result.push(0xC3);
            RelocationResult::success(result, original_length)
        }
    }

    /// Rewrites a relative `call`, widening it when the target is out of range.
    fn relocate_call(
        &self,
        instruction: &Instruction,
        old_address: u64,
        new_address: u64,
        bytes: &[u8],
    ) -> RelocationResult {
        let original_length = instruction.len();

        let target = match instruction.op0_kind() {
            OpKind::NearBranch16 | OpKind::NearBranch32 | OpKind::NearBranch64 => {
                instruction.near_branch_target()
            }
            _ => {
                // Indirect calls carry no IP-relative target; copy verbatim.
                return RelocationResult::success(bytes[..original_length].to_vec(), original_length);
            }
        };

        // rel32 is measured from the end of the 5-byte `call rel32`.
        let new_offset = (target as i64) - (new_address as i64) - 5;

        if new_offset >= i32::MIN as i64 && new_offset <= i32::MAX as i64 {
            // E8 rel32
            let mut result = vec![0xE8];
            result.extend_from_slice(&(new_offset as i32).to_le_bytes());
            RelocationResult::success(result, original_length)
        } else if self.bitness == 64 {
            // Absolute indirect call with the target stored inline, plus a short
            // jump over the stored qword so the return address lands on code:
            //   FF 15 02 00 00 00     ; call qword ptr [rip+2]
            //   EB 08                 ; jmp past the stored target
            //   dq target
            let mut result = vec![0xFF, 0x15, 0x02, 0x00, 0x00, 0x00, 0xEB, 0x08];
            result.extend_from_slice(&target.to_le_bytes());
            RelocationResult::success(result, original_length)
        } else {
            RelocationResult::failure(original_length, "call too far for x86")
        }
    }

    /// Patches the 32-bit displacement of a RIP-relative memory operand so it
    /// still points at the same absolute address from the new location.
    fn relocate_rip_relative(
        &self,
        instruction: &Instruction,
        old_address: u64,
        new_address: u64,
        bytes: &[u8],
    ) -> RelocationResult {
        let original_length = instruction.len();

        // Absolute address the operand referred to at the old location.
        let target = instruction.ip_rel_memory_address();

        // The displacement is relative to the end of the instruction at its new address.
        let new_disp = (target as i64) - (new_address as i64) - (original_length as i64);

        if new_disp < i32::MIN as i64 || new_disp > i32::MAX as i64 {
            return RelocationResult::failure(
                original_length,
                "RIP-relative target too far after relocation",
            );
        }

        let mut result = bytes[..original_length].to_vec();

        // The 4-byte displacement sits at the end of the instruction, just
        // before any trailing immediate operand.
        let imm_size = get_immediate_size(instruction);
        let disp_offset = original_length - 4 - imm_size;

        let new_disp_bytes = (new_disp as i32).to_le_bytes();
        result[disp_offset..disp_offset + 4].copy_from_slice(&new_disp_bytes);

        RelocationResult::success(result, original_length)
    }
}

/// Returns `true` if the instruction encodes an address relative to its own
/// location: a near relative branch/call, or (in 64-bit mode) a RIP-relative
/// memory operand.
fn needs_relocation(instruction: &Instruction, bitness: u32) -> bool {
    let flow = instruction.flow_control();

    match flow {
        FlowControl::UnconditionalBranch
        | FlowControl::ConditionalBranch
        | FlowControl::Call => {
            matches!(
                instruction.op0_kind(),
                OpKind::NearBranch16 | OpKind::NearBranch32 | OpKind::NearBranch64
            )
        }
        _ => {
            bitness == 64 && instruction.is_ip_rel_memory_operand()
        }
    }
}

/// Maps a Jcc `Code` to its condition-code nibble (the `x` in `7x` / `0F 8x`).
fn get_condition_code(code: Code) -> u8 {
    match code {
        Code::Jo_rel8_16 | Code::Jo_rel8_32 | Code::Jo_rel8_64
        | Code::Jo_rel16 | Code::Jo_rel32_32 | Code::Jo_rel32_64 => 0x0,

        Code::Jno_rel8_16 | Code::Jno_rel8_32 | Code::Jno_rel8_64
        | Code::Jno_rel16 | Code::Jno_rel32_32 | Code::Jno_rel32_64 => 0x1,

        Code::Jb_rel8_16 | Code::Jb_rel8_32 | Code::Jb_rel8_64
        | Code::Jb_rel16 | Code::Jb_rel32_32 | Code::Jb_rel32_64 => 0x2,

        Code::Jae_rel8_16 | Code::Jae_rel8_32 | Code::Jae_rel8_64
        | Code::Jae_rel16 | Code::Jae_rel32_32 | Code::Jae_rel32_64 => 0x3,

        Code::Je_rel8_16 | Code::Je_rel8_32 | Code::Je_rel8_64
        | Code::Je_rel16 | Code::Je_rel32_32 | Code::Je_rel32_64 => 0x4,

        Code::Jne_rel8_16 | Code::Jne_rel8_32 | Code::Jne_rel8_64
        | Code::Jne_rel16 | Code::Jne_rel32_32 | Code::Jne_rel32_64 => 0x5,

        Code::Jbe_rel8_16 | Code::Jbe_rel8_32 | Code::Jbe_rel8_64
        | Code::Jbe_rel16 | Code::Jbe_rel32_32 | Code::Jbe_rel32_64 => 0x6,

        Code::Ja_rel8_16 | Code::Ja_rel8_32 | Code::Ja_rel8_64
        | Code::Ja_rel16 | Code::Ja_rel32_32 | Code::Ja_rel32_64 => 0x7,

        Code::Js_rel8_16 | Code::Js_rel8_32 | Code::Js_rel8_64
        | Code::Js_rel16 | Code::Js_rel32_32 | Code::Js_rel32_64 => 0x8,

        Code::Jns_rel8_16 | Code::Jns_rel8_32 | Code::Jns_rel8_64
        | Code::Jns_rel16 | Code::Jns_rel32_32 | Code::Jns_rel32_64 => 0x9,

        Code::Jp_rel8_16 | Code::Jp_rel8_32 | Code::Jp_rel8_64
        | Code::Jp_rel16 | Code::Jp_rel32_32 | Code::Jp_rel32_64 => 0xA,

        Code::Jnp_rel8_16 | Code::Jnp_rel8_32 | Code::Jnp_rel8_64
        | Code::Jnp_rel16 | Code::Jnp_rel32_32 | Code::Jnp_rel32_64 => 0xB,

        Code::Jl_rel8_16 | Code::Jl_rel8_32 | Code::Jl_rel8_64
        | Code::Jl_rel16 | Code::Jl_rel32_32 | Code::Jl_rel32_64 => 0xC,

        Code::Jge_rel8_16 | Code::Jge_rel8_32 | Code::Jge_rel8_64
        | Code::Jge_rel16 | Code::Jge_rel32_32 | Code::Jge_rel32_64 => 0xD,

        Code::Jle_rel8_16 | Code::Jle_rel8_32 | Code::Jle_rel8_64
        | Code::Jle_rel16 | Code::Jle_rel32_32 | Code::Jle_rel32_64 => 0xE,

        Code::Jg_rel8_16 | Code::Jg_rel8_32 | Code::Jg_rel8_64
        | Code::Jg_rel16 | Code::Jg_rel32_32 | Code::Jg_rel32_64 => 0xF,

        // Conditional branches without a plain Jcc form (the JCXZ/LOOP family)
        // fall back to JE's condition code.
        _ => 0x4,
    }
}

/// Returns the encoded size, in bytes, of the instruction's immediate operand,
/// or 0 if it has none.
fn get_immediate_size(instruction: &Instruction) -> usize {
    for i in 0..instruction.op_count() {
        match instruction.op_kind(i) {
            OpKind::Immediate8 | OpKind::Immediate8_2nd | OpKind::Immediate8to16
            | OpKind::Immediate8to32 | OpKind::Immediate8to64 => return 1,
            OpKind::Immediate16 => return 2,
            OpKind::Immediate32 | OpKind::Immediate32to64 => return 4,
            OpKind::Immediate64 => return 8,
            _ => {}
        }
    }
    0
}

/// Relocates a single instruction using the native bitness.
pub fn relocate_one(
    bytes: &[u8],
    old_address: u64,
    new_address: u64,
) -> RelocationResult {
    InstructionRelocator::native().relocate_instruction(bytes, old_address, new_address)
}

/// Relocates a block of instructions using the native bitness.
pub fn relocate_block(
    bytes: &[u8],
    old_address: u64,
    new_address: u64,
) -> Result<Vec<u8>, String> {
    InstructionRelocator::native().relocate_block(bytes, old_address, new_address)
}

/// Returns `true` if the instruction at `address` would need fixing up when
/// moved to a different address.
pub fn instruction_needs_relocation(bytes: &[u8], address: u64) -> bool {
    let bitness = if cfg!(target_arch = "x86_64") { 64 } else { 32 };
    let mut decoder = Decoder::with_ip(bitness, bytes, address, DecoderOptions::NONE);

    if !decoder.can_decode() {
        return false;
    }

    let instruction = decoder.decode();
    if instruction.is_invalid() {
        return false;
    }

    needs_relocation(&instruction, bitness)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_relocate_nop() {
        let relocator = InstructionRelocator::x64();
        let nop = [0x90u8];

        let result = relocator.relocate_instruction(&nop, 0x1000, 0x2000);
        assert!(result.success);
        assert_eq!(result.bytes, vec![0x90]);
        assert!(!result.size_changed);
    }

    #[test]
    fn test_relocate_push() {
        let relocator = InstructionRelocator::x64();
        let push = [0x55u8]; // push rbp

        let result = relocator.relocate_instruction(&push, 0x1000, 0x2000);
        assert!(result.success);
        assert_eq!(result.bytes, vec![0x55]);
        assert!(!result.size_changed);
    }

    #[test]
    fn test_relocate_jmp_rel32() {
        let relocator = InstructionRelocator::x64();
        // jmp +0x100: target = 0x1000 + 5 + 0x100 = 0x1105
        let jmp = [0xE9, 0x00, 0x01, 0x00, 0x00];

        let result = relocator.relocate_instruction(&jmp, 0x1000, 0x2000);
        assert!(result.success);
        assert_eq!(result.bytes.len(), 5);
        assert_eq!(result.bytes[0], 0xE9);

        // New offset: 0x1105 - (0x2000 + 5) = -0xF00
        let new_offset = i32::from_le_bytes(result.bytes[1..5].try_into().unwrap());
        assert_eq!(new_offset, -0xF00);
    }
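
    // Added example (not from the original test set): a sketch of the far-jump
    // path, assuming the block encoder rejects a rel32 that cannot reach the
    // target from the new address, which routes relocation through
    // `relocate_jump`'s absolute-thunk fallback (FF 25 + inline 8-byte target).
    #[test]
    fn test_relocate_jmp_far_uses_absolute_thunk() {
        let relocator = InstructionRelocator::x64();
        // jmp +0: target = 0x1005
        let jmp = [0xE9, 0x00, 0x00, 0x00, 0x00];

        // Move the instruction more than 2 GiB away so rel32 can no longer reach.
        let result = relocator.relocate_instruction(&jmp, 0x1000, 0x2_0000_1000);
        assert!(result.success);
        assert!(result.size_changed);

        // Expect: FF 25 00 00 00 00 (jmp qword ptr [rip+0]) followed by the target.
        assert_eq!(result.bytes.len(), 14);
        assert_eq!(&result.bytes[..6], &[0xFF, 0x25, 0x00, 0x00, 0x00, 0x00]);
        let stored = u64::from_le_bytes(result.bytes[6..14].try_into().unwrap());
        assert_eq!(stored, 0x1005);
    }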

    #[test]
    fn test_relocate_call_rel32() {
        let relocator = InstructionRelocator::x64();
        let call = [0xE8, 0x00, 0x00, 0x00, 0x00];

        let result = relocator.relocate_instruction(&call, 0x1000, 0x2000);
        assert!(result.success);
        assert_eq!(result.bytes.len(), 5);
        assert_eq!(result.bytes[0], 0xE8);

        let new_offset = i32::from_le_bytes(result.bytes[1..5].try_into().unwrap());
        assert_eq!(new_offset, -0x1000);
    }

    #[test]
    fn test_relocate_jz_short() {
        let relocator = InstructionRelocator::x64();
        // jz +0x10: the 8-bit displacement cannot reach the target from 0x2000,
        // so the branch must be widened.
        let jz = [0x74, 0x10];

        let result = relocator.relocate_instruction(&jz, 0x1000, 0x2000);
        assert!(result.success);

        assert!(result.bytes.len() >= 2);
        // Widened form: 0F 84 rel32 (jz near)
        if result.bytes[0] == 0x0F && result.bytes[1] == 0x84 {
            assert_eq!(result.bytes.len(), 6);
        }
    }
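
    // Added example (not from the original test set): a sketch of RIP-relative
    // relocation, assuming the encoder recomputes the displacement when the
    // instruction is re-encoded at its new IP. `lea rax, [rip+0x10]` at 0x1000
    // refers to 0x1017; after moving to 0x2000 the displacement must become
    // 0x1017 - 0x2007 = -0xFF0.
    #[test]
    fn test_relocate_rip_relative_lea() {
        let relocator = InstructionRelocator::x64();
        let lea = [0x48, 0x8D, 0x05, 0x10, 0x00, 0x00, 0x00]; // lea rax, [rip+0x10]

        let result = relocator.relocate_instruction(&lea, 0x1000, 0x2000);
        assert!(result.success);
        assert_eq!(result.bytes.len(), 7);
        assert_eq!(&result.bytes[..3], &[0x48, 0x8D, 0x05]);

        let new_disp = i32::from_le_bytes(result.bytes[3..7].try_into().unwrap());
        assert_eq!(new_disp, -0xFF0);
    }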

    #[test]
    fn test_relocate_block() {
        let relocator = InstructionRelocator::x64();
        // push rbp; mov rbp, rsp; sub rsp, 0x28
        let prologue = [0x55, 0x48, 0x89, 0xE5, 0x48, 0x83, 0xEC, 0x28];

        let result = relocator.relocate_block(&prologue, 0x1000, 0x2000);
        assert!(result.is_ok());

        let relocated = result.unwrap();
        // Nothing in the prologue is position-dependent, so the size is unchanged.
        assert_eq!(relocated.len(), prologue.len());
    }
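
    // Added example (not from the original test set): a sketch of block
    // relocation preserving a relative call's absolute target, assuming the
    // block encoder recomputes rel32 from each instruction's new IP. The call
    // at 0x1001 targets 0x1006; after moving the block to 0x2000 the call sits
    // at 0x2001, so rel32 becomes 0x1006 - 0x2006 = -0x1000.
    #[test]
    fn test_relocate_block_with_call() {
        let relocator = InstructionRelocator::x64();
        // push rbp; call +0
        let block = [0x55, 0xE8, 0x00, 0x00, 0x00, 0x00];

        let relocated = relocator.relocate_block(&block, 0x1000, 0x2000).unwrap();
        assert_eq!(relocated.len(), 6);
        assert_eq!(relocated[0], 0x55);
        assert_eq!(relocated[1], 0xE8);

        let new_offset = i32::from_le_bytes(relocated[2..6].try_into().unwrap());
        assert_eq!(new_offset, -0x1000);
    }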

    #[test]
    fn test_needs_relocation() {
        // Relative jmp and call encode IP-relative targets.
        let jmp = [0xE9, 0x00, 0x00, 0x00, 0x00];
        assert!(instruction_needs_relocation(&jmp, 0x1000));

        let call = [0xE8, 0x00, 0x00, 0x00, 0x00];
        assert!(instruction_needs_relocation(&call, 0x1000));

        // push rbp and nop are position-independent.
        let push = [0x55];
        assert!(!instruction_needs_relocation(&push, 0x1000));

        let nop = [0x90];
        assert!(!instruction_needs_relocation(&nop, 0x1000));
    }
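
    // Added example (not from the original test set): RIP-relative memory
    // operands also need relocation in 64-bit mode. Gated to x86_64 because
    // `instruction_needs_relocation` decodes with the native bitness.
    #[cfg(target_arch = "x86_64")]
    #[test]
    fn test_needs_relocation_rip_relative() {
        // mov rax, [rip+0x10]
        let mov = [0x48, 0x8B, 0x05, 0x10, 0x00, 0x00, 0x00];
        assert!(instruction_needs_relocation(&mov, 0x1000));
    }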
}