1mod insn;
2
3use goblin::elf::{Elf, SectionHeader};
4use int_enum::IntEnum;
5
6use crate::{
7 BIT, BIT_U64, ModuleErr, Result,
8 arch::{Ptr, aarch64::insn::*, get_rela_sym_idx, get_rela_type},
9 loader::*,
10};
11
/// Architecture-specific per-module data.
///
/// Empty on AArch64: this port tracks no extra per-module state (no PLT /
/// veneer bookkeeping yet — see the unimplemented veneer path in
/// `apply_relocation`). Kept `#[repr(C)]` so its layout stays stable.
#[derive(Debug, Clone, Copy, Default)]
#[repr(C)]
pub struct ModuleArchSpecific {}
15
/// ELF relocation types for AArch64.
///
/// Discriminants are the on-disk `r_type` values defined by the "ELF for the
/// Arm 64-bit Architecture" (AAELF64) ABI, so `IntEnum` can convert directly
/// from `ELF64_R_TYPE(r_info)`. Do not renumber.
#[repr(u32)]
#[derive(Debug, Clone, Copy, IntEnum, PartialEq, Eq)]
#[allow(non_camel_case_types)]
pub enum ArchRelocationType {
    /// Null relocation; value 0 is shared with the 32-bit ARM ABI.
    R_ARM_NONE = 0,
    R_AARCH64_NONE = 256,
    // Data relocations (patch raw words, not instructions).
    R_AARCH64_ABS64 = 257,
    R_AARCH64_ABS32 = 258,
    R_AARCH64_ABS16 = 259,
    R_AARCH64_PREL64 = 260,
    R_AARCH64_PREL32 = 261,
    R_AARCH64_PREL16 = 262,
    // MOVZ/MOVK immediate relocations, absolute, unsigned groups G0..G3.
    R_AARCH64_MOVW_UABS_G0 = 263,
    R_AARCH64_MOVW_UABS_G0_NC = 264,
    R_AARCH64_MOVW_UABS_G1 = 265,
    R_AARCH64_MOVW_UABS_G1_NC = 266,
    R_AARCH64_MOVW_UABS_G2 = 267,
    R_AARCH64_MOVW_UABS_G2_NC = 268,
    R_AARCH64_MOVW_UABS_G3 = 269,
    // MOVN/MOVZ immediate relocations, absolute, signed groups.
    R_AARCH64_MOVW_SABS_G0 = 270,
    R_AARCH64_MOVW_SABS_G1 = 271,
    R_AARCH64_MOVW_SABS_G2 = 272,
    // PC-relative instruction-immediate relocations.
    R_AARCH64_LD_PREL_LO19 = 273,
    R_AARCH64_ADR_PREL_LO21 = 274,
    R_AARCH64_ADR_PREL_PG_HI21 = 275,
    R_AARCH64_ADR_PREL_PG_HI21_NC = 276,
    // Low-12-bit absolute immediates for ADD and scaled load/store offsets.
    R_AARCH64_ADD_ABS_LO12_NC = 277,
    R_AARCH64_LDST8_ABS_LO12_NC = 278,
    // Branch relocations (281 is reserved by the ABI).
    R_AARCH64_TSTBR14 = 279,
    R_AARCH64_CONDBR19 = 280,
    R_AARCH64_JUMP26 = 282,
    R_AARCH64_CALL26 = 283,
    R_AARCH64_LDST16_ABS_LO12_NC = 284,
    R_AARCH64_LDST32_ABS_LO12_NC = 285,
    R_AARCH64_LDST64_ABS_LO12_NC = 286,
    // Listed with its LDST siblings although its ABI value (299) is out of
    // numeric sequence.
    R_AARCH64_LDST128_ABS_LO12_NC = 299,
    // PC-relative MOVW group relocations.
    R_AARCH64_MOVW_PREL_G0 = 287,
    R_AARCH64_MOVW_PREL_G0_NC = 288,
    R_AARCH64_MOVW_PREL_G1 = 289,
    R_AARCH64_MOVW_PREL_G1_NC = 290,
    R_AARCH64_MOVW_PREL_G2 = 291,
    R_AARCH64_MOVW_PREL_G2_NC = 292,
    R_AARCH64_MOVW_PREL_G3 = 293,
    // Dynamic relocation (base + addend).
    R_AARCH64_RELATIVE = 1027,
}
65
/// Short alias to keep the long relocation match arms readable.
type Arm64RelTy = ArchRelocationType;
67
68const fn do_reloc(op: Aarch64RelocOp, location: Ptr, address: u64) -> u64 {
69 match op {
70 Aarch64RelocOp::RELOC_OP_ABS => address,
71 Aarch64RelocOp::RELOC_OP_PREL => address.wrapping_sub(location.0),
72 Aarch64RelocOp::RELOC_OP_PAGE => (address & !0xfff).wrapping_sub(location.0 & !0xfff),
73 Aarch64RelocOp::RELOC_OP_NONE => 0,
74 }
75}
76
/// Returns `true` when an ADRP instruction at `address` must be avoided.
///
/// The offset test (`addr & 0xfff >= 0xff8`, i.e. the last two instruction
/// slots of a 4 KiB page) matches the Linux kernel's check for Cortex-A53
/// erratum #843419. The original code expressed "workaround disabled" as
/// `(...) && false`, which reads like a bug; a named constant makes the
/// intent explicit and re-enabling the check a one-line change.
fn is_forbidden_offset_for_adrp(address: u64) -> bool {
    // Flip to `true` to activate the erratum-843419 ADRP avoidance.
    const ERRATUM_843419_WORKAROUND: bool = false;
    ERRATUM_843419_WORKAROUND && (address & 0xfff) >= 0xff8
}
83
impl ArchRelocationType {
    /// Applies a raw data relocation of `len` bits (16/32/64) at `location`.
    ///
    /// The resolved value is truncated and stored in place. The returned
    /// `Ok(bool)` is an overflow flag (`true` when the value did not fit);
    /// the caller decides whether to honour it — `apply_relocation` clears
    /// `check_overflow` for the full-width 64-bit forms.
    fn reloc_data(
        &self,
        op: Aarch64RelocOp,
        location: Ptr,
        address: u64,
        len: usize,
    ) -> Result<bool> {
        // Raw relocation value, reinterpreted as signed for the range checks.
        let s_addr = do_reloc(op, location, address) as i64;
        match len {
            16 => {
                location.write::<i16>(s_addr as i16);
                match op {
                    // Absolute 16-bit values are unsigned: 0..=u16::MAX.
                    Aarch64RelocOp::RELOC_OP_ABS => Ok(s_addr < 0 || s_addr > u16::MAX as i64),
                    // PC-relative 16-bit values are signed: i16 range.
                    Aarch64RelocOp::RELOC_OP_PREL => {
                        Ok(s_addr < i16::MIN as i64 || s_addr > i16::MAX as i64)
                    }
                    _ => {
                        unreachable!("Unsupported operation for AArch64 16-bit relocation")
                    }
                }
            }
            32 => {
                location.write::<i32>(s_addr as i32);
                match op {
                    // Absolute 32-bit values are unsigned: 0..=u32::MAX.
                    Aarch64RelocOp::RELOC_OP_ABS => Ok(s_addr < 0 || s_addr > u32::MAX as i64),
                    // PC-relative 32-bit values are signed: i32 range.
                    Aarch64RelocOp::RELOC_OP_PREL => {
                        Ok(s_addr < i32::MIN as i64 || s_addr > i32::MAX as i64)
                    }
                    _ => {
                        unreachable!("Unsupported operation for AArch64 32-bit relocation")
                    }
                }
            }
            // A 64-bit store can never overflow a 64-bit value.
            64 => {
                location.write::<u64>(s_addr as u64);
                Ok(false)
            }
            _ => unreachable!("Unsupported length for AArch64 relocation"),
        }
    }

    /// Patches the 16-bit immediate of a MOVZ/MOVK/MOVN instruction at
    /// `location` with bits `[lsb + 15 : lsb]` of the relocation value.
    ///
    /// For `AARCH64_INSN_IMM_MOVNZ` the opcode bits (30:29) are also
    /// rewritten: a non-negative value selects MOVZ, a negative one keeps
    /// MOVN and inverts the immediate to match MOVN semantics.
    /// Returns `Ok(true)` when the selected 16-bit group overflowed.
    fn reloc_insn_movw(
        &self,
        op: Aarch64RelocOp,
        location: Ptr,
        address: u64,
        lsb: i32,
        imm_type: Aarch64InsnMovwImmType,
    ) -> Result<bool> {
        let mut insn = location.read::<u32>();
        let s_addr = do_reloc(op, location, address) as i64;

        // Select the 16-bit group starting at `lsb` (arithmetic shift keeps
        // the sign for the MOVNZ decision below).
        let mut imm = (s_addr >> lsb) as u64;
        if imm_type == Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ {
            // Clear the opcode field (bits 30:29) before choosing MOVZ/MOVN.
            insn &= !(3 << 29);
            if s_addr >= 0 {
                // Non-negative value: encode as MOVZ.
                insn |= 2 << 29;
            } else {
                // Negative value: leave MOVN and invert the immediate.
                imm = !imm;
            }
        }
        insn = aarch64_insn_encode_immediate(Aarch64InsnImmType::AARCH64_INSN_IMM_16, insn, imm);
        location.write::<u32>(insn);

        // Overflow if any bits above the 16-bit group remain set.
        if imm > u16::MAX as u64 {
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// Patches an instruction immediate field, taking `len` bits starting at
    /// bit `lsb` of the relocation value and encoding them via `imm_type`.
    ///
    /// Returns `Ok(true)` when the value does not fit the signed field.
    fn reloc_insn_imm(
        &self,
        op: Aarch64RelocOp,
        location: Ptr,
        address: u64,
        lsb: i32,
        len: i32,
        imm_type: Aarch64InsnImmType,
    ) -> Result<bool> {
        let mut insn = location.read::<u32>();
        let mut s_addr = do_reloc(op, location, address) as i64;
        // Discard bits below the field; arithmetic shift preserves the sign.
        s_addr >>= lsb;
        // Mask covering the `len` payload bits of the (shifted) value.
        let imm_mask = (BIT_U64!(lsb + len) - 1) >> lsb;
        let imm = (s_addr as u64) & imm_mask;

        insn = aarch64_insn_encode_immediate(imm_type, insn, imm);

        location.write::<u32>(insn);

        // Extract the bits above the field (including the field's sign bit)
        // and shift them down to bit 0.
        s_addr = (s_addr & !((imm_mask >> 1) as i64)) >> (len - 1);

        // Overflow unless those upper bits all equal the sign bit, i.e.
        // `s_addr` is exactly 0 or -1.
        if (s_addr + 1) as u64 >= 2 {
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// Relocates an ADRP instruction (21-bit page-relative immediate).
    ///
    /// If the instruction sits at a "forbidden" page offset (the erratum
    /// check in `is_forbidden_offset_for_adrp`), it falls back to rewriting
    /// ADRP into a plain ADR targeting the page address; if even ADR cannot
    /// reach, a veneer would be required, which is not implemented.
    fn reloc_insn_adrp(&self, location: Ptr, address: u64) -> Result<bool> {
        if !is_forbidden_offset_for_adrp(location.0) {
            // Normal case: encode the page delta into the 21-bit ADR field,
            // dropping the low 12 bits.
            return self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PAGE,
                location,
                address,
                12,
                21,
                Aarch64InsnImmType::AARCH64_INSN_IMM_ADR,
            );
        }
        // Fallback: try to reach the target page with a PC-relative ADR.
        let ovf = self.reloc_insn_imm(
            Aarch64RelocOp::RELOC_OP_PREL,
            location,
            address & !0xfff,
            0,
            21,
            Aarch64InsnImmType::AARCH64_INSN_IMM_ADR,
        )?;
        if !ovf {
            let mut insn = location.read::<u32>();
            // Clearing bit 31 converts ADRP into ADR.
            insn &= !BIT!(31);
            location.write::<u32>(insn);
            Ok(false)
        } else {
            log::error!("ADR out of range for veneer emission");
            Err(ModuleErr::ENOEXEC)
        }
    }

    /// Dispatches one relocation to the appropriate patcher.
    ///
    /// `location` is the address being patched; `address` is the resolved
    /// target (symbol value + addend). `check_overflow` is cleared for the
    /// `*_NC` ("no check") relocation types and for full-width 64-bit data
    /// stores. Returns `ModuleErr::ENOEXEC` on overflow (when checked) or on
    /// an unsupported relocation type.
    fn apply_relocation(&self, location: u64, address: u64) -> Result<()> {
        let mut check_overflow = true;
        let location = Ptr(location);
        let ovf = match self {
            // Null relocations: nothing to do.
            Arm64RelTy::R_ARM_NONE | Arm64RelTy::R_AARCH64_NONE => false,
            // Data relocations.
            Arm64RelTy::R_AARCH64_ABS64 => {
                check_overflow = false;
                self.reloc_data(Aarch64RelocOp::RELOC_OP_ABS, location, address, 64)?
            }
            Arm64RelTy::R_AARCH64_ABS32 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_ABS, location, address, 32)?
            }
            Arm64RelTy::R_AARCH64_ABS16 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_ABS, location, address, 16)?
            }
            Arm64RelTy::R_AARCH64_PREL64 => {
                check_overflow = false;

                self.reloc_data(Aarch64RelocOp::RELOC_OP_PREL, location, address, 64)?
            }
            Arm64RelTy::R_AARCH64_PREL32 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_PREL, location, address, 32)?
            }
            Arm64RelTy::R_AARCH64_PREL16 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_PREL, location, address, 16)?
            }
            // MOVW absolute, unsigned groups G0..G3 (lsb = 0/16/32/48).
            Arm64RelTy::R_AARCH64_MOVW_UABS_G0_NC | Arm64RelTy::R_AARCH64_MOVW_UABS_G0 => {
                if *self == Arm64RelTy::R_AARCH64_MOVW_UABS_G0_NC {
                    check_overflow = false;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    0,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_UABS_G1_NC | Arm64RelTy::R_AARCH64_MOVW_UABS_G1 => {
                if *self == Arm64RelTy::R_AARCH64_MOVW_UABS_G1_NC {
                    check_overflow = false;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    16,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_UABS_G2_NC | Arm64RelTy::R_AARCH64_MOVW_UABS_G2 => {
                if *self == Arm64RelTy::R_AARCH64_MOVW_UABS_G2_NC {
                    check_overflow = false;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    32,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_UABS_G3 => {
                // Top group: overflow impossible by construction.
                check_overflow = false;
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    48,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            // MOVW absolute, signed groups (MOVN/MOVZ selection).
            Arm64RelTy::R_AARCH64_MOVW_SABS_G0 => self.reloc_insn_movw(
                Aarch64RelocOp::RELOC_OP_ABS,
                location,
                address,
                0,
                Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
            )?,
            Arm64RelTy::R_AARCH64_MOVW_SABS_G1 => self.reloc_insn_movw(
                Aarch64RelocOp::RELOC_OP_ABS,
                location,
                address,
                16,
                Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
            )?,
            Arm64RelTy::R_AARCH64_MOVW_SABS_G2 => self.reloc_insn_movw(
                Aarch64RelocOp::RELOC_OP_ABS,
                location,
                address,
                32,
                Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
            )?,
            // MOVW PC-relative groups: the `_NC` variants use MOVK encoding
            // and skip the overflow check.
            Arm64RelTy::R_AARCH64_MOVW_PREL_G0_NC | Arm64RelTy::R_AARCH64_MOVW_PREL_G0 => {
                let mut imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ;
                if *self == Arm64RelTy::R_AARCH64_MOVW_PREL_G0_NC {
                    check_overflow = false;
                    imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    0,
                    imm_type,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_PREL_G1_NC | Arm64RelTy::R_AARCH64_MOVW_PREL_G1 => {
                let mut imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ;
                if *self == Arm64RelTy::R_AARCH64_MOVW_PREL_G1_NC {
                    check_overflow = false;
                    imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    16,
                    imm_type,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_PREL_G2_NC | Arm64RelTy::R_AARCH64_MOVW_PREL_G2 => {
                let mut imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ;
                if *self == Arm64RelTy::R_AARCH64_MOVW_PREL_G2_NC {
                    check_overflow = false;
                    imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    32,
                    imm_type,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_PREL_G3 => {
                // Top group: overflow impossible by construction.
                check_overflow = false;
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    48,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
                )?
            }
            // PC-relative instruction immediates.
            Arm64RelTy::R_AARCH64_LD_PREL_LO19 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                2,
                19,
                Aarch64InsnImmType::AARCH64_INSN_IMM_19,
            )?,
            Arm64RelTy::R_AARCH64_ADR_PREL_LO21 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                0,
                21,
                Aarch64InsnImmType::AARCH64_INSN_IMM_ADR,
            )?,
            Arm64RelTy::R_AARCH64_ADR_PREL_PG_HI21_NC | Arm64RelTy::R_AARCH64_ADR_PREL_PG_HI21 => {
                if *self == Arm64RelTy::R_AARCH64_ADR_PREL_PG_HI21_NC {
                    check_overflow = false;
                }
                self.reloc_insn_adrp(location, address)?
            }
            // Low-12-bit absolute immediates; lsb/len pairs account for the
            // load/store access-size scaling (1/2/4/8/16-byte units).
            Arm64RelTy::R_AARCH64_ADD_ABS_LO12_NC | Arm64RelTy::R_AARCH64_LDST8_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    0,
                    12,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST16_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    1,
                    11,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST32_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    2,
                    10,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST64_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    3,
                    9,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST128_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    4,
                    8,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            // Branch relocations.
            Arm64RelTy::R_AARCH64_TSTBR14 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                2,
                14,
                Aarch64InsnImmType::AARCH64_INSN_IMM_14,
            )?,
            Arm64RelTy::R_AARCH64_CONDBR19 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                2,
                19,
                Aarch64InsnImmType::AARCH64_INSN_IMM_19,
            )?,
            Arm64RelTy::R_AARCH64_JUMP26 | Arm64RelTy::R_AARCH64_CALL26 => {
                let ovf = self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    2,
                    26,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_26,
                )?;
                // An out-of-range B/BL would need a branch veneer; that path
                // does not exist yet, so fail loudly rather than mis-link.
                if ovf {
                    unimplemented!(
                        "Veneer emission for out-of-range AArch64 JUMP26/CALL26 not implemented"
                    );
                }
                ovf
            }
            _ => {
                log::error!("Relocation type {:?} not implemented yet", self);
                return Err(ModuleErr::ENOEXEC);
            }
        };
        if check_overflow && ovf {
            log::error!("Overflow detected during relocation type {:?}", self);
            return Err(ModuleErr::ENOEXEC);
        }
        Ok(())
    }
}
530
/// Unit type grouping the architecture relocation entry points.
pub struct ArchRelocate;
532
533#[allow(unused_assignments)]
534impl ArchRelocate {
535 pub fn apply_relocate_add<H: KernelModuleHelper>(
537 rela_list: &[goblin::elf64::reloc::Rela],
538 rel_section: &SectionHeader,
539 sechdrs: &[SectionHeader],
540 load_info: &ModuleLoadInfo,
541 module: &ModuleOwner<H>,
542 ) -> Result<()> {
543 for rela in rela_list {
544 let rel_type = get_rela_type(rela.r_info);
545 let sym_idx = get_rela_sym_idx(rela.r_info);
546
547 let location = sechdrs[rel_section.sh_info as usize].sh_addr + rela.r_offset;
549 let (sym, sym_name) = &load_info.syms[sym_idx];
550
551 let reloc_type = Arm64RelTy::try_from(rel_type).map_err(|_| {
552 log::error!(
553 "[{:?}]: Invalid relocation type: {}",
554 module.name(),
555 rel_type
556 );
557 ModuleErr::ENOEXEC
558 })?;
559 let target_addr = sym.st_value.wrapping_add(rela.r_addend as u64);
561
562 log::info!(
564 "[{:?}]: Applying relocation {:?} at location {:#x} with target addr {:#x}",
565 module.name(),
566 reloc_type,
567 location,
568 target_addr
569 );
570
571 let res = reloc_type.apply_relocation(location, target_addr);
572 match res {
573 Err(e) => {
574 log::error!("[{:?}]: ({}) {:?}", module.name(), sym_name, e);
575 return Err(e);
576 }
577 Ok(_) => { }
578 }
579 }
580 Ok(())
581 }
582}
583
584pub fn module_frob_arch_sections<H: KernelModuleHelper>(
585 elf: &mut Elf,
586 owner: &mut ModuleOwner<H>,
587) -> Result<()> {
588 Ok(())
589}