//! AArch64 relocation support for the kernel module loader
//! (`kmod_loader/arch/aarch64/mod.rs`).
1mod insn;
2
3use goblin::elf::{Elf, SectionHeader};
4use int_enum::IntEnum;
5
6use crate::{
7    BIT, BIT_U64, ModuleErr, Result,
8    arch::{Ptr, aarch64::insn::*, get_rela_sym_idx, get_rela_type},
9    loader::*,
10};
11
/// Per-module, architecture-specific loader state for AArch64.
///
/// Currently empty: no PLT/veneer bookkeeping is implemented yet.
/// `#[repr(C)]` keeps the layout stable for embedding in C-compatible
/// module metadata.
#[derive(Debug, Clone, Copy, Default)]
#[repr(C)]
pub struct ModuleArchSpecific {}
15
16#[repr(u32)]
17#[derive(Debug, Clone, Copy, IntEnum, PartialEq, Eq)]
18#[allow(non_camel_case_types)]
19/// See <https://github.com/gimli-rs/object/blob/af3ca8a2817c8119e9b6d801bd678a8f1880309d/crates/examples/src/readobj/elf.rs#L2310C1-L2437C3>
20pub enum ArchRelocationType {
21    // Miscellaneous
22    R_ARM_NONE = 0,
23    R_AARCH64_NONE = 256,
24    // Data
25    R_AARCH64_ABS64 = 257,
26    R_AARCH64_ABS32 = 258,
27    R_AARCH64_ABS16 = 259,
28    R_AARCH64_PREL64 = 260,
29    R_AARCH64_PREL32 = 261,
30    R_AARCH64_PREL16 = 262,
31    // Instructions
32    R_AARCH64_MOVW_UABS_G0 = 263,
33    R_AARCH64_MOVW_UABS_G0_NC = 264,
34    R_AARCH64_MOVW_UABS_G1 = 265,
35    R_AARCH64_MOVW_UABS_G1_NC = 266,
36    R_AARCH64_MOVW_UABS_G2 = 267,
37    R_AARCH64_MOVW_UABS_G2_NC = 268,
38    R_AARCH64_MOVW_UABS_G3 = 269,
39    R_AARCH64_MOVW_SABS_G0 = 270,
40    R_AARCH64_MOVW_SABS_G1 = 271,
41    R_AARCH64_MOVW_SABS_G2 = 272,
42    R_AARCH64_LD_PREL_LO19 = 273,
43    R_AARCH64_ADR_PREL_LO21 = 274,
44    R_AARCH64_ADR_PREL_PG_HI21 = 275,
45    R_AARCH64_ADR_PREL_PG_HI21_NC = 276,
46    R_AARCH64_ADD_ABS_LO12_NC = 277,
47    R_AARCH64_LDST8_ABS_LO12_NC = 278,
48    R_AARCH64_TSTBR14 = 279,
49    R_AARCH64_CONDBR19 = 280,
50    R_AARCH64_JUMP26 = 282,
51    R_AARCH64_CALL26 = 283,
52    R_AARCH64_LDST16_ABS_LO12_NC = 284,
53    R_AARCH64_LDST32_ABS_LO12_NC = 285,
54    R_AARCH64_LDST64_ABS_LO12_NC = 286,
55    R_AARCH64_LDST128_ABS_LO12_NC = 299,
56    R_AARCH64_MOVW_PREL_G0 = 287,
57    R_AARCH64_MOVW_PREL_G0_NC = 288,
58    R_AARCH64_MOVW_PREL_G1 = 289,
59    R_AARCH64_MOVW_PREL_G1_NC = 290,
60    R_AARCH64_MOVW_PREL_G2 = 291,
61    R_AARCH64_MOVW_PREL_G2_NC = 292,
62    R_AARCH64_MOVW_PREL_G3 = 293,
63    R_AARCH64_RELATIVE = 1027,
64}
65
66type Arm64RelTy = ArchRelocationType;
67
68const fn do_reloc(op: Aarch64RelocOp, location: Ptr, address: u64) -> u64 {
69    match op {
70        Aarch64RelocOp::RELOC_OP_ABS => address,
71        Aarch64RelocOp::RELOC_OP_PREL => address.wrapping_sub(location.0),
72        Aarch64RelocOp::RELOC_OP_PAGE => (address & !0xfff).wrapping_sub(location.0 & !0xfff),
73        Aarch64RelocOp::RELOC_OP_NONE => 0,
74    }
75}
76
/// Whether `address` is a placement at which an ADRP instruction must not
/// be emitted (Cortex-A53 erratum 843419 concerns ADRP in the last 8 bytes
/// of a 4 KiB page, i.e. page offset >= 0xff8).
///
/// TODO: the erratum workaround is not wired up yet, so the gate is held
/// off with a named constant and this always returns `false`; the offset
/// predicate is kept so enabling it later is a one-line change.
///
/// See <https://elixir.bootlin.com/linux/v6.6/source/arch/arm64/include/asm/module.h#L45>
fn is_forbidden_offset_for_adrp(address: u64) -> bool {
    // Flip to `true` once the erratum-843419 veneer path is implemented.
    const ERRATUM_843419_WORKAROUND_ENABLED: bool = false;
    ERRATUM_843419_WORKAROUND_ENABLED && (address & 0xfff) >= 0xff8
}
83
impl ArchRelocationType {
    /// Apply a data relocation (`R_AARCH64_ABSxx` / `R_AARCH64_PRELxx`) of
    /// `len` bits (16, 32 or 64) at `location`.
    ///
    /// The truncated value is always stored; the return value reports
    /// whether it overflowed the target width (`Ok(true)` = overflow) and
    /// the caller decides whether overflow is fatal.
    ///
    /// See <https://elixir.bootlin.com/linux/v6.6/source/arch/arm64/kernel/module.c#L177>
    fn reloc_data(
        &self,
        op: Aarch64RelocOp,
        location: Ptr,
        address: u64,
        len: usize,
    ) -> Result<bool> {
        let s_addr = do_reloc(op, location, address) as i64;
        /*
         * The ELF psABI for AArch64 documents the 16-bit and 32-bit place
         * relative and absolute relocations as having a range of [-2^15, 2^16)
         * or [-2^31, 2^32), respectively. However, in order to be able to
         * detect overflows reliably, we have to choose whether we interpret
         * such quantities as signed or as unsigned, and stick with it.
         * The way we organize our address space requires a signed
         * interpretation of 32-bit relative references, so let's use that
         * for all R_AARCH64_PRELxx relocations. This means our upper
         * bound for overflow detection should be Sxx_MAX rather than Uxx_MAX.
         */
        match len {
            16 => {
                // Store first, then report overflow (mirrors the kernel flow).
                location.write::<i16>(s_addr as i16);
                match op {
                    // ABS: unsigned interpretation, so negative or > u16::MAX overflows.
                    Aarch64RelocOp::RELOC_OP_ABS => Ok(s_addr < 0 || s_addr > u16::MAX as i64),
                    // PREL: signed interpretation (see the block comment above).
                    Aarch64RelocOp::RELOC_OP_PREL => {
                        Ok(s_addr < i16::MIN as i64 || s_addr > i16::MAX as i64)
                    }
                    _ => {
                        unreachable!("Unsupported operation for AArch64 16-bit relocation")
                    }
                }
            }
            32 => {
                location.write::<i32>(s_addr as i32);
                match op {
                    Aarch64RelocOp::RELOC_OP_ABS => Ok(s_addr < 0 || s_addr > u32::MAX as i64),
                    Aarch64RelocOp::RELOC_OP_PREL => {
                        Ok(s_addr < i32::MIN as i64 || s_addr > i32::MAX as i64)
                    }
                    _ => {
                        unreachable!("Unsupported operation for AArch64 32-bit relocation")
                    }
                }
            }
            64 => {
                // A full 64-bit store can never overflow.
                location.write::<u64>(s_addr as u64);
                Ok(false)
            }
            _ => unreachable!("Unsupported length for AArch64 relocation"),
        }
    }

    /// Apply a MOVW-family relocation: patch the 16-bit immediate of the
    /// MOVZ/MOVK/MOVN instruction at `location` with bits
    /// `[lsb, lsb + 16)` of the relocated value.
    ///
    /// Returns `Ok(true)` when the (possibly inverted) group does not fit
    /// in 16 bits, i.e. the relocation overflowed.
    ///
    /// See <https://elixir.bootlin.com/linux/v6.6/source/arch/arm64/kernel/module.c#L241>
    fn reloc_insn_movw(
        &self,
        op: Aarch64RelocOp,
        location: Ptr,
        address: u64,
        lsb: i32,
        imm_type: Aarch64InsnMovwImmType,
    ) -> Result<bool> {
        let mut insn = location.read::<u32>();
        let s_addr = do_reloc(op, location, address) as i64;

        // Select the 16-bit group starting at `lsb`.
        let mut imm = (s_addr >> lsb) as u64;
        if imm_type == Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ {
            /*
             * For signed MOVW relocations, we have to manipulate the
             * instruction encoding depending on whether or not the
             * immediate is less than zero.
             */
            insn &= !(3 << 29);
            if s_addr >= 0 {
                // >=0: Set the instruction to MOVZ (opcode 10b).
                insn |= 2 << 29;
            } else {
                /*
                 * <0: Set the instruction to MOVN (opcode 00b).
                 *     Since we've masked the opcode already, we
                 *     don't need to do anything other than
                 *     inverting the new immediate field.
                 */
                imm = !imm;
            }
        }
        // Update the instruction with the new encoding.
        insn = aarch64_insn_encode_immediate(Aarch64InsnImmType::AARCH64_INSN_IMM_16, insn, imm);
        location.write::<u32>(insn);

        // Overflow iff the selected group needed more than 16 bits.
        if imm > u16::MAX as u64 {
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// Apply an immediate-field relocation: insert bits `[lsb, lsb + len)`
    /// of the relocated value into the instruction at `location`, with
    /// `imm_type` selecting where the immediate lives in the encoding.
    ///
    /// Returns `Ok(true)` on overflow, i.e. when the bits above the field
    /// are not a plain sign extension of it.
    ///
    /// See <https://elixir.bootlin.com/linux/v6.6/source/arch/arm64/kernel/module.c#L282>
    fn reloc_insn_imm(
        &self,
        op: Aarch64RelocOp,
        location: Ptr,
        address: u64,
        lsb: i32,
        len: i32,
        imm_type: Aarch64InsnImmType,
    ) -> Result<bool> {
        let mut insn = location.read::<u32>();
        // Calculate the relocation value.
        let mut s_addr = do_reloc(op, location, address) as i64;
        s_addr >>= lsb;
        // Extract the value bits and shift them to bit 0.
        let imm_mask = (BIT_U64!(lsb + len) - 1) >> lsb;
        let imm = (s_addr as u64) & imm_mask;

        // Update the instruction's immediate field.
        insn = aarch64_insn_encode_immediate(imm_type, insn, imm);

        location.write::<u32>(insn);

        /*
         * Extract the upper value bits (including the sign bit) and
         * shift them to bit 0.
         */
        // sval = (s64)(sval & ~(imm_mask >> 1)) >> (len - 1);
        s_addr = (s_addr & !((imm_mask >> 1) as i64)) >> (len - 1);

        /*
         * Overflow has occurred if the upper bits are not all equal to
         * the sign bit of the value.
         */
        // After the shift, s_addr must be 0 (non-negative value that fit) or
        // -1 (negative value that fit); `(s_addr + 1) as u64 >= 2` rejects
        // every other pattern, matching the kernel's `sval != 0 && sval != -1`.
        if (s_addr + 1) as u64 >= 2 {
            Ok(true)
        } else {
            Ok(false)
        }
    }

    /// Apply an `R_AARCH64_ADR_PREL_PG_HI21[_NC]` relocation, working
    /// around forbidden ADRP placements (see `is_forbidden_offset_for_adrp`)
    /// by rewriting the ADRP into an ADR when the target page is in ADR's
    /// +/-1 MiB range.
    fn reloc_insn_adrp(&self, location: Ptr, address: u64) -> Result<bool> {
        if !is_forbidden_offset_for_adrp(location.0) {
            // Common case: plain page-relative 21-bit immediate.
            return self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PAGE,
                location,
                address,
                12,
                21,
                Aarch64InsnImmType::AARCH64_INSN_IMM_ADR,
            );
        }
        // patch ADRP to ADR if it is in range
        let ovf = self.reloc_insn_imm(
            Aarch64RelocOp::RELOC_OP_PREL,
            location,
            address & !0xfff,
            0,
            21,
            Aarch64InsnImmType::AARCH64_INSN_IMM_ADR,
        )?;
        if !ovf {
            let mut insn = location.read::<u32>();
            insn &= !BIT!(31); // clear bit 31 to convert ADRP to ADR
            location.write::<u32>(insn);
            Ok(false)
        } else {
            //  out of range for ADR -> emit a veneer
            log::error!("ADR out of range for veneer emission");
            Err(ModuleErr::ENOEXEC)
        }
    }

    /// Dispatch a single relocation of type `self` at `location`, with
    /// `address` holding the already-computed `S + A` value.
    ///
    /// Mirrors the big `switch` in the kernel's `apply_relocate_add()`;
    /// `_NC` ("no check") variants and top-group relocations disable the
    /// final overflow check. Unhandled types (e.g. `R_AARCH64_RELATIVE`)
    /// fall through to the error arm.
    fn apply_relocation(&self, location: u64, address: u64) -> Result<()> {
        // Check for overflow by default.
        let mut check_overflow = true;
        let location = Ptr(location);
        let ovf = match self {
            Arm64RelTy::R_ARM_NONE | Arm64RelTy::R_AARCH64_NONE => false,
            // Data relocations.
            Arm64RelTy::R_AARCH64_ABS64 => {
                check_overflow = false;
                self.reloc_data(Aarch64RelocOp::RELOC_OP_ABS, location, address, 64)?
            }
            Arm64RelTy::R_AARCH64_ABS32 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_ABS, location, address, 32)?
            }
            Arm64RelTy::R_AARCH64_ABS16 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_ABS, location, address, 16)?
            }
            Arm64RelTy::R_AARCH64_PREL64 => {
                check_overflow = false;

                self.reloc_data(Aarch64RelocOp::RELOC_OP_PREL, location, address, 64)?
            }
            Arm64RelTy::R_AARCH64_PREL32 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_PREL, location, address, 32)?
            }
            Arm64RelTy::R_AARCH64_PREL16 => {
                self.reloc_data(Aarch64RelocOp::RELOC_OP_PREL, location, address, 16)?
            }
            // MOVW instruction relocations
            Arm64RelTy::R_AARCH64_MOVW_UABS_G0_NC | Arm64RelTy::R_AARCH64_MOVW_UABS_G0 => {
                if *self == Arm64RelTy::R_AARCH64_MOVW_UABS_G0_NC {
                    check_overflow = false;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    0,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_UABS_G1_NC | Arm64RelTy::R_AARCH64_MOVW_UABS_G1 => {
                if *self == Arm64RelTy::R_AARCH64_MOVW_UABS_G1_NC {
                    check_overflow = false;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    16,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_UABS_G2_NC | Arm64RelTy::R_AARCH64_MOVW_UABS_G2 => {
                if *self == Arm64RelTy::R_AARCH64_MOVW_UABS_G2_NC {
                    check_overflow = false;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    32,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_UABS_G3 => {
                // We're using the top bits so we can't overflow.
                check_overflow = false;
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    48,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_SABS_G0 => self.reloc_insn_movw(
                Aarch64RelocOp::RELOC_OP_ABS,
                location,
                address,
                0,
                Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
            )?,
            Arm64RelTy::R_AARCH64_MOVW_SABS_G1 => self.reloc_insn_movw(
                Aarch64RelocOp::RELOC_OP_ABS,
                location,
                address,
                16,
                Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
            )?,
            Arm64RelTy::R_AARCH64_MOVW_SABS_G2 => self.reloc_insn_movw(
                Aarch64RelocOp::RELOC_OP_ABS,
                location,
                address,
                32,
                Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
            )?,
            Arm64RelTy::R_AARCH64_MOVW_PREL_G0_NC | Arm64RelTy::R_AARCH64_MOVW_PREL_G0 => {
                let mut imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ;
                if *self == Arm64RelTy::R_AARCH64_MOVW_PREL_G0_NC {
                    check_overflow = false;
                    imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    0,
                    imm_type,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_PREL_G1_NC | Arm64RelTy::R_AARCH64_MOVW_PREL_G1 => {
                let mut imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ;
                if *self == Arm64RelTy::R_AARCH64_MOVW_PREL_G1_NC {
                    check_overflow = false;
                    imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    16,
                    imm_type,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_PREL_G2_NC | Arm64RelTy::R_AARCH64_MOVW_PREL_G2 => {
                let mut imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ;
                if *self == Arm64RelTy::R_AARCH64_MOVW_PREL_G2_NC {
                    check_overflow = false;
                    imm_type = Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVKZ;
                }
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    32,
                    imm_type,
                )?
            }
            Arm64RelTy::R_AARCH64_MOVW_PREL_G3 => {
                // We're using the top bits so we can't overflow.
                check_overflow = false;
                self.reloc_insn_movw(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    48,
                    Aarch64InsnMovwImmType::AARCH64_INSN_IMM_MOVNZ,
                )?
            }
            // Immediate instruction relocations.
            Arm64RelTy::R_AARCH64_LD_PREL_LO19 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                2,
                19,
                Aarch64InsnImmType::AARCH64_INSN_IMM_19,
            )?,
            Arm64RelTy::R_AARCH64_ADR_PREL_LO21 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                0,
                21,
                Aarch64InsnImmType::AARCH64_INSN_IMM_ADR,
            )?,
            Arm64RelTy::R_AARCH64_ADR_PREL_PG_HI21_NC | Arm64RelTy::R_AARCH64_ADR_PREL_PG_HI21 => {
                if *self == Arm64RelTy::R_AARCH64_ADR_PREL_PG_HI21_NC {
                    check_overflow = false;
                }
                // https://elixir.bootlin.com/linux/v6.6/source/arch/arm64/kernel/module.c#L491
                self.reloc_insn_adrp(location, address)?
            }
            // LDST*: the low 12 bits are an unsigned page offset scaled by the
            // access size, so `lsb` grows and `len` shrinks with the width.
            Arm64RelTy::R_AARCH64_ADD_ABS_LO12_NC | Arm64RelTy::R_AARCH64_LDST8_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    0,
                    12,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST16_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    1,
                    11,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST32_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    2,
                    10,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST64_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    3,
                    9,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_LDST128_ABS_LO12_NC => {
                check_overflow = false;
                self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_ABS,
                    location,
                    address,
                    4,
                    8,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_12,
                )?
            }
            Arm64RelTy::R_AARCH64_TSTBR14 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                2,
                14,
                Aarch64InsnImmType::AARCH64_INSN_IMM_14,
            )?,
            Arm64RelTy::R_AARCH64_CONDBR19 => self.reloc_insn_imm(
                Aarch64RelocOp::RELOC_OP_PREL,
                location,
                address,
                2,
                19,
                Aarch64InsnImmType::AARCH64_INSN_IMM_19,
            )?,
            Arm64RelTy::R_AARCH64_JUMP26 | Arm64RelTy::R_AARCH64_CALL26 => {
                let ovf = self.reloc_insn_imm(
                    Aarch64RelocOp::RELOC_OP_PREL,
                    location,
                    address,
                    2,
                    26,
                    Aarch64InsnImmType::AARCH64_INSN_IMM_26,
                )?;
                if ovf {
                    // TODO: address = module_emit_plt_entry()
                    unimplemented!(
                        "Veneer emission for out-of-range AArch64 JUMP26/CALL26 not implemented"
                    );
                }
                ovf
            }
            _ => {
                log::error!("Relocation type {:?} not implemented yet", self);
                return Err(ModuleErr::ENOEXEC);
            }
        };
        if check_overflow && ovf {
            log::error!("Overflow detected during relocation type {:?}", self);
            return Err(ModuleErr::ENOEXEC);
        }
        Ok(())
    }
}
530
531pub struct ArchRelocate;
532
533#[allow(unused_assignments)]
534impl ArchRelocate {
535    /// See <https://elixir.bootlin.com/linux/v6.6/source/arch/arm64/kernel/module.c#L344>
536    pub fn apply_relocate_add<H: KernelModuleHelper>(
537        rela_list: &[goblin::elf64::reloc::Rela],
538        rel_section: &SectionHeader,
539        sechdrs: &[SectionHeader],
540        load_info: &ModuleLoadInfo,
541        module: &ModuleOwner<H>,
542    ) -> Result<()> {
543        for rela in rela_list {
544            let rel_type = get_rela_type(rela.r_info);
545            let sym_idx = get_rela_sym_idx(rela.r_info);
546
547            // loc corresponds to P in the AArch64 ELF document.
548            let location = sechdrs[rel_section.sh_info as usize].sh_addr + rela.r_offset;
549            let (sym, sym_name) = &load_info.syms[sym_idx];
550
551            let reloc_type = Arm64RelTy::try_from(rel_type).map_err(|_| {
552                log::error!(
553                    "[{:?}]: Invalid relocation type: {}",
554                    module.name(),
555                    rel_type
556                );
557                ModuleErr::ENOEXEC
558            })?;
559            // val corresponds to (S + A) in the AArch64 ELF document.
560            let target_addr = sym.st_value.wrapping_add(rela.r_addend as u64);
561
562            // Perform the static relocation.
563            log::info!(
564                "[{:?}]: Applying relocation {:?} at location {:#x} with target addr {:#x}",
565                module.name(),
566                reloc_type,
567                location,
568                target_addr
569            );
570
571            let res = reloc_type.apply_relocation(location, target_addr);
572            match res {
573                Err(e) => {
574                    log::error!("[{:?}]: ({}) {:?}", module.name(), sym_name, e);
575                    return Err(e);
576                }
577                Ok(_) => { /* Successfully applied relocation */ }
578            }
579        }
580        Ok(())
581    }
582}
583
584pub fn module_frob_arch_sections<H: KernelModuleHelper>(
585    elf: &mut Elf,
586    owner: &mut ModuleOwner<H>,
587) -> Result<()> {
588    Ok(())
589}