#[cfg(test)]
mod tests;

use alloc::borrow::Cow;
use alloc::rc::Rc;
use core::iter::FusedIterator;
use core::mem::{offset_of, size_of};
use std::fs::read_dir;
use std::path::Path;

use nom::branch::alt as nom_alt;
use nom::bytes::complete::{tag as nom_tag, take as nom_take};
use nom::combinator::peek as nom_peek;
use nom::number::complete::u32 as nom_u32;
use nom::number::Endianness;
use nom::sequence::{preceded as nom_preceded, terminated as nom_terminated};

use crate::utils::{map_file, path_from_bytes};
use crate::{CacheProvider, Error, Result};

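/// Standard locations of the FreeBSD run-time linker hints file, as written by
/// `ldconfig(8)` (64-bit and 32-bit variants).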
pub(crate) static CACHE_FILE_PATHS: &[&str] =
    &["/var/run/ld-elf.so.hints", "/var/run/ld-elf32.so.hints"];

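/// Magic number of the hints file; encoded little-endian it reads as the ASCII
/// string `Ehnt`. Both byte orders are accepted when detecting the file's endianness.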
const MAGIC: u32 = 0x74_6e_68_45;
const MAGIC_LE32: [u8; 4] = MAGIC.to_le_bytes();
const MAGIC_BE32: [u8; 4] = MAGIC.to_be_bytes();

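/// The only hints-file format version accepted by this parser.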
const VERSION: u32 = 1_u32;

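/// On-disk layout of the hints-file header (FreeBSD's `elfhints_hdr`). The
/// fields are never read directly; the type only supplies `size_of` and
/// `offset_of!` values to the parser below.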
#[repr(C)]
struct Header {
    magic: u32,
    version: u32,
    string_table_offset: u32,
    string_table_size: u32,
    dir_list_offset: u32,
    dir_list_size: u32,
    spare: [u32; 26],
}

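/// Header fields the cache actually needs, widened to `usize` and validated
/// against the length of the mapped file.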
#[derive(Debug)]
struct ParsedHeader {
    string_table_offset: usize,
    string_table_size: usize,
    dir_list_offset: usize,
    dir_list_size: usize,
}

impl ParsedHeader {
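    /// Parses the header at the start of `bytes`: detects the byte order from the
    /// magic number, checks the format version, converts the offset/size fields to
    /// `usize`, and validates them. `path` is used only for error reporting.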
    fn parse(path: &Path, bytes: &[u8]) -> Result<Self> {
        let (input, byte_order) =
            Self::parse_byte_order(bytes).map_err(|r| Error::from_nom_parse(r, bytes, path))?;

        let (_input, fields) = Self::parse_fields(input, byte_order)
            .map_err(|r| Error::from_nom_parse(r, input, path))?;

        let result = Self {
            string_table_offset: usize::try_from(fields.0)?,
            string_table_size: usize::try_from(fields.1)?,
            dir_list_offset: usize::try_from(fields.2)?,
            dir_list_size: usize::try_from(fields.3)?,
        };

        result.validate(path, bytes).map(|()| result)
    }

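    /// Matches the magic number and infers the header's byte order from whether
    /// its little-endian or big-endian encoding was found.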
    fn parse_byte_order(bytes: &[u8]) -> nom::IResult<&[u8], Endianness> {
        use nom::Parser;

        nom_alt((
            nom_tag(&MAGIC_LE32[..]).map(|_| Endianness::Little),
            nom_tag(&MAGIC_BE32[..]).map(|_| Endianness::Big),
        ))
        .parse(bytes)
    }

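    /// Parses the rest of the header in the detected byte order: expects the
    /// version tag, reads the four offset/size fields, and skips the trailing
    /// `spare` words.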
    fn parse_fields(
        bytes: &[u8],
        byte_order: Endianness,
    ) -> nom::IResult<&[u8], (u32, u32, u32, u32)> {
        use nom::Parser;

        let version_bytes = match byte_order {
            Endianness::Big => VERSION.to_be_bytes(),
            Endianness::Little => VERSION.to_le_bytes(),
            Endianness::Native => VERSION.to_ne_bytes(),
        };

        let mut parser = (
            nom_preceded(nom_tag(&version_bytes[..]), nom_u32(byte_order)),
            nom_u32(byte_order),
            nom_u32(byte_order),
            nom_terminated(
                nom_u32(byte_order),
                nom_take(size_of::<Header>().saturating_sub(offset_of!(Header, spare))),
            ),
        );

        parser.parse(bytes)
    }

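    /// Ensures the string table and the directory list (plus one trailing byte)
    /// declared by the header actually fit inside the mapped file, turning any
    /// violation into a parse error.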
    fn validate(&self, path: &Path, bytes: &[u8]) -> Result<()> {
        use nom::Parser;

        let size_after_string_table = bytes.len().saturating_sub(self.string_table_offset);
        let max_dir_list_size = size_after_string_table
            .saturating_sub(self.dir_list_offset)
            .saturating_sub(1);

        if self.string_table_size > size_after_string_table
            || self.dir_list_offset > size_after_string_table
            || self.dir_list_size > max_dir_list_size
        {
            let r = nom::error::make_error(bytes, nom::error::ErrorKind::TooLarge);
            return Err(Error::from_nom_parse(nom::Err::Error(r), bytes, path));
        }

        let string_table_end = self
            .string_table_offset
            .saturating_add(self.string_table_size);
        let dir_list_end = self
            .string_table_offset
            .saturating_add(self.dir_list_offset)
            .saturating_add(self.dir_list_size)
            .saturating_add(1);
        let min_size = string_table_end.max(dir_list_end);

        nom_peek(nom_take(min_size))
            .parse(bytes)
            .map(|_| ())
            .map_err(|r| Error::from_nom_parse(r, bytes, path))
    }
}

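// A minimal usage sketch (hedged: the surrounding function and error handling
// are illustrative; only `Cache`, `CACHE_FILE_PATHS`, and `crate::Entry` are
// items defined in this crate):
//
//     let cache = Cache::load(CACHE_FILE_PATHS[0])?;
//     for entry in cache.iter()? {
//         let entry = entry?;
//         println!("{}", entry.full_path.display());
//     }

/// A memory-mapped `ld-elf.so.hints` file, reduced to the byte range of its
/// colon-separated search-directory list.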
#[derive(Debug)]
pub struct Cache {
    map: memmap2::Mmap,
    dir_list_offset: usize,
    dir_list_size: usize,
}

impl Cache {
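    /// Memory-maps the hints file at `path`, parses and validates its header, and
    /// records where the directory list lives inside the mapping.
    ///
    /// # Errors
    ///
    /// Fails if the file cannot be mapped or if its header does not parse or validate.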
    pub fn load(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref();
        let map = map_file(path)?;
        let header = ParsedHeader::parse(path, &map)?;

        let dir_list_offset = header
            .string_table_offset
            .saturating_add(header.dir_list_offset);

        Ok(Self {
            map,
            dir_list_offset,
            dir_list_size: header.dir_list_size,
        })
    }

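    /// Iterates over the entries of every search directory listed in the cache.
    /// The directory list is split on `:`; paths that fail to convert and
    /// directories that cannot be opened are skipped silently, while individual
    /// entry read failures are reported as [`Error::ReadDir`].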
    pub fn iter(&self) -> Result<impl FusedIterator<Item = Result<crate::Entry<'_>>> + '_> {
        let bytes = &self.map[self.dir_list_range()];

        let iter = bytes
            .split(|&b| b == b':')
            .map(path_from_bytes)
            .filter_map(Result::ok)
            .map(Rc::new)
            .filter_map(|path| {
                read_dir(path.as_ref().as_ref())
                    .ok()
                    .map(move |dirs| dirs.map(move |entries| (Rc::clone(&path), entries)))
            })
            .flatten()
            .map(|(path, entry)| match entry {
                Ok(entry) => Ok(crate::Entry {
                    file_name: Cow::Owned(entry.file_name()),
                    full_path: Cow::Owned(entry.path()),
                }),

                Err(source) => {
                    let path = path.as_ref().as_ref().into();
                    Err(Error::ReadDir { path, source })
                }
            });

        Ok(iter)
    }

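    /// Byte range of the directory list within the memory-mapped file.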
    fn dir_list_range(&self) -> core::ops::Range<usize> {
        let end = self.dir_list_offset.saturating_add(self.dir_list_size);
        self.dir_list_offset..end
    }
}

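// Object-safe adapter: boxes the iterator from `Cache::iter` so the cache can be
// used behind the crate-wide `CacheProvider` trait.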
impl CacheProvider for Cache {
    fn entries_iter<'cache>(
        &'cache self,
    ) -> Result<Box<dyn FusedIterator<Item = Result<crate::Entry<'cache>>> + 'cache>> {
        let iter = self.iter()?;
        Ok(Box::new(iter))
    }
}