use alloc::vec::Vec;
use core::cmp::Ordering;
use core::ops::Deref;
use core::ops::DerefMut;
use core::ops::Range;
use log::warn;

use crate::align_down;
use crate::align_up;
use crate::check_u32;
use crate::zero;
use crate::BlockRead;
use crate::BlockWrite;
use crate::ByteOrder;
use crate::Class;
use crate::ElfRead;
use crate::ElfSeek;
use crate::ElfWrite;
use crate::EntityIo;
use crate::Error;
use crate::Header;
use crate::SegmentFlags;
use crate::SegmentKind;

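/// Program header: the table of segments read from an ELF file.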
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct ProgramHeader {
    entries: Vec<Segment>,
}

impl BlockRead for ProgramHeader {
    fn read<R: ElfRead>(
        reader: &mut R,
        class: Class,
        byte_order: ByteOrder,
        len: u64,
    ) -> Result<Self, Error> {
        let num_segments = len / class.segment_len() as u64;
        let mut entries = Vec::with_capacity(num_segments as usize);
        for _ in 0..num_segments {
            let entry = Segment::read(reader, class, byte_order)?;
            entries.push(entry);
        }
        let ret = Self { entries };
        Ok(ret)
    }
}

impl BlockWrite for ProgramHeader {
    fn write<W: ElfWrite>(
        &self,
        writer: &mut W,
        class: Class,
        byte_order: ByteOrder,
    ) -> Result<(), Error> {
        for entry in self.entries.iter() {
            entry.write(writer, class, byte_order)?;
        }
        Ok(())
    }
}

impl ProgramHeader {
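    /// Validates every segment as well as the invariants that hold across segments.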
    pub fn check(&self, header: &Header, page_size: u64) -> Result<(), Error> {
        for segment in self.entries.iter() {
            segment.check(header.class)?;
        }
        self.check_sorted()?;
        self.check_overlap(page_size)?;
        self.check_entry_point(header.entry_point)?;
        self.check_count()?;
        self.check_order()?;
        self.check_phdr()?;
        Ok(())
    }

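    /// Sorts the segments: `PHDR` first, then `INTERP`, then the remaining
    /// segments ordered by virtual address.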
    pub fn finish(&mut self) {
        self.entries.sort_unstable_by(|a, b| {
            if a.kind == SegmentKind::ProgramHeader {
                return Ordering::Less;
            }
            if b.kind == SegmentKind::ProgramHeader {
                return Ordering::Greater;
            }
            if a.kind == SegmentKind::Interpreter {
                return Ordering::Less;
            }
            if b.kind == SegmentKind::Interpreter {
                return Ordering::Greater;
            }
            a.virtual_address.cmp(&b.virtual_address)
        });
    }

    fn check_sorted(&self) -> Result<(), Error> {
        let mut prev: Option<&Segment> = None;
        for segment in self.entries.iter() {
            if segment.kind != SegmentKind::Loadable {
                continue;
            }
            if let Some(prev) = prev.as_ref() {
                let segment_start = segment.virtual_address;
                let prev_start = prev.virtual_address;
                if prev_start > segment_start {
                    return Err(Error::SegmentsNotSorted);
                }
            }
            prev = Some(segment);
        }
        Ok(())
    }

    fn check_overlap(&self, page_size: u64) -> Result<(), Error> {
        let filters = [
            |segment: &Segment, page_size: u64| {
                if segment.kind != SegmentKind::Loadable {
                    return None;
                }
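                // LOAD segments are mapped in whole pages, so round the memory
                // range out to page boundaries before checking for overlap.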
                let segment_start = align_down(segment.virtual_address, page_size);
                let segment_end = align_up(segment_start + segment.memory_size, page_size);
                if segment_start == segment_end {
                    return None;
                }
                Some(segment_start..segment_end)
            },
            |segment: &Segment, _page_size: u64| {
                if segment.kind != SegmentKind::Loadable {
                    return None;
                }
                let segment_start = segment.offset;
                let segment_end = segment_start + segment.file_size;
                if segment_start == segment_end {
                    return None;
                }
                Some(segment_start..segment_end)
            },
        ];
        for filter in filters.into_iter() {
            let mut ranges = self
                .entries
                .iter()
                .filter_map(|segment| filter(segment, page_size))
                .collect::<Vec<_>>();
            ranges.sort_unstable_by_key(|segment| segment.start);
            for i in 1..ranges.len() {
                let cur = &ranges[i];
                let prev = &ranges[i - 1];
                if prev.end > cur.start {
                    return Err(Error::SegmentsOverlap(
                        prev.start, prev.end, cur.start, cur.end,
                    ));
                }
            }
        }
        Ok(())
    }

    fn check_entry_point(&self, entry_point: u64) -> Result<(), Error> {
        if entry_point != 0
            && !self.entries.iter().any(|segment| {
                segment.kind == SegmentKind::Loadable
                    && segment.virtual_address_range().contains(&entry_point)
            })
        {
            return Err(Error::InvalidEntryPoint(entry_point));
        }
        Ok(())
    }

    fn check_count(&self) -> Result<(), Error> {
        use SegmentKind::*;
        for kind in [ProgramHeader, Interpreter] {
            if self
                .entries
                .iter()
                .filter(|segment| segment.kind == kind)
                .count()
                > 1
            {
                return Err(Error::MultipleSegments(kind));
            }
        }
        Ok(())
    }

    fn check_order(&self) -> Result<(), Error> {
        use SegmentKind::*;
        let mut load_found = false;
        for segment in self.entries.iter() {
            match segment.kind {
                ProgramHeader if load_found => {
                    return Err(Error::NotPreceedingLoadSegment(segment.kind))
                }
                Interpreter if load_found => {
                    warn!("{}", Error::NotPreceedingLoadSegment(segment.kind));
                }
                Loadable => load_found = true,
                _ => {}
            }
        }
        Ok(())
    }

    fn check_phdr(&self) -> Result<(), Error> {
        let Some(phdr) = self
            .entries
            .iter()
            .find(|entry| entry.kind == SegmentKind::ProgramHeader)
        else {
            return Ok(());
        };
        if !self.entries.iter().any(|segment| {
            if segment.kind != SegmentKind::Loadable {
                return false;
            }
            let segment_start = segment.virtual_address;
            let segment_end = segment_start + segment.memory_size;
            let phdr_start = phdr.virtual_address;
            let phdr_end = phdr_start + phdr.memory_size;
            segment_start <= phdr_start && phdr_start <= segment_end && phdr_end <= segment_end
        }) {
            return Err(Error::InvalidProgramHeaderSegment(
                "PHDR segment should be covered by a LOAD segment",
            ));
        }
        Ok(())
    }

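    /// Removes the segment at index `i` and zeroes the bytes it occupied in the file.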
    pub(crate) fn free<W: ElfWrite + ElfSeek>(
        &mut self,
        writer: &mut W,
        i: usize,
    ) -> Result<Segment, Error> {
        let segment = self.entries.remove(i);
        segment.clear_content(writer)?;
        Ok(segment)
    }

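    /// Adds a segment and returns its index. Non-`Null` segments reuse a spare
    /// `Null` entry when one exists.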
    pub(crate) fn add(&mut self, segment: Segment) -> usize {
        if segment.kind == SegmentKind::Null {
            let i = self.entries.len();
            self.entries.push(segment);
            return i;
        }
        let spare_index = self
            .entries
            .iter()
            .position(|segment| segment.kind == SegmentKind::Null);
        let i = match spare_index {
            Some(i) => {
                self.entries[i] = segment;
                i
            }
            None => {
                let i = self.entries.len();
                self.entries.push(segment);
                i
            }
        };
        let segment = &self.entries[i];
        log::trace!(
            "Adding segment [{i}] {:?}, file offsets {:#x}..{:#x}, memory offsets {:#x}..{:#x}",
            segment.kind,
            segment.offset,
            segment.offset + segment.file_size,
            segment.virtual_address,
            segment.virtual_address + segment.memory_size
        );
        i
    }
}

impl Deref for ProgramHeader {
    type Target = Vec<Segment>;
    fn deref(&self) -> &Self::Target {
        &self.entries
    }
}

impl DerefMut for ProgramHeader {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.entries
    }
}

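/// Program header entry describing a single segment of the file.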
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct Segment {
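    /// Segment type (`p_type`).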
    pub kind: SegmentKind,
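    /// Segment flags (`p_flags`).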
    pub flags: SegmentFlags,
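    /// Offset of the segment's contents in the file (`p_offset`).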
    pub offset: u64,
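    /// Virtual address at which the segment is loaded (`p_vaddr`).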
    pub virtual_address: u64,
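    /// Physical address of the segment (`p_paddr`).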
    pub physical_address: u64,
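    /// Size of the segment in the file (`p_filesz`).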
    pub file_size: u64,
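    /// Size of the segment in memory (`p_memsz`).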
    pub memory_size: u64,
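    /// Alignment of the segment in the file and in memory (`p_align`).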
    pub align: u64,
}

impl EntityIo for Segment {
    fn read<R: ElfRead>(
        reader: &mut R,
        class: Class,
        byte_order: ByteOrder,
    ) -> Result<Self, Error> {
        let kind: SegmentKind = reader.read_u32(byte_order)?.into();
        let mut flags = 0;
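        // In ELF64 `p_flags` directly follows `p_type`; in ELF32 it comes after `p_memsz`.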
        if class == Class::Elf64 {
            flags = reader.read_u32(byte_order)?;
        }
        let offset = reader.read_word(class, byte_order)?;
        let virtual_address = reader.read_word(class, byte_order)?;
        let physical_address = reader.read_word(class, byte_order)?;
        let file_size = reader.read_word(class, byte_order)?;
        let memory_size = reader.read_word(class, byte_order)?;
        if class == Class::Elf32 {
            flags = reader.read_u32(byte_order)?;
        }
        let align = reader.read_word(class, byte_order)?;
        Ok(Self {
            kind,
            flags: SegmentFlags::from_bits_retain(flags),
            offset,
            virtual_address,
            physical_address,
            file_size,
            memory_size,
            align,
        })
    }

    fn write<W: ElfWrite>(
        &self,
        writer: &mut W,
        class: Class,
        byte_order: ByteOrder,
    ) -> Result<(), Error> {
        writer.write_u32(byte_order, self.kind.as_u32())?;
        if class == Class::Elf64 {
            writer.write_u32(byte_order, self.flags.bits())?;
        }
        writer.write_word(class, byte_order, self.offset)?;
        writer.write_word(class, byte_order, self.virtual_address)?;
        writer.write_word(class, byte_order, self.physical_address)?;
        writer.write_word(class, byte_order, self.file_size)?;
        writer.write_word(class, byte_order, self.memory_size)?;
        if class == Class::Elf32 {
            writer.write_u32(byte_order, self.flags.bits())?;
        }
        writer.write_word(class, byte_order, self.align)?;
        Ok(())
    }
}

impl Segment {
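    /// Seeks to the segment's file offset and reads its contents as `T`.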
    pub fn read_content<R: ElfRead + ElfSeek, T: BlockRead>(
        &self,
        reader: &mut R,
        class: Class,
        byte_order: ByteOrder,
    ) -> Result<T, Error> {
        reader.seek(self.offset)?;
        T::read(reader, class, byte_order, self.file_size)
    }

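    /// Seeks to the segment's file offset and writes `content` there.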
    pub fn write_content<W: ElfWrite + ElfSeek, T: BlockWrite + ?Sized>(
        &self,
        writer: &mut W,
        class: Class,
        byte_order: ByteOrder,
        content: &T,
    ) -> Result<(), Error> {
        writer.seek(self.offset)?;
        content.write(writer, class, byte_order)?;
        Ok(())
    }

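    /// Fills the segment's file range with zeroes.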
    pub fn clear_content<W: ElfWrite + ElfSeek>(&self, writer: &mut W) -> Result<(), Error> {
        zero(writer, self.offset, self.file_size)?;
        Ok(())
    }

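    /// In-memory range of the segment based on its physical address.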
    pub const fn physical_address_range(&self) -> Range<u64> {
        let start = self.physical_address;
        let end = start + self.memory_size;
        start..end
    }

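    /// In-memory range of the segment based on its virtual address.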
    pub const fn virtual_address_range(&self) -> Range<u64> {
        let start = self.virtual_address;
        let end = start + self.memory_size;
        start..end
    }

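    /// Range of the file occupied by the segment.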
    pub const fn file_offset_range(&self) -> Range<u64> {
        let start = self.offset;
        let end = start + self.file_size;
        start..end
    }

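    /// Validates the segment: checks for arithmetic overflow and alignment.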
    pub fn check(&self, class: Class) -> Result<(), Error> {
        self.check_overflow(class)?;
        self.check_align()?;
        Ok(())
    }

    fn check_overflow(&self, class: Class) -> Result<(), Error> {
        match class {
            Class::Elf32 => {
                check_u32(self.offset, "Segment offset")?;
                check_u32(self.virtual_address, "Segment virtual address")?;
                check_u32(self.physical_address, "Segment physical address")?;
                check_u32(self.file_size, "Segment in-file size")?;
                check_u32(self.memory_size, "Segment in-memory size")?;
                check_u32(self.align, "Segment align")?;
                let offset = self.offset as u32;
                let file_size = self.file_size as u32;
                let virtual_address = self.virtual_address as u32;
                let physical_address = self.physical_address as u32;
                let memory_size = self.memory_size as u32;
                if offset.checked_add(file_size).is_none() {
                    return Err(Error::TooBig("Segment in-file size"));
                }
                if virtual_address.checked_add(memory_size).is_none()
                    || physical_address.checked_add(memory_size).is_none()
                {
                    return Err(Error::TooBig("Segment in-memory size"));
                }
            }
            Class::Elf64 => {
                if self.offset.checked_add(self.file_size).is_none() {
                    return Err(Error::TooBig("Segment in-file size"));
                }
                if self.virtual_address.checked_add(self.memory_size).is_none()
                    || self
                        .physical_address
                        .checked_add(self.memory_size)
                        .is_none()
                {
                    return Err(Error::TooBig("Segment in-memory size"));
                }
            }
        }
        Ok(())
    }

    fn check_align(&self) -> Result<(), Error> {
        if !align_is_valid(self.align) {
            return Err(Error::InvalidAlign(self.align));
        }
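        // For LOAD segments the ELF format requires `p_offset` and `p_vaddr`
        // to be congruent modulo `p_align`.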
        if self.kind == SegmentKind::Loadable
            && self.align != 0
            && self.offset % self.align != self.virtual_address % self.align
        {
            let file_start = self.virtual_address;
            let file_end = file_start + self.file_size;
            let memory_start = self.virtual_address;
            let memory_end = memory_start + self.memory_size;
            return Err(Error::MisalignedSegment(
                file_start,
                file_end,
                memory_start,
                memory_end,
                self.align,
            ));
        }
        Ok(())
    }
}

const fn align_is_valid(align: u64) -> bool {
    align == 0 || align.is_power_of_two()
}

#[cfg(test)]
mod tests {
    use super::*;

    use arbitrary::Unstructured;

    use crate::constants::*;
    use crate::test::test_block_io;
    use crate::test::test_entity_io;
    use crate::test::ArbitraryWithClass;

    #[test]
    fn segment_io() {
        test_entity_io::<Segment>();
    }

    #[test]
    fn program_header_io() {
        test_block_io::<ProgramHeader>();
    }

    impl ArbitraryWithClass<'_> for ProgramHeader {
        fn arbitrary(u: &mut Unstructured<'_>, class: Class) -> arbitrary::Result<Self> {
            let num_entries = u.arbitrary_len::<[u8; SEGMENT_LEN_64]>()?;
            let mut entries: Vec<Segment> = Vec::with_capacity(num_entries);
            for _ in 0..num_entries {
                entries.push(Segment::arbitrary(u, class)?);
            }
            Ok(ProgramHeader { entries })
        }
    }

    impl ArbitraryWithClass<'_> for Segment {
        fn arbitrary(u: &mut Unstructured<'_>, class: Class) -> arbitrary::Result<Self> {
            Ok(Self {
                kind: u.arbitrary()?,
                flags: SegmentFlags::from_bits_retain(u.arbitrary()?),
                offset: class.arbitrary_word(u)?,
                virtual_address: class.arbitrary_word(u)?,
                physical_address: class.arbitrary_word(u)?,
                file_size: class.arbitrary_word(u)?,
                memory_size: class.arbitrary_word(u)?,
                align: class.arbitrary_align(u)?,
            })
        }
    }
}