//! Memory map tags: [`MemoryMapTag`], [`BasicMemoryInfoTag`], [`EFIMemoryMapTag`],
//! and their associated helper types.

pub use uefi_raw::table::boot::MemoryAttribute as EFIMemoryAttribute;
pub use uefi_raw::table::boot::MemoryDescriptor as EFIMemoryDesc;
pub use uefi_raw::table::boot::MemoryType as EFIMemoryAreaType;

use crate::tag::TagHeader;
use crate::{TagType, TagTypeId};
use core::fmt::{Debug, Formatter};
use core::marker::PhantomData;
use core::mem;
use multiboot2_common::{MaybeDynSized, Tag};
#[cfg(feature = "builder")]
use {alloc::boxed::Box, core::slice, multiboot2_common::new_boxed};

/// The memory map tag: a list of [`MemoryArea`]s describing the physical
/// memory regions reported by the bootloader.
#[derive(ptr_meta::Pointee, Debug, PartialEq, Eq)]
#[repr(C, align(8))]
pub struct MemoryMapTag {
    header: TagHeader,
    entry_size: u32,
    entry_version: u32,
    areas: [MemoryArea],
}

impl MemoryMapTag {
    /// Constructs a new tag from the given slice of memory areas.
    #[cfg(feature = "builder")]
    #[must_use]
    pub fn new(areas: &[MemoryArea]) -> Box<Self> {
        let header = TagHeader::new(Self::ID, 0);
        let entry_size = (mem::size_of::<MemoryArea>() as u32).to_ne_bytes();
        let entry_version = 0_u32.to_ne_bytes();
        let areas = {
            let ptr = areas.as_ptr().cast::<u8>();
            let len = mem::size_of_val(areas);
            unsafe { slice::from_raw_parts(ptr, len) }
        };
        new_boxed(header, &[&entry_size, &entry_version, areas])
    }

    /// Returns the size of a single memory map entry in bytes.
    #[must_use]
    pub const fn entry_size(&self) -> u32 {
        self.entry_size
    }

    /// Returns the version of the memory map entry format.
    #[must_use]
    pub const fn entry_version(&self) -> u32 {
        self.entry_version
    }

    /// Returns the memory areas of the memory map.
    #[must_use]
    pub fn memory_areas(&self) -> &[MemoryArea] {
        assert_eq!(self.entry_size as usize, mem::size_of::<MemoryArea>());
        &self.areas
    }
}

impl MaybeDynSized for MemoryMapTag {
    type Header = TagHeader;

    const BASE_SIZE: usize = mem::size_of::<TagHeader>() + 2 * mem::size_of::<u32>();

    fn dst_len(header: &TagHeader) -> usize {
        assert!(header.size as usize >= Self::BASE_SIZE);
        let size = header.size as usize - Self::BASE_SIZE;
        assert_eq!(size % mem::size_of::<MemoryArea>(), 0);
        size / mem::size_of::<MemoryArea>()
    }
}

impl Tag for MemoryMapTag {
    type IDType = TagType;

    const ID: TagType = TagType::Mmap;
}

/// A descriptor for a single region of physical memory together with its
/// [`MemoryAreaTypeId`].
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct MemoryArea {
    base_addr: u64,
    length: u64,
    typ: MemoryAreaTypeId,
    _reserved: u32,
}

impl MemoryArea {
    /// Creates a new memory area descriptor.
    #[must_use]
    pub fn new(base_addr: u64, length: u64, typ: impl Into<MemoryAreaTypeId>) -> Self {
        Self {
            base_addr,
            length,
            typ: typ.into(),
            _reserved: 0,
        }
    }

    /// Returns the start address of the memory area.
    #[must_use]
    pub const fn start_address(&self) -> u64 {
        self.base_addr
    }

    /// Returns the (exclusive) end address of the memory area.
    #[must_use]
    pub const fn end_address(&self) -> u64 {
        self.base_addr + self.length
    }

    /// Returns the size of the memory area in bytes.
    #[must_use]
    pub const fn size(&self) -> u64 {
        self.length
    }

    /// Returns the type ID of the memory area.
    #[must_use]
    pub const fn typ(&self) -> MemoryAreaTypeId {
        self.typ
    }
}

impl Debug for MemoryArea {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("MemoryArea")
            .field("base_addr", &self.base_addr)
            .field("length", &self.length)
            .field("typ", &self.typ)
            .finish()
    }
}

/// Newtype around the raw `u32` memory area type ID as it appears in the
/// boot information. Use [`MemoryAreaType`] for a typed view.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct MemoryAreaTypeId(u32);

impl From<u32> for MemoryAreaTypeId {
    fn from(value: u32) -> Self {
        Self(value)
    }
}

impl From<MemoryAreaTypeId> for u32 {
    fn from(value: MemoryAreaTypeId) -> Self {
        value.0
    }
}

impl Debug for MemoryAreaTypeId {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        let mt = MemoryAreaType::from(*self);
        Debug::fmt(&mt, f)
    }
}

/// Typed view of a [`MemoryAreaTypeId`] with named variants for the known
/// memory area types.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MemoryAreaType {
    /// Available (usable) RAM.
    Available,
    /// Memory reserved by the system.
    Reserved,
    /// ACPI memory that can be reclaimed.
    AcpiAvailable,
    /// Memory that must be preserved across hibernation.
    ReservedHibernate,
    /// Defective RAM.
    Defective,
    /// Any other type ID not covered by the variants above.
    Custom(u32),
}

impl From<MemoryAreaTypeId> for MemoryAreaType {
    fn from(value: MemoryAreaTypeId) -> Self {
        match value.0 {
            1 => Self::Available,
            2 => Self::Reserved,
            3 => Self::AcpiAvailable,
            4 => Self::ReservedHibernate,
            5 => Self::Defective,
            val => Self::Custom(val),
        }
    }
}

impl From<MemoryAreaType> for MemoryAreaTypeId {
    fn from(value: MemoryAreaType) -> Self {
        let integer = match value {
            MemoryAreaType::Available => 1,
            MemoryAreaType::Reserved => 2,
            MemoryAreaType::AcpiAvailable => 3,
            MemoryAreaType::ReservedHibernate => 4,
            MemoryAreaType::Defective => 5,
            MemoryAreaType::Custom(val) => val,
        };
        integer.into()
    }
}

impl PartialEq<MemoryAreaType> for MemoryAreaTypeId {
    fn eq(&self, other: &MemoryAreaType) -> bool {
        let val: Self = (*other).into();
        let val: u32 = val.0;
        self.0.eq(&val)
    }
}

impl PartialEq<MemoryAreaTypeId> for MemoryAreaType {
    fn eq(&self, other: &MemoryAreaTypeId) -> bool {
        let val: MemoryAreaTypeId = (*self).into();
        let val: u32 = val.0;
        other.0.eq(&val)
    }
}

/// Basic memory info tag: reports the amount of lower and upper memory, as
/// passed by the bootloader.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct BasicMemoryInfoTag {
    header: TagHeader,
    memory_lower: u32,
    memory_upper: u32,
}

impl BasicMemoryInfoTag {
    /// Constructs a new tag from the given lower and upper memory values.
    #[must_use]
    pub fn new(memory_lower: u32, memory_upper: u32) -> Self {
        Self {
            header: TagHeader::new(Self::ID, mem::size_of::<Self>().try_into().unwrap()),
            memory_lower,
            memory_upper,
        }
    }

    /// Returns the amount of lower memory.
    #[must_use]
    pub const fn memory_lower(&self) -> u32 {
        self.memory_lower
    }

    /// Returns the amount of upper memory.
    #[must_use]
    pub const fn memory_upper(&self) -> u32 {
        self.memory_upper
    }
}

impl MaybeDynSized for BasicMemoryInfoTag {
    type Header = TagHeader;

    const BASE_SIZE: usize = mem::size_of::<Self>();

    fn dst_len(_: &TagHeader) {}
}

impl Tag for BasicMemoryInfoTag {
    type IDType = TagType;

    const ID: TagType = TagType::BasicMeminfo;
}

/// EFI memory map tag. The EFI memory map is stored as a raw byte buffer of
/// EFI memory descriptors; use [`Self::memory_areas`] to iterate over them.
#[derive(ptr_meta::Pointee, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct EFIMemoryMapTag {
    header: TagHeader,
    /// Size of one EFI memory descriptor in bytes. This is not necessarily
    /// equal to `size_of::<EFIMemoryDesc>()`.
    desc_size: u32,
    /// Version of the EFI memory descriptor format.
    desc_version: u32,
    /// Raw EFI memory map buffer.
    memory_map: [u8],
}

impl EFIMemoryMapTag {
    /// Constructs a new tag from a slice of EFI memory descriptors.
    #[cfg(feature = "builder")]
    #[must_use]
    pub fn new_from_descs(descs: &[EFIMemoryDesc]) -> Box<Self> {
        let efi_mmap = {
            let ptr = descs.as_ptr().cast::<u8>();
            let len = mem::size_of_val(descs);
            unsafe { slice::from_raw_parts(ptr, len) }
        };

        Self::new_from_map(
            mem::size_of::<EFIMemoryDesc>() as u32,
            EFIMemoryDesc::VERSION,
            efi_mmap,
        )
    }

    /// Constructs a new tag from a raw EFI memory map buffer with the given
    /// descriptor size and version.
    #[cfg(feature = "builder")]
    #[must_use]
    pub fn new_from_map(desc_size: u32, desc_version: u32, efi_mmap: &[u8]) -> Box<Self> {
        let header = TagHeader::new(Self::ID, 0);
        assert_ne!(desc_size, 0);
        let desc_size = desc_size.to_ne_bytes();
        let desc_version = desc_version.to_ne_bytes();
        new_boxed(header, &[&desc_size, &desc_version, efi_mmap])
    }

    /// Returns an iterator over the EFI memory descriptors in the map.
    #[must_use]
    pub fn memory_areas(&self) -> EFIMemoryAreaIter {
        assert_eq!(self.desc_version, EFIMemoryDesc::VERSION);
        assert_eq!(
            self.memory_map
                .as_ptr()
                .align_offset(mem::align_of::<EFIMemoryDesc>()),
            0
        );

        EFIMemoryAreaIter::new(self)
    }
}

impl Debug for EFIMemoryMapTag {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("EFIMemoryMapTag")
            .field("typ", &self.header.typ)
            .field("size", &self.header.size)
            .field("desc_size", &self.desc_size)
            .field("desc_version", &self.desc_version)
            .field("buf", &self.memory_map.as_ptr())
            .field("buf_len", &self.memory_map.len())
            .field("entries", &self.memory_areas())
            .finish()
    }
}

impl MaybeDynSized for EFIMemoryMapTag {
    type Header = TagHeader;

    const BASE_SIZE: usize = mem::size_of::<TagTypeId>() + 3 * mem::size_of::<u32>();

    fn dst_len(header: &TagHeader) -> usize {
        assert!(header.size as usize >= Self::BASE_SIZE);
        header.size as usize - Self::BASE_SIZE
    }
}

impl Tag for EFIMemoryMapTag {
    type IDType = TagType;

    const ID: TagType = TagType::EfiMmap;
}

/// Iterator over the EFI memory descriptors of an [`EFIMemoryMapTag`],
/// honoring the tag's `desc_size` as the stride between entries.
#[derive(Clone)]
pub struct EFIMemoryAreaIter<'a> {
    mmap_tag: &'a EFIMemoryMapTag,
    i: usize,
    entries: usize,
    phantom: PhantomData<&'a EFIMemoryDesc>,
}

impl<'a> EFIMemoryAreaIter<'a> {
    fn new(mmap_tag: &'a EFIMemoryMapTag) -> Self {
        let desc_size = mmap_tag.desc_size as usize;
        let mmap_len = mmap_tag.memory_map.len();
        assert_eq!(
            mmap_len % desc_size,
            0,
            "memory map length must be a multiple of `desc_size` by definition. The MBI seems to be corrupt."
        );
        Self {
            mmap_tag,
            i: 0,
            entries: mmap_len / desc_size,
            phantom: PhantomData,
        }
    }
}

impl<'a> Iterator for EFIMemoryAreaIter<'a> {
    type Item = &'a EFIMemoryDesc;
    fn next(&mut self) -> Option<&'a EFIMemoryDesc> {
        if self.i >= self.entries {
            return None;
        }

        let desc = unsafe {
            self.mmap_tag
                .memory_map
                .as_ptr()
                .add(self.i * self.mmap_tag.desc_size as usize)
                .cast::<EFIMemoryDesc>()
                .as_ref()
                .unwrap()
        };

        self.i += 1;

        Some(desc)
    }
}

impl ExactSizeIterator for EFIMemoryAreaIter<'_> {
    fn len(&self) -> usize {
        self.entries
    }
}

impl Debug for EFIMemoryAreaIter<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        let mut debug = f.debug_list();
        let iter = self.clone();
        for elem in iter {
            debug.entry(elem);
        }
        debug.finish()
    }
}

#[cfg(all(test, feature = "builder"))]
mod tests {
    use super::*;
    use std::mem::size_of;

    #[test]
    fn test_create_old_mmap() {
        let _mmap = MemoryMapTag::new(&[]);
        let mmap = MemoryMapTag::new(&[
            MemoryArea::new(0x1000, 0x2000, MemoryAreaType::Available),
            MemoryArea::new(0x2000, 0x3000, MemoryAreaType::Available),
        ]);
        dbg!(mmap);
    }
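
    /// Round-trip sketch for the legacy memory map, mirroring
    /// `efi_construct_and_parse` below: build the tag via the builder API
    /// above and read the data back through the getters. This is an
    /// illustrative addition, not part of the original test suite.
    #[test]
    fn mmap_construct_and_parse() {
        let areas = [
            MemoryArea::new(0x0000, 0x1000, MemoryAreaType::Reserved),
            MemoryArea::new(0x1000, 0x2000, MemoryAreaType::Available),
        ];
        let mmap = MemoryMapTag::new(&areas);

        // The builder fills in the fixed-size fields.
        assert_eq!(mmap.entry_size() as usize, size_of::<MemoryArea>());
        assert_eq!(mmap.entry_version(), 0);
        // The stored areas round-trip unchanged.
        assert_eq!(mmap.memory_areas(), &areas);

        let area = &mmap.memory_areas()[1];
        assert_eq!(area.start_address(), 0x1000);
        assert_eq!(area.size(), 0x2000);
        assert_eq!(area.end_address(), 0x3000);
        assert_eq!(MemoryAreaType::from(area.typ()), MemoryAreaType::Available);
    }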

    #[test]
    fn efi_construct_and_parse() {
        let descs = [
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x1000,
                virt_start: 0x1000,
                page_count: 1,
                att: Default::default(),
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::LOADER_DATA,
                phys_start: 0x2000,
                virt_start: 0x2000,
                page_count: 3,
                att: Default::default(),
            },
        ];
        let efi_mmap_tag = EFIMemoryMapTag::new_from_descs(&descs);

        let mut iter = efi_mmap_tag.memory_areas();

        assert_eq!(iter.next(), Some(&descs[0]));
        assert_eq!(iter.next(), Some(&descs[1]));

        assert_eq!(iter.next(), None);
    }
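
    /// Small sketch covering the sized tag and the type-ID conversions
    /// defined above: `BasicMemoryInfoTag` getters plus the
    /// `MemoryAreaType` <-> `MemoryAreaTypeId` round trip. Illustrative
    /// addition; it uses nothing beyond items from this module.
    #[test]
    fn basic_meminfo_and_type_ids() {
        let tag = BasicMemoryInfoTag::new(640, 31 * 1024);
        assert_eq!(tag.memory_lower(), 640);
        assert_eq!(tag.memory_upper(), 31 * 1024);

        // Known IDs map to named variants and back.
        let id: MemoryAreaTypeId = MemoryAreaType::AcpiAvailable.into();
        assert_eq!(u32::from(id), 3);
        assert_eq!(MemoryAreaType::from(id), MemoryAreaType::AcpiAvailable);
        // Unknown IDs are preserved via `Custom`.
        assert_eq!(
            MemoryAreaType::from(MemoryAreaTypeId::from(42)),
            MemoryAreaType::Custom(42)
        );
        // Cross-type comparisons work in both directions.
        assert!(id == MemoryAreaType::AcpiAvailable);
        assert!(MemoryAreaType::AcpiAvailable == id);
    }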

    /// Parses a raw EFI memory map dump with 48-byte descriptors and checks
    /// the decoded entries.
    #[test]
    fn efi_test_real_data() {
        const DESC_SIZE: u32 = 48;
        const DESC_VERSION: u32 = 1;
        const MMAP_RAW: [u64; 60] = [
            3, 0, 0, 1, 15, 0, 7, 4096, 0, 134, 15, 0, 4, 552960, 0, 1, 15, 0, 7, 557056, 0, 24,
            15, 0, 7, 1048576, 0, 1792, 15, 0, 10, 8388608, 0, 8, 15, 0, 7, 8421376, 0, 3, 15, 0,
            10, 8433664, 0, 1, 15, 0, 7, 8437760, 0, 4, 15, 0, 10, 8454144, 0, 240, 15, 0,
        ];
        let buf = MMAP_RAW;
        let buf = unsafe {
            core::slice::from_raw_parts(buf.as_ptr().cast::<u8>(), buf.len() * size_of::<u64>())
        };
        let tag = EFIMemoryMapTag::new_from_map(DESC_SIZE, DESC_VERSION, buf);
        let entries = tag.memory_areas().copied().collect::<alloc::vec::Vec<_>>();
        let expected = [
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::BOOT_SERVICES_CODE,
                phys_start: 0x0,
                virt_start: 0x0,
                page_count: 0x1,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x1000,
                virt_start: 0x0,
                page_count: 0x86,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::BOOT_SERVICES_DATA,
                phys_start: 0x87000,
                virt_start: 0x0,
                page_count: 0x1,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x88000,
                virt_start: 0x0,
                page_count: 0x18,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x100000,
                virt_start: 0x0,
                page_count: 0x700,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
                phys_start: 0x800000,
                virt_start: 0x0,
                page_count: 0x8,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x808000,
                virt_start: 0x0,
                page_count: 0x3,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
                phys_start: 0x80b000,
                virt_start: 0x0,
                page_count: 0x1,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x80c000,
                virt_start: 0x0,
                page_count: 0x4,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
                phys_start: 0x810000,
                virt_start: 0x0,
                page_count: 0xf0,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
        ];
        assert_eq!(entries.as_slice(), &expected);
    }
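
    /// Sketch: the iterator reports its entry count via `ExactSizeIterator`,
    /// using the tag's `desc_size` as the stride. Illustrative addition,
    /// assuming only the API defined above.
    #[test]
    fn efi_iter_len() {
        let descs = [EFIMemoryDesc {
            ty: EFIMemoryAreaType::CONVENTIONAL,
            phys_start: 0x1000,
            virt_start: 0x1000,
            page_count: 1,
            att: Default::default(),
        }];
        let tag = EFIMemoryMapTag::new_from_descs(&descs);
        assert_eq!(tag.memory_areas().len(), 1);
        assert_eq!(tag.memory_areas().count(), 1);
    }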
}