use super::*;
use crate::boot;
use core::fmt::{Debug, Display, Formatter};
use core::ops::{Index, IndexMut};
use core::mem::{align_of, size_of};
use core::ptr;
use core::ptr::NonNull;
use uefi_raw::PhysicalAddress;
/// Errors that may happen when constructing a [`MemoryMapRef`] or
/// [`MemoryMapRefMut`].
#[derive(Copy, Clone, Debug)]
pub enum MemoryMapError {
    /// The buffer is not 8-byte aligned.
    Misaligned,
    /// The buffer is smaller than the size reported in the metadata.
    InvalidSize,
}
impl Display for MemoryMapError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
Debug::fmt(self, f)
}
}
impl core::error::Error for MemoryMapError {}
/// Implementation of [`MemoryMap`] backed by a borrowed, read-only buffer.
#[derive(Debug)]
pub struct MemoryMapRef<'a> {
buf: &'a [u8],
meta: MemoryMapMeta,
len: usize,
}
impl<'a> MemoryMapRef<'a> {
    /// Creates a new view into the provided memory map buffer.
    ///
    /// Returns an error if the buffer is not 8-byte aligned or is smaller
    /// than `meta.map_size`.
    pub fn new(buffer: &'a [u8], meta: MemoryMapMeta) -> Result<Self, MemoryMapError> {
if buffer.as_ptr().align_offset(8) != 0 {
return Err(MemoryMapError::Misaligned);
}
if buffer.len() < meta.map_size {
return Err(MemoryMapError::InvalidSize);
}
Ok(Self {
buf: buffer,
meta,
len: meta.entry_count(),
})
}
}
impl MemoryMap for MemoryMapRef<'_> {
fn meta(&self) -> MemoryMapMeta {
self.meta
}
fn key(&self) -> MemoryMapKey {
self.meta.map_key
}
fn len(&self) -> usize {
self.len
}
fn buffer(&self) -> &[u8] {
self.buf
}
fn entries(&self) -> MemoryMapIter<'_> {
MemoryMapIter {
memory_map: self,
index: 0,
}
}
}
impl Index<usize> for MemoryMapRef<'_> {
type Output = MemoryDescriptor;
fn index(&self, index: usize) -> &Self::Output {
self.get(index).unwrap()
}
}
/// Implementation of [`MemoryMapMut`] backed by a borrowed, mutable buffer.
#[derive(Debug)]
pub struct MemoryMapRefMut<'a> {
buf: &'a mut [u8],
meta: MemoryMapMeta,
len: usize,
}
impl<'a> MemoryMapRefMut<'a> {
    /// Creates a new mutable view into the provided memory map buffer.
    ///
    /// Returns an error if the buffer is not 8-byte aligned or is smaller
    /// than `meta.map_size`.
    pub fn new(buffer: &'a mut [u8], meta: MemoryMapMeta) -> Result<Self, MemoryMapError> {
if buffer.as_ptr().align_offset(8) != 0 {
return Err(MemoryMapError::Misaligned);
}
if buffer.len() < meta.map_size {
return Err(MemoryMapError::InvalidSize);
}
Ok(Self {
buf: buffer,
meta,
len: meta.entry_count(),
})
}
}
impl MemoryMap for MemoryMapRefMut<'_> {
fn meta(&self) -> MemoryMapMeta {
self.meta
}
fn key(&self) -> MemoryMapKey {
self.meta.map_key
}
fn len(&self) -> usize {
self.len
}
fn buffer(&self) -> &[u8] {
self.buf
}
fn entries(&self) -> MemoryMapIter<'_> {
MemoryMapIter {
memory_map: self,
index: 0,
}
}
}
impl MemoryMapMut for MemoryMapRefMut<'_> {
    fn sort(&mut self) {
        // An empty or single-entry map is trivially sorted; this also avoids
        // the `self.len - 1` underflow below.
        if self.len < 2 {
            return;
        }
        self.qsort(0, self.len - 1);
    }
unsafe fn buffer_mut(&mut self) -> &mut [u8] {
self.buf
}
}
impl MemoryMapRefMut<'_> {
    /// Recursive Hoare-style quicksort over the descriptors, ordering them by
    /// physical start address.
    fn qsort(&mut self, low: usize, high: usize) {
if low >= high {
return;
}
let p = self.partition(low, high);
self.qsort(low, p);
self.qsort(p + 1, high);
}
    /// Hoare partition scheme. Both indices start one step outside the range
    /// (using wrapping arithmetic) so that the first pass of each inner loop
    /// moves them onto `low` and `high` respectively.
    fn partition(&mut self, low: usize, high: usize) -> usize {
        let pivot = self.get_element_phys_addr(low + (high - low) / 2);
        let mut left_index = low.wrapping_sub(1);
        let mut right_index = high.wrapping_add(1);
loop {
while {
left_index = left_index.wrapping_add(1);
self.get_element_phys_addr(left_index) < pivot
} {}
while {
right_index = right_index.wrapping_sub(1);
self.get_element_phys_addr(right_index) > pivot
} {}
if left_index >= right_index {
return right_index;
}
self.swap(left_index, right_index);
}
}
    /// Swaps the descriptors at the two given indices in place.
    fn swap(&mut self, index1: usize, index2: usize) {
assert!(index1 < self.len);
assert!(index2 < self.len);
if index1 == index2 {
return;
}
let base = self.buf.as_mut_ptr();
let offset1 = index1 * self.meta.desc_size;
let offset2 = index2 * self.meta.desc_size;
unsafe {
ptr::swap_nonoverlapping(base.add(offset1), base.add(offset2), self.meta.desc_size);
}
}
    /// Reads the physical start address of the descriptor at `index` directly
    /// from the backing buffer.
    fn get_element_phys_addr(&self, index: usize) -> PhysicalAddress {
assert!(index < self.len);
let offset = index.checked_mul(self.meta.desc_size).unwrap();
let elem = unsafe { &*self.buf.as_ptr().add(offset).cast::<MemoryDescriptor>() };
elem.phys_start
}
}
impl Index<usize> for MemoryMapRefMut<'_> {
type Output = MemoryDescriptor;
fn index(&self, index: usize) -> &Self::Output {
self.get(index).unwrap()
}
}
impl IndexMut<usize> for MemoryMapRefMut<'_> {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
self.get_mut(index).unwrap()
}
}
/// The backing memory for the UEFI memory map, allocated with the UEFI boot
/// services pool allocator. It frees itself on drop while boot services are
/// still active; afterwards the allocation is left in place, since the boot
/// services allocator is no longer available.
#[derive(Debug)]
#[allow(clippy::len_without_is_empty)]
pub(crate) struct MemoryMapBackingMemory(NonNull<[u8]>);
impl MemoryMapBackingMemory {
    /// Allocates a buffer from the UEFI boot services pool that is large
    /// enough to hold the current memory map plus some headroom.
    pub(crate) fn new(memory_type: MemoryType) -> crate::Result<Self> {
let memory_map_meta = boot::memory_map_size();
let len = Self::safe_allocation_size_hint(memory_map_meta);
let ptr = boot::allocate_pool(memory_type, len)?.as_ptr();
assert_eq!(ptr.align_offset(align_of::<MemoryDescriptor>()), 0);
assert_eq!(memory_map_meta.map_size % memory_map_meta.desc_size, 0);
unsafe { Ok(Self::from_raw(ptr, len)) }
}
    /// # Safety
    ///
    /// `ptr` must point to a readable and writable allocation of at least
    /// `len` bytes that stays valid for the lifetime of the returned value.
    unsafe fn from_raw(ptr: *mut u8, len: usize) -> Self {
        assert_eq!(ptr.align_offset(align_of::<MemoryDescriptor>()), 0);
        let ptr = NonNull::new(ptr)
            .expect("UEFI should never return a null ptr. An error should have been reflected via an Err earlier.");
let slice = NonNull::slice_from_raw_parts(ptr, len);
Self(slice)
}
#[cfg(test)]
    /// Test-only constructor that backs the memory map with a regular slice
    /// instead of a pool allocation.
    pub(crate) fn from_slice(buffer: &mut [u8]) -> Self {
let len = buffer.len();
unsafe { Self::from_raw(buffer.as_mut_ptr(), len) }
}
    /// Returns a size hint with headroom for a few additional entries, since
    /// allocating the buffer can itself modify the memory map.
    #[must_use]
    const fn safe_allocation_size_hint(mmm: MemoryMapMeta) -> usize {
const EXTRA_ENTRIES: usize = 8;
let extra_size = mmm.desc_size * EXTRA_ENTRIES;
mmm.map_size + extra_size
}
    /// Returns the backing memory as a byte slice.
    #[must_use]
    pub const fn as_slice(&self) -> &[u8] {
unsafe { self.0.as_ref() }
}
    /// Returns the backing memory as a mutable byte slice.
    #[must_use]
    pub const fn as_mut_slice(&mut self) -> &mut [u8] {
unsafe { self.0.as_mut() }
}
}
impl Drop for MemoryMapBackingMemory {
fn drop(&mut self) {
if boot::are_boot_services_active() {
let res = unsafe { boot::free_pool(self.0.cast()) };
if let Err(e) = res {
log::error!("Failed to deallocate memory map: {e:?}");
}
} else {
log::debug!(
"Boot services are exited. Memory map won't be freed using the UEFI boot services allocator."
);
}
}
}
/// Implementation of [`MemoryMapMut`] that owns its backing buffer on the
/// UEFI heap.
#[derive(Debug)]
pub struct MemoryMapOwned {
pub(crate) buf: MemoryMapBackingMemory,
pub(crate) meta: MemoryMapMeta,
pub(crate) len: usize,
}
impl MemoryMapOwned {
    /// Creates a [`MemoryMapOwned`] from a [`MemoryMapBackingMemory`] that
    /// already holds an initialized memory map, plus the corresponding
    /// metadata.
    pub(crate) fn from_initialized_mem(buf: MemoryMapBackingMemory, meta: MemoryMapMeta) -> Self {
assert!(meta.desc_size >= size_of::<MemoryDescriptor>());
let len = meta.entry_count();
Self { buf, meta, len }
}
}
impl MemoryMap for MemoryMapOwned {
fn meta(&self) -> MemoryMapMeta {
self.meta
}
fn key(&self) -> MemoryMapKey {
self.meta.map_key
}
fn len(&self) -> usize {
self.len
}
fn buffer(&self) -> &[u8] {
self.buf.as_slice()
}
fn entries(&self) -> MemoryMapIter<'_> {
MemoryMapIter {
memory_map: self,
index: 0,
}
}
}
impl MemoryMapMut for MemoryMapOwned {
fn sort(&mut self) {
let mut reference = MemoryMapRefMut {
buf: self.buf.as_mut_slice(),
meta: self.meta,
len: self.len,
};
reference.sort();
}
unsafe fn buffer_mut(&mut self) -> &mut [u8] {
self.buf.as_mut_slice()
}
}
impl Index<usize> for MemoryMapOwned {
type Output = MemoryDescriptor;
fn index(&self, index: usize) -> &Self::Output {
self.get(index).unwrap()
}
}
impl IndexMut<usize> for MemoryMapOwned {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
self.get_mut(index).unwrap()
}
}
#[cfg(test)]
mod tests {
use super::*;
use alloc::vec::Vec;
    use core::mem::{size_of, size_of_val};
    /// Sample memory map with three entries in descending `phys_start` order.
    const BASE_MMAP_UNSORTED: [MemoryDescriptor; 3] = [
MemoryDescriptor {
ty: MemoryType::CONVENTIONAL,
phys_start: 0x3000,
virt_start: 0x3000,
page_count: 1,
att: MemoryAttribute::WRITE_BACK,
},
MemoryDescriptor {
ty: MemoryType::CONVENTIONAL,
phys_start: 0x2000,
virt_start: 0x2000,
page_count: 1,
att: MemoryAttribute::WRITE_BACK,
},
MemoryDescriptor {
ty: MemoryType::CONVENTIONAL,
phys_start: 0x1000,
virt_start: 0x1000,
page_count: 1,
att: MemoryAttribute::WRITE_BACK,
},
];
    /// Returns a fresh copy of the unsorted base memory map that tests can
    /// mutate freely.
    const fn new_mmap_memory() -> [MemoryDescriptor; 3] {
BASE_MMAP_UNSORTED
}
    /// Reinterprets the descriptor array as a raw byte buffer and builds the
    /// matching [`MemoryMapMeta`] for it.
    fn mmap_raw<'a>(memory: &mut [MemoryDescriptor]) -> (&'a mut [u8], MemoryMapMeta) {
let desc_size = size_of::<MemoryDescriptor>();
let len = size_of_val(memory);
let ptr = memory.as_mut_ptr().cast::<u8>();
let slice = unsafe { core::slice::from_raw_parts_mut(ptr, len) };
let meta = MemoryMapMeta {
map_size: len,
desc_size,
map_key: Default::default(),
desc_version: MemoryDescriptor::VERSION,
};
(slice, meta)
}
#[test]
fn memory_map_ref() {
let mut memory = new_mmap_memory();
let (mmap, meta) = mmap_raw(&mut memory);
let mmap = MemoryMapRef::new(mmap, meta).unwrap();
assert_eq!(mmap.entries().count(), 3);
assert_eq!(
mmap.entries().copied().collect::<Vec<_>>().as_slice(),
&BASE_MMAP_UNSORTED
);
assert!(!mmap.is_sorted());
}
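    // Additional sketch (not part of the original test suite): exercises the
    // error paths of `MemoryMapRef::new`, using only the helpers defined
    // above.
    #[test]
    fn memory_map_ref_construction_errors() {
        // A buffer shorter than `meta.map_size` is rejected.
        let mut memory = new_mmap_memory();
        let (mmap, mut meta) = mmap_raw(&mut memory);
        meta.map_size = mmap.len() + 1;
        assert!(matches!(
            MemoryMapRef::new(mmap, meta),
            Err(MemoryMapError::InvalidSize)
        ));

        // A buffer that is not 8-byte aligned is rejected.
        let mut memory = new_mmap_memory();
        let (mmap, meta) = mmap_raw(&mut memory);
        assert!(matches!(
            MemoryMapRef::new(&mmap[1..], meta),
            Err(MemoryMapError::Misaligned)
        ));
    }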
#[test]
fn memory_map_ref_mut() {
let mut memory = new_mmap_memory();
let (mmap, meta) = mmap_raw(&mut memory);
let mut mmap = MemoryMapRefMut::new(mmap, meta).unwrap();
assert_eq!(mmap.entries().count(), 3);
assert_eq!(
mmap.entries().copied().collect::<Vec<_>>().as_slice(),
&BASE_MMAP_UNSORTED
);
assert!(!mmap.is_sorted());
mmap.sort();
assert!(mmap.is_sorted());
}
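    // Additional sketch (not part of the original test suite): after `sort`,
    // the entries are ordered by `phys_start` and can be read back through the
    // `Index` impl, which forwards to `MemoryMap::get`.
    #[test]
    fn memory_map_ref_mut_sort_and_index() {
        let mut memory = new_mmap_memory();
        let (mmap, meta) = mmap_raw(&mut memory);
        let mut mmap = MemoryMapRefMut::new(mmap, meta).unwrap();
        mmap.sort();
        assert_eq!(mmap[0].phys_start, 0x1000);
        assert_eq!(mmap[1].phys_start, 0x2000);
        assert_eq!(mmap[2].phys_start, 0x3000);
    }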
#[test]
fn memory_map_owned() {
let mut memory = new_mmap_memory();
let (mmap, meta) = mmap_raw(&mut memory);
let mmap = MemoryMapBackingMemory::from_slice(mmap);
let mut mmap = MemoryMapOwned::from_initialized_mem(mmap, meta);
assert_eq!(mmap.entries().count(), 3);
assert_eq!(
mmap.entries().copied().collect::<Vec<_>>().as_slice(),
&BASE_MMAP_UNSORTED
);
assert!(!mmap.is_sorted());
mmap.sort();
assert!(mmap.is_sorted());
}
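    // Additional sketch (not part of the original test suite): `Display` for
    // `MemoryMapError` simply forwards to the derived `Debug` output.
    #[test]
    fn memory_map_error_display_matches_debug() {
        assert_eq!(
            alloc::format!("{}", MemoryMapError::Misaligned),
            alloc::format!("{:?}", MemoryMapError::Misaligned)
        );
    }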
}