pub mod node;
mod token;
use core::{
fmt::{self, Debug, Display, Formatter, Write},
mem::{size_of, size_of_val},
ptr::NonNull,
slice,
};
#[cfg(feature = "alloc")]
use std_alloc::{borrow::ToOwned, boxed::Box, vec::Vec};
use zerocopy::{AsBytes, FromBytes, FromZeroes, Ref};
use crate::{
BlobError, DeserializeNode, DeserializeProperty, MemReserveEntries, NodeContext, Path,
};
pub use node::Node;
pub use token::*;
// Offset/length type used internally; narrowed on 16-bit targets —
// presumably so every value also fits in a `usize`. TODO confirm intent.
#[cfg(target_pointer_width = "16")]
type DtUint = u16;
#[cfg(not(target_pointer_width = "16"))]
type DtUint = u32;
/// Module-local result alias defaulting the error type to [`BlobError`].
type Result<T, E = BlobError> = core::result::Result<T, E>;
/// Byte alignment the blob storage provides (it is backed by `u64`s).
pub(crate) const DTB_OPTIMAL_ALIGN: usize = 8;
/// The magic signature at the start of every flattened devicetree blob,
/// stored big-endian (`0xd00dfeed`).
pub const DTB_MAGIC: [u8; 4] = 0xd00d_feed_u32.to_be_bytes();
/// The last devicetree-blob version this parser is compatible with (0x10);
/// blobs whose `last_comp_version` differs are rejected.
pub(crate) const LAST_COMPATIBLE_VERSION: u32 = 0x10;
/// Byte alignment required of the structure block's offset and size.
const STRUCT_BLOCK_OPTIMAL_ALIGN: usize = 4;
/// The fixed-size header at the start of a devicetree blob.
///
/// All fields are stored big-endian; accessors elsewhere in this module
/// convert with `u32::from_be` before use.
#[derive(AsBytes)]
#[repr(C)]
pub(crate) struct Header {
    /// Must equal [`DTB_MAGIC`].
    pub magic: [u8; 4],
    /// Total size of the blob in bytes, including this header.
    pub totalsize: u32,
    /// Byte offset of the structure block.
    pub off_dt_struct: u32,
    /// Byte offset of the strings block.
    pub off_dt_strings: u32,
    /// Byte offset of the memory reservation block.
    pub off_mem_rsvmap: u32,
    /// Version of the blob format.
    pub version: u32,
    /// Last version this blob is backward compatible with.
    pub last_comp_version: u32,
    /// Physical ID of the boot CPU.
    pub boot_cpuid_phys: u32,
    /// Byte length of the strings block.
    pub size_dt_strings: u32,
    /// Byte length of the structure block.
    pub size_dt_struct: u32,
}

impl Header {
    /// Size of the header in bytes (40 with the `repr(C)` layout above).
    pub const SIZE: usize = size_of::<Self>();
}
/// A flattened devicetree blob.
///
/// This is an unsized type wrapping the raw `u64` storage; `repr(transparent)`
/// lets references/boxes of `[u64]` be reinterpreted as `Devicetree` (see
/// `from_slice_internal` / `from_box_unchecked`). Backing the blob with
/// `u64`s guarantees 8-byte alignment.
#[repr(transparent)]
pub struct Devicetree {
    blob: [u64],
}
impl Devicetree {
    /// Constructs a devicetree from a raw pointer to a blob in memory.
    ///
    /// # Safety
    /// `ptr` must point to readable, 8-byte-aligned memory holding a complete
    /// blob that remains valid and unmodified for `'static` — TODO confirm
    /// the exact caller contract against the crate-level docs.
    ///
    /// # Errors
    /// Fails on a missing magic signature or an implausible `totalsize`.
    pub unsafe fn from_ptr(ptr: NonNull<u64>) -> Result<&'static Self> {
        let ptr: *const u64 = ptr.as_ptr();
        // Read just the header first to learn the blob's total size.
        let size = unsafe {
            let blob = slice::from_raw_parts(ptr, Header::SIZE / DTB_OPTIMAL_ALIGN);
            Self::check_magic(blob)?;
            Self::totalsize(blob)
        }?;
        // Reject sizes no Rust slice could back (`> isize::MAX`) or that would
        // wrap around the address space when added to the base pointer.
        if isize::try_from(size).is_err() || usize::overflowing_add(ptr as usize, size).1 {
            return Err(BlobError::InvalidTotalsize);
        }
        // Round the byte size up to a whole number of `u64` elements.
        let slice_len = (size + DTB_OPTIMAL_ALIGN - 1) / DTB_OPTIMAL_ALIGN;
        unsafe { Self::from_slice_internal(slice::from_raw_parts(ptr, slice_len)) }
    }

    /// Constructs a devicetree from an aligned slice containing a blob.
    ///
    /// # Errors
    /// Fails if the slice is too short, the magic is missing, or the header
    /// is internally inconsistent.
    pub fn from_slice(blob: &[u64]) -> Result<&Self> {
        let size = Self::safe_checks(blob)?;
        // `safe_checks` verified `size` (in `u64`s) is within `blob`.
        unsafe { Self::from_slice_internal(&blob[..size]) }
    }

    /// Constructs an owned devicetree from an aligned buffer, truncating the
    /// buffer to the size recorded in the header.
    ///
    /// # Errors
    /// Same conditions as [`Self::from_slice`].
    #[cfg(feature = "alloc")]
    pub fn from_vec(mut blob: Vec<u64>) -> Result<Box<Self>> {
        let len = Self::safe_checks(&blob)?;
        blob.truncate(len);
        // SAFETY: magic and totalsize were just validated by `safe_checks`;
        // `late_checks` below validates the remaining header fields.
        let this = unsafe { Self::from_box_unchecked(blob.into_boxed_slice()) };
        this.late_checks()?;
        Ok(this)
    }

    /// Reinterprets a boxed `[u64]` as a boxed devicetree without any checks.
    ///
    /// Callers must ensure the buffer actually holds a validated blob.
    #[cfg(feature = "alloc")]
    #[inline]
    pub(crate) unsafe fn from_box_unchecked(blob: Box<[u64]>) -> Box<Self> {
        // SAFETY: `Devicetree` is `repr(transparent)` over `[u64]`, so the
        // fat-pointer cast preserves layout and length metadata.
        unsafe { Box::from_raw(Box::into_raw(blob) as *mut Self) }
    }

    /// Copies a possibly unaligned blob into a fresh aligned allocation.
    ///
    /// # Errors
    /// Fails on an empty input or any condition [`Self::from_vec`] rejects.
    #[cfg(feature = "alloc")]
    pub fn from_unaligned(blob: &[u8]) -> Result<Box<Self>> {
        // Number of `u64`s needed to hold `blob` (rounded up).
        let capacity = (blob.len() + DTB_OPTIMAL_ALIGN - 1) / DTB_OPTIMAL_ALIGN;
        let mut aligned_blob: Vec<u64> = Vec::with_capacity(capacity);
        // Zero the final element so the padding bytes past `blob.len()` are
        // initialized; `last_mut()` is `None` exactly when `blob` is empty.
        aligned_blob
            .spare_capacity_mut()
            .last_mut()
            .ok_or(BlobError::UnexpectedEnd)?
            .write(0);
        unsafe {
            // SAFETY: the destination holds `capacity * 8 >= blob.len()`
            // bytes and is a fresh allocation, so the ranges cannot overlap.
            core::ptr::copy_nonoverlapping(
                blob.as_ptr(),
                aligned_blob.as_mut_ptr() as *mut u8,
                blob.len(),
            );
            // SAFETY: all `capacity` elements are now initialized — the copy
            // plus the pre-zeroed last element cover every byte.
            aligned_blob.set_len(capacity);
        }
        Self::from_vec(aligned_blob)
    }

    /// Reinterprets the slice as a devicetree and runs the header checks that
    /// require the full blob.
    ///
    /// Callers must have already validated magic and totalsize for `blob`.
    unsafe fn from_slice_internal(blob: &[u64]) -> Result<&Self> {
        // SAFETY: `Devicetree` is `repr(transparent)` over `[u64]`, so a
        // `&[u64]` transmutes to `&Devicetree` with identical metadata.
        let this: &Self = unsafe { core::mem::transmute(blob) };
        this.late_checks()?;
        Ok(this)
    }

    /// Bounds-safe preliminary checks: verifies the slice can hold a header,
    /// the magic matches, and `totalsize` fits inside the slice.
    ///
    /// Returns the blob length in `u64` elements (rounded up).
    fn safe_checks(blob: &[u64]) -> Result<usize> {
        if size_of_val(blob) < Header::SIZE {
            return Err(BlobError::UnexpectedEnd);
        }
        // SAFETY (of the callees): the slice holds at least `Header::SIZE`
        // bytes, checked just above.
        let size = unsafe {
            Self::check_magic(blob)?;
            Self::totalsize(blob)
        }?;
        if size_of_val(blob) < size {
            return Err(BlobError::InvalidTotalsize);
        }
        Ok((size + DTB_OPTIMAL_ALIGN - 1) / DTB_OPTIMAL_ALIGN)
    }

    /// Compares the first four bytes of the blob against [`DTB_MAGIC`].
    ///
    /// Callers must guarantee `blob` holds at least 4 bytes.
    unsafe fn check_magic(blob: &[u64]) -> Result<()> {
        // SAFETY: per the caller contract the first 4 bytes are readable.
        if unsafe { *(blob as *const _ as *const [u8; 4]) } != DTB_MAGIC {
            return Err(BlobError::NoMagicSignature);
        }
        Ok(())
    }

    /// Reads `totalsize` from the header and validates its plausibility.
    ///
    /// Callers must guarantee `blob` holds at least `Header::SIZE` bytes.
    unsafe fn totalsize(blob: &[u64]) -> Result<usize> {
        let header = blob as *const _ as *const Header;
        // SAFETY: per the caller contract a full header is readable; the
        // `u64` slice satisfies `Header`'s alignment.
        let size = u32::from_be(unsafe { (*header).totalsize });
        // The blob must at least contain its own header.
        usize::try_from(size)
            .ok()
            .filter(|&s| s >= Header::SIZE)
            .ok_or(BlobError::InvalidTotalsize)
    }

    /// Validates header fields beyond magic/totalsize: version compatibility,
    /// block bounds, block alignment, and block ordering. The unchecked
    /// accessors below (`struct_blob`, `strings_blob`, …) rely on these
    /// checks having passed.
    fn late_checks(&self) -> Result<()> {
        // Header fields are big-endian, so compare against the swapped const.
        if self.header().last_comp_version != LAST_COMPATIBLE_VERSION.to_be() {
            return Err(BlobError::IncompatibleVersion);
        }
        let exact_size = self.exact_size() as usize;
        // The struct block must start past the header and fit in `usize`.
        let (struct_offset, struct_size) = Option::zip(
            usize::try_from(u32::from_be(self.header().off_dt_struct))
                .ok()
                .filter(|&o| o >= Header::SIZE),
            usize::try_from(u32::from_be(self.header().size_dt_struct)).ok(),
        )
        .ok_or(BlobError::BlockOutOfBounds)?;
        // Its end must not overflow and must lie inside the blob.
        let struct_end_offset = usize::checked_add(struct_offset, struct_size)
            .filter(|&e| e <= exact_size)
            .ok_or(BlobError::BlockOutOfBounds)?;
        if struct_offset % STRUCT_BLOCK_OPTIMAL_ALIGN != 0
            || struct_size % STRUCT_BLOCK_OPTIMAL_ALIGN != 0
        {
            return Err(BlobError::UnalignedBlock);
        }
        let strings_offset = usize::try_from(u32::from_be(self.header().off_dt_strings))
            .map_err(|_| BlobError::BlockOutOfBounds)?;
        // The strings block has to come after the struct block.
        if struct_end_offset > strings_offset {
            return Err(BlobError::InvalidBlockOrder);
        }
        // The strings block must also lie entirely inside the blob.
        if !usize::try_from(u32::from_be(self.header().size_dt_strings))
            .ok()
            .and_then(|s| usize::checked_add(strings_offset, s))
            .is_some_and(|e| e <= exact_size)
        {
            return Err(BlobError::BlockOutOfBounds);
        }
        Ok(())
    }

    /// Borrows the header at the start of the blob.
    #[inline]
    fn header(&self) -> &Header {
        // SAFETY: every constructor verified the blob holds at least
        // `Header::SIZE` bytes, and the `u64` storage satisfies `Header`'s
        // alignment.
        unsafe { &*(self as *const _ as *const Header) }
    }

    /// The exact byte size of the blob as recorded in the header
    /// (the backing slice may be padded up to the next multiple of 8).
    pub fn exact_size(&self) -> u32 {
        u32::from_be(self.header().totalsize)
    }

    /// The devicetree format version of this blob.
    pub fn version(&self) -> u32 {
        u32::from_be(self.header().version)
    }

    /// The last compatible version. Always [`LAST_COMPATIBLE_VERSION`],
    /// since `late_checks` rejects every other value.
    #[inline]
    pub fn last_comp_version(&self) -> u32 {
        LAST_COMPATIBLE_VERSION
    }

    /// Physical ID of the system's boot CPU.
    pub fn boot_core_id(&self) -> u32 {
        u32::from_be(self.header().boot_cpuid_phys)
    }

    /// The blob as raw bytes.
    #[inline]
    pub fn blob_u8(&self) -> &[u8] {
        self.blob.as_bytes()
    }

    /// The blob as a slice of `u32`s.
    #[inline]
    pub fn blob_u32(&self) -> &[u32] {
        // The storage is 8-byte-aligned and its byte length is a multiple of
        // 8, so reinterpreting as `u32`s cannot fail.
        Ref::new_slice(self.blob_u8()).unwrap().into_slice()
    }

    /// The blob's raw `u64` storage.
    #[inline]
    pub fn blob(&self) -> &[u64] {
        &self.blob
    }

    /// The structure block of the blob, in 32-bit big-endian cells.
    pub fn struct_blob(&self) -> &[u32] {
        let offset = u32::from_be(self.header().off_dt_struct) as usize;
        let len = u32::from_be(self.header().size_dt_struct) as usize;
        // SAFETY: `late_checks` verified offset/len are 4-aligned and the
        // block lies inside the blob.
        unsafe {
            crate::util::slice_get_with_len_unchecked(
                self.blob_u32(),
                offset / STRUCT_BLOCK_OPTIMAL_ALIGN,
                len / STRUCT_BLOCK_OPTIMAL_ALIGN,
            )
        }
    }

    /// The blob bytes from the start up to the end of the structure block.
    fn blob_with_struct_block_end(&self) -> &[u8] {
        let offset = u32::from_be(self.header().off_dt_struct) as usize;
        let len = u32::from_be(self.header().size_dt_struct) as usize;
        // SAFETY: `late_checks` verified `offset + len` does not overflow and
        // is `<= exact_size`, which is within the backing slice.
        unsafe { self.blob_u8().get_unchecked(..offset + len) }
    }

    /// The strings block of the blob.
    pub fn strings_blob(&self) -> &[u8] {
        let offset = u32::from_be(self.header().off_dt_strings) as usize;
        let len = u32::from_be(self.header().size_dt_strings) as usize;
        // SAFETY: `late_checks` verified the strings block is in bounds.
        unsafe { crate::util::slice_get_with_len_unchecked(self.blob_u8(), offset, len) }
    }

    /// Gets the node at `path`, descending one path component at a time.
    ///
    /// Child lookup uses `Node::get_child` — presumably a lenient match on
    /// names without unit addresses; see that method for exact semantics.
    ///
    /// # Errors
    /// Propagates path-parsing and blob-traversal errors.
    pub fn get_node<P: Path + ?Sized>(&self, path: &P) -> crate::Result<Option<Node<'_>>> {
        let components = path.as_components()?;
        let mut node = self.root_node()?;
        for name in components {
            let Some(n) = node.get_child(name)? else {
                return Ok(None);
            };
            node = n;
        }
        Ok(Some(node))
    }

    /// Like [`Self::get_node`] but resolves each component with
    /// `Node::get_child_strict` (exact name matching — TODO confirm).
    pub fn get_node_strict<P: Path + ?Sized>(&self, path: &P) -> crate::Result<Option<Node<'_>>> {
        let components = path.as_components()?;
        let mut node = self.root_node()?;
        for name in components {
            let Some(n) = node.get_child_strict(name)? else {
                return Ok(None);
            };
            node = n;
        }
        Ok(Some(node))
    }

    /// Deserializes the root node into `T`, discarding the returned cursor.
    pub fn parse_root<'dtb, T: DeserializeNode<'dtb>>(&'dtb self) -> crate::Result<T> {
        T::deserialize(&self.root_node()?, NodeContext::default()).map(|(v, _)| v)
    }

    /// An iterator over the memory reservation entries, located between the
    /// header and the structure block.
    ///
    /// # Errors
    /// Fails if the reservation block is unaligned, starts inside the header,
    /// or ends after the structure block begins.
    pub fn mem_reserve_entries(&self) -> Result<MemReserveEntries<'_>> {
        let offset = u32::from_be(self.header().off_mem_rsvmap);
        // Entries are pairs of `u64`s; the block must be 8-byte-aligned.
        if offset % DTB_OPTIMAL_ALIGN as u32 != 0 {
            return Err(BlobError::UnalignedBlock);
        }
        let offset = usize::try_from(offset)
            .ok()
            .filter(|&o| o >= Header::SIZE)
            .ok_or(BlobError::BlockOutOfBounds)?;
        // The reservation block is implicitly bounded by the struct block.
        let end_offset = u32::from_be(self.header().off_dt_struct) as usize;
        Ok(MemReserveEntries {
            blob: self
                .blob
                .get(offset / DTB_OPTIMAL_ALIGN..end_offset / DTB_OPTIMAL_ALIGN)
                .ok_or(BlobError::InvalidBlockOrder)?,
        })
    }
}
impl Debug for Devicetree {
    /// Prints the header-derived summary fields; `finish_non_exhaustive`
    /// signals that the blob contains more data than is shown.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("Devicetree");
        dbg.field("size", &self.exact_size());
        dbg.field("version", &self.version());
        dbg.field("last_comp_version", &self.last_comp_version());
        dbg.field("boot_core_id", &self.boot_core_id());
        dbg.finish_non_exhaustive()
    }
}
impl<'a> From<&'a Devicetree> for &'a [u64] {
#[inline]
fn from(dt: &'a Devicetree) -> Self {
&dt.blob
}
}
#[cfg(feature = "alloc")]
impl<'a> From<&'a Devicetree> for Box<Devicetree> {
    /// Copies the blob into a new heap allocation.
    fn from(this: &'a Devicetree) -> Self {
        ToOwned::to_owned(this)
    }
}
#[cfg(feature = "alloc")]
impl ToOwned for Devicetree {
    type Owned = Box<Self>;

    fn to_owned(&self) -> Self::Owned {
        // SAFETY: `self` was already validated when it was constructed, so a
        // byte-for-byte copy of its blob is a valid blob as well.
        unsafe { Devicetree::from_box_unchecked(self.blob.into()) }
    }
}
/// A property of a [`Node`], borrowing from the blob.
#[derive(Clone, Copy)]
pub struct Property<'dtb> {
    // Slice of the strings block starting at this property's name; the name
    // itself ends at the first NUL.
    name_blob: &'dtb [u8],
    // The property's raw value bytes.
    value: &'dtb [u8],
}
impl<'dtb> Property<'dtb> {
    /// The property's name.
    ///
    /// # Errors
    /// Fails with [`BlobError::InvalidString`] (or an error from the
    /// C-string lookup) when the name is not NUL-terminated ASCII.
    pub fn name(self) -> Result<&'dtb str> {
        let raw = crate::util::get_c_str(self.name_blob)?;
        crate::util::str_from_ascii(raw).ok_or(BlobError::InvalidString)
    }

    /// The property's raw value bytes.
    #[inline]
    pub fn value(self) -> &'dtb [u8] {
        // Values in the struct block are 4-byte-aligned by construction.
        debug_assert_eq!(self.value.as_ptr() as usize % 4, 0);
        self.value
    }

    /// Parses the value using a default [`NodeContext`].
    pub fn contextless_parse<T: DeserializeProperty<'dtb>>(self) -> crate::Result<T> {
        T::deserialize(self, NodeContext::default())
    }
}
impl<'dtb> Debug for Property<'dtb> {
    /// Shows the (fallible) name and the raw value bytes.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("Property");
        dbg.field("name", &self.name());
        dbg.field("value", &self.value);
        dbg.finish()
    }
}
impl<'dtb> Display for Property<'dtb> {
    /// Formats the property in devicetree-source style: `name;` for empty
    /// values, `name = "a", "b";` when the value looks like a string list,
    /// and `name = [hexbytes];` otherwise.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        /// Prints `N` bytes as lowercase hex digits with no separators.
        struct HexArray<const N: usize>([u8; N]);

        impl<const N: usize> Display for HexArray<N> {
            fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                const HEX_STRING: &[u8] = b"0123456789abcdef";
                // Each byte becomes two ASCII hex digits packed into a `u16`;
                // native-endian in and out, so the digit order is preserved.
                let buf = self.0.map(|n| {
                    u16::from_ne_bytes([HEX_STRING[n as usize >> 4], HEX_STRING[n as usize & 0x0f]])
                });
                // SAFETY: `buf` holds only ASCII hex digits, so it is UTF-8.
                f.write_str(unsafe { core::str::from_utf8_unchecked(buf.as_bytes()) })
            }
        }

        f.write_str(self.name().map_err(|_| fmt::Error)?)?;
        // An empty value prints as just `name;` (the pattern below fails).
        if let [ref rest @ .., last_byte] = *self.value {
            f.write_str(" = ")?;
            // String-list heuristic: the value must end in NUL and consist of
            // non-empty runs of printable ASCII, each followed by a NUL.
            let is_strings = 'is_strings: {
                if last_byte != 0 {
                    break 'is_strings false;
                }
                if rest.is_empty() {
                    // Single NUL byte: one empty string.
                    break 'is_strings true;
                }
                let mut prev_was_printing_char = false;
                rest.iter().all(|&b| {
                    match b {
                        // Interior NUL ends one string — only legal directly
                        // after at least one printable character.
                        0 if prev_was_printing_char => prev_was_printing_char = false,
                        b' '..=b'~' => prev_was_printing_char = true,
                        _ => return false,
                    }
                    true
                }) && prev_was_printing_char
            };
            if is_strings {
                f.write_char('"')?;
                for &b in rest {
                    if b == 0 {
                        // Each interior NUL separates two quoted strings.
                        f.write_str("\", \"")?;
                    } else {
                        f.write_char(b as char)?;
                    };
                }
                f.write_char('"')?;
            } else {
                f.write_char('[')?;
                let len = self.value.len();
                if len % 4 == 0 {
                    // Print as space-separated 32-bit cells. `rest` holds
                    // `len - 1` bytes, so after the exact 4-byte chunks a
                    // 3-byte tail remains; `last_byte` completes the final
                    // cell in the closing `write!` below.
                    for bytes in rest.chunks_exact(4) {
                        write!(f, "{} ", HexArray(<[u8; 4]>::try_from(bytes).unwrap()))?;
                    }
                    HexArray(<[u8; 3]>::try_from(&rest[len - 4..]).unwrap()).fmt(f)?;
                } else {
                    // Not cell-sized: print one byte per group.
                    for &b in rest {
                        write!(f, "{} ", HexArray([b]))?;
                    }
                }
                write!(f, "{}]", HexArray([last_byte]))?;
            }
        }
        f.write_char(';')
    }
}
/// Either a property or a child node of some [`Node`].
#[derive(Clone, Debug)]
pub enum Item<'dtb> {
    Property(Property<'dtb>),
    Child(Node<'dtb>),
}
impl<'dtb> Item<'dtb> {
pub fn name(self) -> Result<&'dtb str> {
match self {
Self::Property(prop) => prop.name(),
Self::Child(node) => node.name(),
}
}
}
/// One entry of the memory reservation block, as laid out in the blob
/// (both fields big-endian — TODO confirm against the consumer in
/// `MemReserveEntries`).
#[derive(AsBytes, FromBytes, FromZeroes)]
#[repr(C)]
pub(crate) struct RawReserveEntry {
    pub address: u64,
    pub size: u64,
}

impl RawReserveEntry {
    /// Number of `u64` fields per entry (2: address and size).
    pub const FIELD_COUNT: usize = size_of::<Self>() / DTB_OPTIMAL_ALIGN;
}