1#[cfg(doc)]
2use crate::base::parse::ParsedTok;
3#[cfg(doc)]
4use crate::base::*;
5
6use core::mem::size_of;
7use core::ptr;
8use core::slice;
9
10use crate::error::{DevTreeError, Result};
11
12use crate::priv_util::SliceRead;
13use crate::spec::{fdt_header, FDT_MAGIC};
14
15use fallible_iterator::FallibleIterator;
16
17use super::iters::{
18 DevTreeCompatibleNodeIter, DevTreeIter, DevTreeNodeIter, DevTreeParseIter, DevTreePropIter,
19 DevTreeReserveEntryIter,
20};
21use super::DevTreeNode;
22
/// Returns `true` when `offset` is a multiple of `size_of::<T>()`, i.e. an
/// offset at which a value of type `T` would be naturally aligned.
const fn is_aligned<T>(offset: usize) -> bool {
    0 == offset % size_of::<T>()
}
26
/// Returns `Ok(offset)` when `offset` is aligned for `T`, and
/// `Err(DevTreeError::ParseError)` otherwise.
///
/// NOTE(review): the lookup-table-and-index form (rather than a plain
/// `if`/`else`) appears deliberate — it keeps this usable as a `const fn`
/// on older compilers where `if` was not yet allowed in const contexts.
/// Do not "simplify" without confirming the crate's minimum Rust version.
const fn verify_offset_aligned<T>(offset: usize) -> Result<usize> {
    // Index 0 => unaligned => Err; index 1 => aligned => Ok(offset).
    let i: [Result<usize>; 2] = [Err(DevTreeError::ParseError), Ok(offset)];
    i[is_aligned::<T>(offset) as usize]
}
31
// Read the big-endian `u32` header field `$f` of the C struct `$s` out of the
// byte buffer `$buf`, locating the field with `offset_of!`. Expands to a
// `Result<u32>` (the `SliceRead::read_be_u32` call is bounds-checked).
macro_rules! get_be32_field {
    ( $f:ident, $s:ident , $buf:expr ) => {
        $buf.read_be_u32(offset_of!($s, $f))
    };
}
37
/// A parseable Flattened Device Tree (devicetree blob).
///
/// This is a thin, `Copy`-able view over the raw FDT byte buffer; header
/// fields and tree contents are decoded lazily on access.
#[derive(Copy, Clone, Debug)]
pub struct DevTree<'dt> {
    // The complete device tree blob, beginning with the `fdt_header`.
    buf: &'dt [u8],
}
46
47impl<'dt> PartialEq for DevTree<'dt> {
48 fn eq(&self, other: &Self) -> bool {
49 ptr::eq(self.buf, other.buf)
50 }
51}
52
53impl<'dt> DevTree<'dt> {
54 pub const MIN_HEADER_SIZE: usize = size_of::<fdt_header>();
55 #[inline]
65 pub unsafe fn verify_magic(buf: &[u8]) -> Result<()> {
66 if get_be32_field!(magic, fdt_header, buf)? != FDT_MAGIC {
67 Err(DevTreeError::InvalidMagicNumber)
68 } else {
69 Ok(())
70 }
71 }
72
73 #[inline]
98 pub unsafe fn read_totalsize(buf: &[u8]) -> Result<usize> {
99 verify_offset_aligned::<u32>(buf.as_ptr() as usize)
101 .map_err(|_| DevTreeError::InvalidParameter("Unaligned buffer provided"))?;
102
103 Self::verify_magic(buf)?;
105 Ok(get_be32_field!(totalsize, fdt_header, buf)? as usize)
106 }
107
108 #[inline]
118 unsafe fn from_safe_slice(buf: &'dt [u8]) -> Result<Self> {
119 let ret = Self { buf };
120 verify_offset_aligned::<u32>(ret.off_mem_rsvmap())?;
122 verify_offset_aligned::<u32>(ret.off_dt_struct())?;
123 Ok(ret)
124 }
125
126 #[inline]
135 pub unsafe fn new(buf: &'dt [u8]) -> Result<Self> {
136 if Self::read_totalsize(buf)? < buf.len() {
137 Err(DevTreeError::ParseError)
138 } else {
139 Self::from_safe_slice(buf)
140 }
141 }
142
143 #[inline]
151 pub unsafe fn from_raw_pointer(addr: *const u8) -> Result<Self> {
152 let buf: &[u8] = slice::from_raw_parts(addr, Self::MIN_HEADER_SIZE);
153 let buf_size = Self::read_totalsize(buf)?;
154 let buf: &[u8] = slice::from_raw_parts(addr, buf_size);
155
156 Self::from_safe_slice(buf)
157 }
158
159 #[inline]
162 #[must_use]
163 pub fn totalsize(&self) -> usize {
164 unsafe { get_be32_field!(totalsize, fdt_header, self.buf).unwrap() as usize }
165 }
166
167 #[inline]
169 #[must_use]
170 pub fn off_mem_rsvmap(&self) -> usize {
171 unsafe { get_be32_field!(off_mem_rsvmap, fdt_header, self.buf).unwrap() as usize }
172 }
173
174 #[inline]
176 #[must_use]
177 pub fn off_dt_struct(&self) -> usize {
178 unsafe { get_be32_field!(off_dt_struct, fdt_header, self.buf).unwrap() as usize }
179 }
180
181 #[inline]
183 #[must_use]
184 pub fn off_dt_strings(&self) -> usize {
185 unsafe { get_be32_field!(off_dt_strings, fdt_header, self.buf).unwrap() as usize }
186 }
187
188 #[inline]
190 #[must_use]
191 pub fn magic(&self) -> u32 {
192 unsafe { get_be32_field!(magic, fdt_header, self.buf).unwrap() }
193 }
194
195 #[inline]
197 #[must_use]
198 pub fn version(&self) -> u32 {
199 unsafe { get_be32_field!(version, fdt_header, self.buf).unwrap() }
200 }
201
202 #[inline]
204 #[must_use]
205 pub fn boot_cpuid_phys(&self) -> u32 {
206 unsafe { get_be32_field!(boot_cpuid_phys, fdt_header, self.buf).unwrap() }
207 }
208
209 #[inline]
211 #[must_use]
212 pub fn last_comp_version(&self) -> u32 {
213 unsafe { get_be32_field!(last_comp_version, fdt_header, self.buf).unwrap() }
214 }
215
216 #[inline]
218 #[must_use]
219 pub fn size_dt_strings(&self) -> u32 {
220 unsafe { get_be32_field!(size_dt_strings, fdt_header, self.buf).unwrap() }
221 }
222
223 #[inline]
225 #[must_use]
226 pub fn size_dt_struct(&self) -> u32 {
227 unsafe { get_be32_field!(size_dt_struct, fdt_header, self.buf).unwrap() }
228 }
229
230 pub(crate) unsafe fn ptr_at<T>(&self, offset: usize) -> Result<*const T> {
239 if offset + size_of::<T>() > self.buf.len() {
240 Err(DevTreeError::InvalidOffset)
241 } else {
242 Ok(self.buf.as_ptr().add(offset) as *const T)
243 }
244 }
245
246 #[must_use]
248 pub fn reserved_entries(&self) -> DevTreeReserveEntryIter {
249 DevTreeReserveEntryIter::new(self)
250 }
251
252 pub fn nodes(&self) -> DevTreeNodeIter<'_, 'dt> {
254 DevTreeNodeIter(DevTreeIter::new(self))
255 }
256
257 #[must_use]
258 pub fn props(&self) -> DevTreePropIter<'_, 'dt> {
259 DevTreePropIter(DevTreeIter::new(self))
260 }
261
262 pub fn items(&self) -> DevTreeIter<'_, 'dt> {
264 DevTreeIter::new(self)
265 }
266
267 #[must_use]
269 pub fn parse_iter(&self) -> DevTreeParseIter<'_, 'dt> {
270 DevTreeParseIter::new(self)
271 }
272
273 pub fn compatible_nodes<'s, 'a: 's>(
276 &'a self,
277 string: &'s str,
278 ) -> DevTreeCompatibleNodeIter<'s, 'a, 'dt> {
279 DevTreeCompatibleNodeIter {
280 iter: self.items(),
281 string,
282 }
283 }
284
285 pub fn buf(&self) -> &'dt [u8] {
286 self.buf
287 }
288
289 pub fn root(&self) -> Result<Option<DevTreeNode<'_, 'dt>>> {
291 self.nodes().next()
292 }
293}