//! Immutable, in-memory view of the structure of an Hff stream.
use crate::{DepthFirstIter, TableIter};
use hff_core::{Chunk, Header, Semver, Table};
use std::{fmt::Debug, mem::size_of};
/// The Hff structure data. This is an immutable representation of the
/// content of an Hff stream: endianness and version taken from the
/// [`Header`], plus the table and chunk entries parsed alongside it.
#[derive(Debug, Clone, PartialEq)]
pub struct Hff {
    /// Was the structure in native endian when it was read?
    native: bool,
    /// The version of the file format, copied from the header.
    version: Semver,
    /// The tables found in the header structure.
    /// NOTE(review): `TableIter::new(self, 0)` suggests these are stored
    /// flat and addressed by index — confirm against the iterator types.
    tables: Vec<Table>,
    /// The chunks found within the header structure.
    chunks: Vec<Chunk>,
}
impl Hff {
    /// Build an [`Hff`] from a parsed [`Header`] together with the table
    /// and chunk entries that were read with it. Endianness and version
    /// are captured from the header; the entry collections are taken by
    /// any type convertible into `Vec`.
    pub fn new(
        header: Header,
        tables: impl Into<Vec<Table>>,
        chunks: impl Into<Vec<Chunk>>,
    ) -> Self {
        let native = header.is_native_endian();
        let version = header.version();
        Self {
            native,
            version,
            tables: tables.into(),
            chunks: chunks.into(),
        }
    }

    /// True when the source stream was stored in the host's native endian.
    pub fn is_native_endian(&self) -> bool {
        self.native
    }

    /// The file-format version recorded in the stream's header.
    pub fn version(&self) -> Semver {
        self.version
    }

    /// Byte offset from the start of the file to the start of the chunk
    /// data: one header followed by every table entry and chunk entry.
    /// NOTE(review): this assumes the on-disk entry sizes equal the
    /// in-memory `size_of` for `Header`/`Table`/`Chunk` — confirm the
    /// structures are laid out for direct serialization.
    pub fn offset_to_data(&self) -> usize {
        let table_bytes = self.tables.len() * size_of::<Table>();
        let chunk_bytes = self.chunks.len() * size_of::<Chunk>();
        size_of::<Header>() + table_bytes + chunk_bytes
    }

    /// Iterate over the tables in depth-first order.
    pub fn depth_first(&self) -> DepthFirstIter {
        DepthFirstIter::new(self)
    }

    /// Iterate over the child tables, starting from the first entry.
    pub fn tables(&self) -> TableIter {
        TableIter::new(self, 0)
    }

    /// Borrow the underlying table array (crate-internal).
    pub(super) fn tables_array(&self) -> &[Table] {
        self.tables.as_slice()
    }

    /// Borrow the underlying chunk array (crate-internal).
    pub(super) fn chunks_array(&self) -> &[Chunk] {
        self.chunks.as_slice()
    }
}