use hff_std::hff_core::write::ChunkDesc;
use hff_std::utilities::Hierarchical;
use hff_std::*;
use std::{
fs::{read_dir, File},
path::{Path, PathBuf},
};
/// One node of a scanned filesystem tree.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Structure {
/// A directory: its path plus the entries scanned inside it.
Directory(PathBuf, Vec<Structure>),
/// A file: the full normalized path when it is the scan root, or just the
/// leaf file name when it was found inside a scanned directory.
File(PathBuf),
}
impl Structure {
pub fn new(path: &Path, recursive: bool) -> Result<Self> {
use normpath::PathExt;
let path: PathBuf = path.normalize()?.into();
if path.exists() {
let metadata = path.metadata()?;
let file_type = metadata.file_type();
if file_type.is_file() {
Ok(Self::File(path.into()))
} else if file_type.is_dir() {
Ok(Self::scan_directory(path.into(), recursive)?)
} else {
Err(Error::Invalid(format!("Invalid root: {:?}", path)))
}
} else {
Err(Error::Invalid(format!("Invalid root: {:?}", path)))
}
}
pub fn strip_prefix(self, prefix: &Path) -> Result<Self> {
match self {
Self::Directory(path, children) => {
let p = path.strip_prefix(prefix)?;
let cp = prefix.join(p);
let mut c = vec![];
for child in children {
c.push(child.strip_prefix(&cp)?);
}
Ok(Self::Directory(p.into(), c))
}
Self::File(path) => Ok(Self::File(path)),
}
}
fn scan_directory(path: PathBuf, recursive: bool) -> Result<Self> {
let mut result = vec![];
let mut reader = read_dir(&path)?;
while let Some(entry) = reader.next() {
match entry {
Ok(entry) => {
let metadata = entry.metadata()?;
if metadata.file_type().is_file() {
result.push(Self::File(entry.path().file_name().unwrap().into()));
} else if metadata.file_type().is_dir() {
if recursive {
let path = entry.path();
result.push(Self::scan_directory(path, recursive)?);
}
}
}
Err(e) => return Err(e.into()),
}
}
Ok(Self::Directory(path.into(), result))
}
pub fn to_tables<'a, E: ByteOrder>(
self,
root: &Path,
compression: impl Fn(&Path) -> Option<u32>,
) -> Result<TableBuilder<'a>> {
match self {
Self::File(file) => archive_single_file::<E>(root, file, &compression),
Self::Directory(path, children) => {
archive_directory::<E>(root, path, children, &compression)
}
}
}
}
/// Wrap one scanned directory level in a top-level `HFF_DIR` table,
/// attaching the hierarchical file listing as the table's metadata.
fn archive_directory<'a, E: ByteOrder>(
    root: &Path,
    path: PathBuf,
    structure: Vec<Structure>,
    compression: &impl Fn(&Path) -> Option<u32>,
) -> Result<TableBuilder<'a>> {
    let (child_tables, child_chunks, file_names, child_dirs) =
        archive_level::<E>(root, path.clone(), structure, compression)?;
    // The hierarchy description for this level rides along as metadata.
    let metadata =
        Hierarchical::new(path.display().to_string(), file_names, child_dirs).to_bytes::<E>()?;
    let builder = table((super::HFF_DIR, Ecc::INVALID))
        .metadata(metadata)?
        .children(child_tables)
        .chunks(child_chunks);
    Ok(builder)
}
/// Convert one directory level into its HFF building blocks.
///
/// Returns, in order: child tables (one per subdirectory), chunks for the
/// files directly at this level, the names of those files, and the
/// hierarchical descriptions of the subdirectories.
fn archive_level<'a, E: ByteOrder>(
    root: &Path,
    path: PathBuf,
    children: Vec<Structure>,
    compression: &impl Fn(&Path) -> Option<u32>,
) -> Result<(
    Vec<TableBuilder<'a>>,
    Vec<ChunkDesc<'a>>,
    Vec<String>,
    Vec<Hierarchical>,
)> {
    let mut tables = vec![];
    let mut chunks = vec![];
    let mut files = vec![];
    let mut hierarchy = vec![];
    for child in children {
        match child {
            Structure::File(name) => {
                files.push(name.display().to_string());
                // Chunk data is read from the absolute on-disk location.
                let full_path = root.join(path.join(&name));
                chunks.push(file_to_chunk(compression, full_path)?);
            }
            Structure::Directory(dir_name, dir_children) => {
                // Descend with this level's directory appended to the root.
                let child_root = root.join(&path);
                let (sub_tables, sub_chunks, sub_files, sub_dirs) =
                    archive_level::<E>(&child_root, dir_name.clone(), dir_children, compression)?;
                hierarchy.push(Hierarchical::new(
                    dir_name.display().to_string(),
                    sub_files,
                    sub_dirs,
                ));
                tables.push(
                    table((super::HFF_DIR, Ecc::INVALID))
                        .children(sub_tables.into_iter())
                        .chunks(sub_chunks.into_iter()),
                )
            }
        }
    }
    Ok((tables, chunks, files, hierarchy))
}
/// Archive one file located at `root`/`file`.
///
/// If the file is itself an HFF container it is embedded as a table tree;
/// otherwise (best effort — the open error is intentionally ignored) its
/// raw contents become a single chunk under an `HFF_FILE` table.
fn archive_single_file<'a, E: ByteOrder>(
    root: &Path,
    file: PathBuf,
    compression: &impl Fn(&Path) -> Option<u32>,
) -> Result<TableBuilder<'a>> {
    let file_path = root.join(&file);
    if let Ok(hff) = hff_std::open(File::open(&file_path)?) {
        // Already an HFF archive: embed its table structure directly.
        hff_to_table::<E>(file, hff)
    } else {
        // Not an HFF: store the raw file contents as one chunk.
        let raw_chunk = file_to_chunk(compression, file_path)?;
        let metadata =
            Hierarchical::new(file.display().to_string(), vec![], vec![]).to_bytes::<E>()?;
        Ok(table((super::HFF_FILE, Ecc::INVALID))
            .chunks([raw_chunk])
            .metadata(metadata)?)
    }
}
/// Re-emit the tables of an existing HFF file as children of an
/// `HFF_EMBEDDED` table so the whole container nests inside the archive.
fn hff_to_table<'a, E: ByteOrder>(file: PathBuf, hff: Hff<StdReader>) -> Result<TableBuilder<'a>> {
    // Copy every top-level table (and, recursively, its contents).
    let children = hff
        .tables()
        .map(|t| resolve_table(&hff, t))
        .collect::<Result<Vec<_>>>()?;
    let metadata = Hierarchical::new(file.display().to_string(), vec![], vec![]).to_bytes::<E>()?;
    Ok(table((super::HFF_EMBEDDED, hff.content_type()))
        .metadata(metadata)?
        .children(children.into_iter()))
}
/// Recursively rebuild one table — its chunks and child tables — read from
/// an existing HFF so it can be written into a new container.
fn resolve_table<'a, 'b>(
    hff: &'a Hff<StdReader>,
    t: TableView<'a, StdReader>,
) -> Result<TableBuilder<'b>> {
    // Pull each chunk's payload out of the source container.
    let mut copied_chunks = Vec::new();
    for c in t.chunks() {
        copied_chunks.push(chunk(c.identifier(), hff.get(&c)?)?);
    }
    // Recurse into the child tables.
    let mut copied_children = Vec::new();
    for child in t.iter() {
        copied_children.push(resolve_table(hff, child)?);
    }
    Ok(table(t.identifier())
        .chunks(copied_chunks.into_iter())
        .children(copied_children.into_iter()))
}
/// Turn a file on disk into a chunk description.
///
/// `compression` maps the file path to an optional compression level. When
/// it returns `Some`, the file's uncompressed size is stored as the chunk's
/// secondary identifier and the payload is written compressed; otherwise
/// the chunk carries the raw file with an invalid secondary identifier.
fn file_to_chunk<'a, F: Fn(&Path) -> Option<u32>>(
    compression: &F,
    file_path: std::path::PathBuf,
) -> Result<ChunkDesc<'a>> {
    // Ask the policy callback whether (and how hard) to compress this file.
    let desc = match compression(file_path.as_path()) {
        Some(level) => {
            // Record the uncompressed length alongside the chunk type.
            let size = file_path.metadata()?.len();
            chunk((super::HFF_FILE, size), (level, file_path))?
        }
        None => chunk((super::HFF_FILE, Ecc::INVALID), file_path)?,
    };
    Ok(desc)
}