use std::io::Read;
use crate::{
archiver::{
parent::{ItemWithParent, ParentResult},
tree::TreeType,
tree_archiver::TreeItem,
},
backend::{
ReadSourceOpen,
decrypt::DecryptWriteBackend,
node::{Node, NodeType},
},
blob::{
BlobId, BlobType, DataId,
packer::{PackSizer, Packer, PackerStats},
},
chunker::ChunkIter,
crypto::hasher::hash,
error::{ErrorKind, RusticError, RusticResult},
index::{ReadGlobalIndex, indexer::SharedIndexer},
progress::Progress,
repofile::configfile::ConfigFile,
};
/// Archiver for file contents.
///
/// Chunks file data, deduplicates chunks against the global index and
/// hands new data blobs to the data packer.
#[derive(Clone)]
pub(crate) struct FileArchiver<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> {
// global index used to check whether a data blob is already stored
index: &'a I,
// packer that collects new data blobs into pack files
data_packer: Packer<BE>,
// repository config (chunker / pack-size parameters); cloned in `new`
config: ConfigFile,
}
impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> {
pub(crate) fn new(
be: BE,
index: &'a I,
indexer: SharedIndexer<BE>,
config: &ConfigFile,
) -> RusticResult<Self> {
let pack_sizer =
PackSizer::from_config(config, BlobType::Data, index.total_size(BlobType::Data));
let data_packer = Packer::new(be, BlobType::Data, indexer, pack_sizer)?;
Ok(Self {
index,
data_packer,
config: config.clone(),
})
}
pub(crate) fn process<O: ReadSourceOpen>(
&self,
item: ItemWithParent<Option<O>>,
p: &Progress,
) -> RusticResult<TreeItem> {
Ok(match item {
TreeType::NewTree(item) => TreeType::NewTree(item),
TreeType::EndTree => TreeType::EndTree,
TreeType::Other((path, node, (open, parent))) => {
let (node, filesize) = if matches!(parent, ParentResult::Matched(())) {
let size = node.meta.size;
p.inc(size);
(node, size)
} else if node.node_type == NodeType::File {
let r = open
.ok_or_else(
|| RusticError::new(
ErrorKind::Internal,
"Failed to unpack tree type optional at `{path}`. Option should contain a value, but contained `None`.",
)
.attach_context("path", path.display().to_string())
.ask_report(),
)?
.open()
.map_err(|err| {
err
.overwrite_kind(ErrorKind::InputOutput)
.prepend_guidance_line("Failed to open ReadSourceOpen at `{path}`")
.attach_context("path", path.display().to_string())
})?;
self.backup_reader(r, node, p).map_err(|err| {
err.prepend_guidance_line("Error while backing up `{path}`")
.attach_context("path", path.display().to_string())
})?
} else {
(node, 0)
};
TreeType::Other((path, node, (parent, filesize)))
}
})
}
fn backup_reader(
&self,
r: impl Read + Send + 'static,
node: Node,
p: &Progress,
) -> RusticResult<(Node, u64)> {
let chunks: Vec<_> = ChunkIter::from_config(
&self.config,
r,
usize::try_from(node.meta.size).unwrap_or(usize::MAX),
)?
.map(|chunk| {
let chunk = chunk?;
let id = hash(&chunk);
let size = chunk.len() as u64;
if !self.index.has_data(&DataId::from(id)) {
self.data_packer.add(chunk.into(), BlobId::from(id))?;
}
p.inc(size);
Ok((DataId::from(id), size))
})
.collect::<RusticResult<_>>()?;
let filesize = chunks.iter().map(|x| x.1).sum();
let content = chunks.into_iter().map(|x| x.0).collect();
let mut node = node;
node.content = Some(content);
Ok((node, filesize))
}
pub(crate) fn finalize(self) -> RusticResult<PackerStats> {
self.data_packer.finalize()
}
}