use serde_derive::{Serialize, Deserialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::str::FromStr;
use crate::binary::{ReadBytes, WriteBytes};
use crate::error::Result;
use crate::files::*;
/// Extension used by AnimPack files.
pub const EXTENSION: &str = ".animpack";

// Unit tests for this module.
#[cfg(test)] mod animpack_test;
/// In-memory representation of an AnimPack container and the files it holds.
#[derive(PartialEq, Clone, Debug, Default, Serialize, Deserialize)]
pub struct AnimPack {

// Path of the file on disk this AnimPack was decoded from. Empty when no
// valid disk path was supplied at decode time.
disk_file_path: String,

// Byte offset of this AnimPack's data within the disk file. Used to turn
// absolute stream positions into container-relative ones for lazy loading.
disk_file_offset: u64,

// Timestamp supplied at decode time (propagated to lazily-loaded files).
local_timestamp: u64,

// Path cache, rebuilt by `paths_cache_generate` after decoding.
// NOTE(review): exact key/value semantics defined elsewhere — confirm in `Container`.
paths: HashMap<String, Vec<String>>,

// Files inside this AnimPack, keyed by their path within the container.
files: HashMap<String, RFile>,
}
// Straightforward accessor implementations for the `Container` trait, backed
// directly by the struct's fields.
impl Container for AnimPack {

/// Returns the path on disk this AnimPack was decoded from (empty if none).
fn disk_file_path(&self) -> &str {
&self.disk_file_path
}

/// Returns a reference to the files inside this AnimPack.
fn files(&self) -> &HashMap<String, RFile> {
&self.files
}

/// Returns a mutable reference to the files inside this AnimPack.
fn files_mut(&mut self) -> &mut HashMap<String, RFile> {
&mut self.files
}

/// Returns the offset of this AnimPack's data within its disk file.
fn disk_file_offset(&self) -> u64 {
self.disk_file_offset
}

/// Returns a reference to the path cache.
fn paths_cache(&self) -> &HashMap<String, Vec<String>> {
&self.paths
}

/// Returns a mutable reference to the path cache.
fn paths_cache_mut(&mut self) -> &mut HashMap<String, Vec<String>> {
&mut self.paths
}

/// Returns the timestamp recorded when this AnimPack was decoded.
fn local_timestamp(&self) -> u64 {
self.local_timestamp
}
}
impl Decodeable for AnimPack {

/// Decodes an AnimPack from the provided reader.
///
/// `extra_data` is mandatory: it supplies the disk path, offset, total size,
/// timestamp, encryption flag and lazy-load preference. Returns an error if
/// it's missing, if the provided disk path doesn't point to an existing file,
/// or if the decoded data doesn't match the expected size.
fn decode<R: ReadBytes>(data: &mut R, extra_data: &Option<DecodeableExtraData>) -> Result<Self> {
let extra_data = extra_data.as_ref().ok_or(RLibError::DecodingMissingExtraData)?;

// If a disk path was provided it must point to an existing file; otherwise
// fall back to an empty path (which disables lazy loading below).
let disk_file_path = match extra_data.disk_file_path {
Some(path) => {
let file_path = PathBuf::from_str(path).map_err(|_|RLibError::DecodingMissingExtraDataField("disk_file_path".to_owned()))?;
if file_path.is_file() {
path.to_owned()
} else {
return Err(RLibError::DecodingMissingExtraData)
}
}
None => String::new()
};
let disk_file_offset = extra_data.disk_file_offset;
let disk_file_size = extra_data.data_size;
let local_timestamp = extra_data.timestamp;
let is_encrypted = extra_data.is_encrypted;

// Lazy loading requires a known disk file and unencrypted data.
let lazy_load = !disk_file_path.is_empty() && !is_encrypted && extra_data.lazy_load;
let file_count = data.read_u32()?;

// Only preallocate for plausible counts, so a corrupted/hostile header
// can't trigger a huge allocation.
let mut anim_pack = Self {
disk_file_path,
disk_file_offset,
local_timestamp,
paths: HashMap::new(),
files: if file_count < 50_000 { HashMap::with_capacity(file_count as usize) } else { HashMap::new() },
};

// Each entry is a sized path string (backslashes normalized to forward
// slashes) followed by a u32 data size.
for _ in 0..file_count {
let path_in_container = data.read_sized_string_u8()?.replace('\\', "/");
let size = data.read_u32()?;

// NOTE(review): `|| is_encrypted` is redundant here — `lazy_load` is
// already false whenever `is_encrypted` is true (see above).
if !lazy_load || is_encrypted {

// Eager path: read the file's bytes into memory right now.
let data = data.read_slice(size as usize, false)?;
let file = RFile {
path: path_in_container.to_owned(),
timestamp: None,
file_type: FileType::AnimPack,
container_name: None,
data: RFileInnerData::Cached(data),
};
anim_pack.files.insert(path_in_container, file);
}
else {

// Lazy path: record the data's position relative to the start of
// this container on disk, then skip over the bytes.
let data_pos = data.stream_position()? - disk_file_offset;
let file = RFile::new_from_container(&anim_pack, size as u64, false, None, data_pos, local_timestamp, &path_in_container)?;
data.seek(SeekFrom::Current(size as i64))?;
anim_pack.files.insert(path_in_container, file);
}
}

// Rebuild the path cache, then detect each file's type in parallel
// (rayon), bailing out on the first error.
anim_pack.paths_cache_generate();
anim_pack.files.par_iter_mut().map(|(_, file)| file.guess_file_type()).collect::<Result<()>>()?;

// Sanity check: we must have consumed exactly the advertised amount of data.
check_size_mismatch(data.stream_position()? as usize - anim_pack.disk_file_offset as usize, disk_file_size as usize)?;
Ok(anim_pack)
}
}
impl Encodeable for AnimPack {

    /// Encodes this AnimPack into the provided buffer.
    ///
    /// Layout: a u32 file count, then for each file its sized path string, a
    /// u32 data size and the raw data. Files are written sorted by their
    /// lowercased path so the output is deterministic regardless of the
    /// `HashMap`'s iteration order.
    ///
    /// Returns an error if any file fails to encode, or if a file's encoded
    /// data is too big for its size to fit in a u32.
    fn encode<W: WriteBytes>(&mut self, buffer: &mut W, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
        buffer.write_u32(self.files.len() as u32)?;

        let mut sorted_files = self.files.iter_mut().collect::<Vec<(&String, &mut RFile)>>();

        // `sort_by_cached_key` computes (and allocates) each lowercased key
        // exactly once, instead of on every comparison as `sort_by_key` would.
        sorted_files.sort_by_cached_key(|(path, _)| path.to_lowercase());

        for (path, file) in sorted_files {
            buffer.write_sized_string_u8(path)?;

            // With the last argument (`return_data`) set to true, `encode`
            // must hand back the encoded bytes, so `None` here is a bug.
            let data = file.encode(extra_data, false, false, true)?
                .expect("RFile::encode with return_data = true must return the encoded data");

            // File sizes are stored as u32, so anything bigger can't be represented.
            if data.len() > u32::MAX as usize {
                return Err(RLibError::DataTooBigForContainer("AnimPack".to_owned(), u32::MAX as u64, data.len(), path.to_owned()));
            }

            buffer.write_u32(data.len() as u32)?;
            buffer.write_all(&data)?;
        }

        Ok(())
    }
}