use rayon::prelude::*;

use std::collections::HashMap;
use std::io::{BufReader, Cursor};

use crate::binary::{ReadBytes, WriteBytes};
use crate::compression::CompressionFormat;
use crate::encryption::Decryptable;
use crate::error::{RLibError, Result};
use crate::files::{pack::*, RFile};
impl Pack {
/// Reads the header remainder and the indexes of a PFH5 Pack from `data`,
/// filling `self` with the Pack's dependencies and lazy-loaded file entries.
///
/// `extra_data` supplies the readable data size (0 means "use the reader's full
/// length") and the offset of this Pack within the on-disk file.
///
/// Returns the offset (relative to the start of the Pack) where the file data
/// ends, or an error if the header or indexes are incomplete or unreadable.
pub(crate) fn read_pfh5<R: ReadBytes>(&mut self, data: &mut R, extra_data: &DecodeableExtraData) -> Result<u64> {

    // Total readable data. If the caller didn't provide a size, fall back to the
    // full length of the reader.
    let data_len = if extra_data.data_size > 0 { extra_data.data_size } else { data.len()? };

    // Fixed part of the header: counts and byte-sizes of both indexes, plus the timestamp.
    let packs_count = data.read_u32()?;
    let packs_index_size = data.read_u32()?;
    let files_count = data.read_u32()?;
    let files_index_size = data.read_u32()?;
    self.header.internal_timestamp = u64::from(data.read_u32()?);
    self.files = HashMap::with_capacity(files_count as usize);

    // Sanity check: make sure the data is big enough to hold the full header before
    // reading any further. Extended headers need 44 bytes; normal ones 24.
    let extra_header_size = {
        if (self.header.bitmask.contains(PFHFlags::HAS_EXTENDED_HEADER) && data_len < 44) ||
            (!self.header.bitmask.contains(PFHFlags::HAS_EXTENDED_HEADER) && data_len < 24) {
            return Err(RLibError::PackHeaderNotComplete);
        }
        // NOTE(review): always 0 here, so extended-header Packs contribute no extra
        // bytes to the index read below — confirm against the PFH5 format spec.
        0
    };

    // Buffer both indexes in memory so index parsing doesn't hit the reader per field.
    let indexes_size = extra_header_size + packs_index_size + files_index_size;
    let buffer_data = data.read_slice(indexes_size as usize, false)?;
    let mut buffer_mem = BufReader::new(Cursor::new(buffer_data));

    // Offset of the first file's data, relative to the start of the Pack.
    let mut data_pos = data.stream_position()? - extra_data.disk_file_offset;

    // NOTE(review): the original code had an empty `if` here for Packs with both
    // HAS_ENCRYPTED_DATA and HAS_EXTENDED_HEADER set; removed as a no-op.

    if data_len < data_pos {
        return Err(RLibError::PackIndexesNotComplete)
    }

    // Pack index: one null-terminated name per dependency, all marked as hard.
    for _ in 0..packs_count {
        self.dependencies.push((true, buffer_mem.read_string_u8_0terminated()?));
    }

    // If the Pack's data is encrypted, remember the version so files can be
    // decrypted when their data is actually loaded.
    let files_are_encrypted = if self.header.bitmask.contains(PFHFlags::HAS_ENCRYPTED_DATA) {
        Some(self.header.pfh_version)
    } else {
        None
    };

    // File index. Iterated in reverse so `files_to_read` holds the number of entries
    // still pending; that value is fed to the decrypt_* calls (presumably part of
    // their decryption key — TODO confirm).
    for files_to_read in (0..files_count).rev() {

        // Size in bytes of this file's data.
        let size = if self.header.bitmask.contains(PFHFlags::HAS_ENCRYPTED_INDEX) {
            buffer_mem.decrypt_u32(files_to_read)?
        } else {
            buffer_mem.read_u32()?
        };

        // Optional per-file timestamp; 0 when the Pack doesn't store them.
        let timestamp = u64::from(if self.header.bitmask.contains(PFHFlags::HAS_INDEX_WITH_TIMESTAMPS) {
            if self.header.bitmask.contains(PFHFlags::HAS_ENCRYPTED_INDEX) {
                buffer_mem.decrypt_u32(files_to_read)?
            } else { buffer_mem.read_u32()? }
        } else { 0 });

        // One byte flags whether this file's data is compressed. Any compressed
        // file marks the whole Pack as using compression.
        let is_compressed = buffer_mem.read_bool()?;
        self.compress |= is_compressed;

        // Path within the Pack, normalized to forward slashes.
        let path = if self.header.bitmask.contains(PFHFlags::HAS_ENCRYPTED_INDEX) {
            buffer_mem.decrypt_string(size as u8)?
        } else {
            buffer_mem.read_string_u8_0terminated()?
        }.replace('\\', "/");

        // Register the file entry (data loaded lazily from `data_pos`) and advance
        // past its data.
        let file = RFile::new_from_container(self, size as u64, is_compressed, files_are_encrypted, data_pos, timestamp, &path)?;
        self.insert(file)?;
        data_pos += u64::from(size);
    }

    // Offset where the last file's data ends.
    Ok(data_pos)
}
/// Writes this Pack in PFH5 format into `buffer`.
///
/// Files are encoded (and, when enabled, compressed) in parallel; then the
/// header, dependency index, file index and file data are written in that order.
/// As a side effect, `self.header.internal_timestamp` is refreshed unless dates
/// are nullified or test mode is on.
///
/// Errors with `RLibError::GameInfoMissingFromEncodingFunction` when no game
/// info is supplied, and `RLibError::DataTooBigForContainer` when a file's
/// encoded data exceeds `u32::MAX` bytes.
pub(crate) fn write_pfh5<W: WriteBytes>(&mut self, buffer: &mut W, extra_data: &Option<EncodeableExtraData>) -> Result<()> {

    // Unpack the encoding options, falling back to conservative defaults.
    let (test_mode, nullify_dates, game_info, compression_format) = if let Some(extra_data) = extra_data {
        (extra_data.test_mode, extra_data.nullify_dates, extra_data.game_info, extra_data.compression_format)
    } else {
        (false, false, None, CompressionFormat::None)
    };

    // Game info is mandatory: the per-file compressibility check below needs it.
    let game_info = match game_info {
        Some(game_info) => game_info,
        None => return Err(RLibError::GameInfoMissingFromEncodingFunction),
    };

    // Paths are stored with windows separators, sorted case-insensitively.
    let mut sorted_files = self.files.iter_mut().map(|(key, file)| (key.replace('/', "\\"), file)).collect::<Vec<(String, &mut RFile)>>();
    sorted_files.sort_unstable_by_key(|(path, _)| path.to_lowercase());

    // Encode every file in parallel, producing (index entry, data) pairs.
    // Any single failure aborts the whole encode via the Result collect.
    let (files_index, files_data): (Vec<_>, Vec<_>) = sorted_files.par_iter_mut()
        .map(|(path, file)| {

            // encode() called with these flags is expected to always hand the data
            // back; a None here is an internal logic error, so fail loudly with a
            // message instead of a bare unwrap.
            let mut data = file.encode(extra_data, false, false, true)?
                .expect("RFile::encode must return data when asked to return it");

            // Compression is best-effort: on failure we silently keep the raw data.
            let mut has_been_compressed = false;
            if self.compress && file.is_compressible(game_info) && compression_format != CompressionFormat::None {
                if let Ok(data_compressed) = data.compress(compression_format) {
                    data = data_compressed;
                    has_been_compressed = true;
                }
            }

            // Entry layout: size (4) [+ timestamp (4)] + compressed flag (1) + path + null (1).
            let file_index_entry_len = if self.header.bitmask.contains(PFHFlags::HAS_INDEX_WITH_TIMESTAMPS) {
                10 + path.len()
            } else {
                6 + path.len()
            };
            let mut file_index_entry = Vec::with_capacity(file_index_entry_len);

            // Sizes are stored as u32, so bigger files simply cannot be written.
            if data.len() > u32::MAX as usize {
                return Err(RLibError::DataTooBigForContainer("Pack".to_owned(), u32::MAX as u64, data.len(), path.to_owned()));
            }
            file_index_entry.write_u32(data.len() as u32)?;

            if self.header.bitmask.contains(PFHFlags::HAS_INDEX_WITH_TIMESTAMPS) {
                let timestamp = if nullify_dates {
                    0
                } else {
                    // The format stores timestamps as u32; wider values truncate.
                    file.timestamp().unwrap_or(0) as u32
                };
                file_index_entry.write_u32(timestamp)?;
            }

            file_index_entry.write_bool(has_been_compressed)?;
            file_index_entry.write_string_u8_0terminated(path)?;
            Ok((file_index_entry, data))
        }).collect::<Result<Vec<(Vec<u8>, Vec<u8>)>>>()?
        .into_par_iter()
        .unzip();

    // Pack index: only hard dependencies are written out.
    let mut dependencies_index = vec![];
    let mut dependencies_count: u32 = 0;
    for (hard, dependency) in &self.dependencies {
        if *hard {
            dependencies_index.write_string_u8_0terminated(dependency)?;
            dependencies_count += 1;
        }
    }

    // Header: preamble string, flags|file-type, counts/sizes of both indexes, timestamp.
    let mut header = vec![];
    header.write_string_u8(self.header.pfh_version.value())?;
    header.write_u32(self.header.bitmask.bits() | self.header.pfh_file_type.value())?;
    header.write_u32(dependencies_count)?;
    header.write_u32(dependencies_index.len() as u32)?;
    header.write_u32(sorted_files.len() as u32)?;
    header.write_u32(files_index.par_iter().map(|x| x.len() as u32).sum())?;

    // Refresh the timestamp unless dates are nullified or we're in test mode
    // (presumably kept fixed there so test outputs are reproducible).
    if nullify_dates {
        self.header.internal_timestamp = 0;
    } else if !test_mode {
        self.header.internal_timestamp = current_time()?;
    }
    header.write_u32(self.header.internal_timestamp as u32)?;

    // Final layout: header, pack index, file index, then all the file data.
    buffer.write_all(&header)?;
    buffer.write_all(&dependencies_index)?;
    for index in files_index {
        buffer.write_all(&index)?;
    }
    for data in files_data {
        buffer.write_all(&data)?;
    }

    Ok(())
}
}