use bitflags::bitflags;
use getset::*;
use rayon::prelude::*;
use serde_derive::{Serialize, Deserialize};
use serde_json::{from_slice, to_string_pretty};
use itertools::Itertools;
use std::cmp::Ordering;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fs::File;
use std::io::{BufReader, BufWriter, Cursor, SeekFrom, Write};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use crate::binary::{ReadBytes, WriteBytes};
use crate::compression::{Compressible, CompressionFormat};
use crate::error::{RLibError, Result};
use crate::files::{Container, ContainerPath, Decodeable, DecodeableExtraData, Encodeable, EncodeableExtraData, FileType, Loc, RFile, RFileDecoded, table::DecodedData};
use crate::games::{GameInfo, pfh_file_type::PFHFileType, pfh_version::PFHVersion};
use crate::notes::Note;
use crate::utils::{current_time, last_modified_time_from_file};
#[cfg(test)]
mod pack_test;
mod pack_versions;
/// File extension used by Pack files.
pub const EXTENSION: &str = ".pack";
/// Optional preamble some Packs carry before the real header; when detected,
/// decoding starts parsing the header 8 bytes in (see `Pack::read`).
const MFH_PREAMBLE: &str = "MFH";
/// Folder inside a Pack where Terry (Assembly Kit) battle map files live.
const TERRY_MAP_PATH: &str = "terrain/tiles/battle/_assembly_kit";
/// Name of the binary map-data file the SiegeAI patcher looks for.
const DEFAULT_BMD_DATA: &str = "bmd_data.bin";
// Path prefixes for the generated "missing locs" Loc files: one holding
// entries that already exist in known locs, one holding new placeholders.
const MISSING_LOCS_PATH_START_EXISTING: &str = "text/aaa_missing_locs_";
const MISSING_LOCS_PATH_START_NEW: &str = "text/zzz_missing_locs_";
// Byte markers searched for (and swapped) by `patch_siege_ai`.
const FORT_PERIMETER_HINT: &[u8; 18] = b"AIH_FORT_PERIMETER";
const DEFENSIVE_HILL_HINT: &[u8; 18] = b"AIH_DEFENSIVE_HILL";
const SIEGE_AREA_NODE_HINT: &[u8; 19] = b"AIH_SIEGE_AREA_NODE";
// Reserved in-Pack file names this library uses for its own metadata.
pub const RESERVED_NAME_DEPENDENCIES_MANAGER: &str = "dependencies_manager.rpfm_reserved";
pub const RESERVED_NAME_DEPENDENCIES_MANAGER_V2: &str = "dependencies_manager_v2.rpfm_reserved";
pub const RESERVED_NAME_EXTRA_PACKFILE: &str = "extra_packfile.rpfm_reserved";
pub const RESERVED_NAME_SETTINGS: &str = "settings.rpfm_reserved";
/// On-disk name used when the settings metadata is extracted (JSON file).
pub const RESERVED_NAME_SETTINGS_EXTRACTED: &str = "settings.rpfm_reserved.json";
pub const RESERVED_NAME_NOTES: &str = "notes.rpfm_reserved";
/// On-disk name used when the notes metadata is extracted (`.md` file).
pub const RESERVED_NAME_NOTES_EXTRACTED: &str = "notes.rpfm_reserved.md";
/// Names callers may never use as destination paths (see `move_path`).
pub const RESERVED_RFILE_NAMES: [&str; 3] = [RESERVED_NAME_EXTRA_PACKFILE, RESERVED_NAME_SETTINGS, RESERVED_NAME_NOTES];
// Values written to the header's "authoring tool" field.
const AUTHORING_TOOL_CA: &str = "CA_TOOL";
const AUTHORING_TOOL_RPFM: &str = "RPFM";
// NOTE(review): not referenced in this chunk — presumably the on-disk size of
// the authoring-tool field, used by the per-version readers/writers. Confirm
// in the pack_versions module.
const AUTHORING_TOOL_SIZE: u32 = 8;
/// Pack-settings key under which the compression format is stored.
pub const SETTING_KEY_CF: &str = "compression_format";
bitflags! {
/// Header flags of a Pack.
///
/// These live in the same `u32` as the PFH file type: the lowest 4 bits hold
/// the type, the rest are parsed into this bitmask (see `Pack::read`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct PFHFlags: u32 {
/// The Pack has an extended header.
const HAS_EXTENDED_HEADER = 0b0000_0001_0000_0000;
/// The file index of the Pack is encrypted.
const HAS_ENCRYPTED_INDEX = 0b0000_0000_1000_0000;
/// The file index carries a timestamp per file.
const HAS_INDEX_WITH_TIMESTAMPS = 0b0000_0000_0100_0000;
/// The file data of the Pack is encrypted.
const HAS_ENCRYPTED_DATA = 0b0000_0000_0001_0000;
}
}
/// A whole Pack file: header, contained files, and the RPFM-specific metadata
/// (notes, settings, dependencies) that travels with it.
#[derive(Debug, Clone, PartialEq, Getters, MutGetters, Setters, Default, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub", set = "pub")]
pub struct Pack {
// Path of the Pack on disk, '/'-separated. Empty when not backed by a file.
disk_file_path: String,
// Byte offset where this Pack's data starts within the disk file.
disk_file_offset: u64,
// Last-modified time of the disk file, captured when the Pack was read.
local_timestamp: u64,
// Whether files should be compressed on save. Deliberately not exposed via
// getset; managed through `set_compression_format`.
#[getset(skip)]
compress: bool,
// Decoded Pack header.
header: PackHeader,
// Pack dependencies as (hard?, pack name) pairs.
// NOTE(review): the meaning of the bool is not visible in this file — confirm.
dependencies: Vec<(bool, String)>,
// Files in the Pack, keyed by their full in-container path.
files: HashMap<String, RFile>,
// Cache mapping folders to the file paths under them (see `paths_cache*`).
paths: HashMap<String, Vec<String>>,
// Pack-wide and per-file notes.
notes: PackNotes,
// Pack-wide settings.
settings: PackSettings,
}
/// Decoded header of a Pack file.
#[derive(Debug, Clone, PartialEq, Eq, Getters, Setters, Serialize, Deserialize)]
#[getset(get = "pub", set = "pub")]
pub struct PackHeader {
// Version of the Pack format (PFH0..PFH6).
pfh_version: PFHVersion,
// Type of the Pack (Mod, Movie, Release, ...), from the low 4 bits.
pfh_file_type: PFHFileType,
// Header flags, from the remaining bits.
bitmask: PFHFlags,
// Timestamp stored inside the Pack itself.
internal_timestamp: u64,
// Game version recorded in the header.
game_version: u32,
// Build number recorded in the header.
build_number: u32,
// Tool that authored the Pack ("RPFM" by default, "CA_TOOL" when spoofed).
authoring_tool: String,
// Raw subheader bytes this library does not interpret.
extra_subheader_data: Vec<u8>,
}
/// Per-Pack settings, stored as four typed key/value maps and serialized as
/// JSON into the reserved settings file.
#[derive(Clone, Debug, PartialEq, Eq, Getters, MutGetters, Setters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub", set = "pub")]
pub struct PackSettings {
// Multiline text settings (e.g. "diagnostics_files_to_ignore").
settings_text: BTreeMap<String, String>,
// Single-line string settings (e.g. the compression format).
settings_string: BTreeMap<String, String>,
// Boolean settings.
settings_bool: BTreeMap<String, bool>,
// Numeric settings.
settings_number: BTreeMap<String, i32>,
}
/// Notes attached to a Pack: one free-form Pack-wide text, plus notes keyed by
/// (lowercased) in-Pack path.
#[derive(Clone, Debug, PartialEq, Eq, Default, Getters, MutGetters, Setters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub", set = "pub")]
pub struct PackNotes {
// Free-form notes for the whole Pack.
pack_notes: String,
// Notes per path; keys are lowercased paths (see `add_note`/`delete_note`).
file_notes: HashMap<String, Vec<Note>>,
}
impl Container for Pack {
/// Writes the Pack's notes and settings to `destination_path` as pretty JSON
/// files, returning the paths of the files written (notes first).
fn extract_metadata(&mut self, destination_path: &Path) -> Result<Vec<PathBuf>> {

    // Dumps `json` (plus a trailing newline) into a buffered file at `path`.
    fn dump_json(path: &Path, json: &str) -> Result<()> {
        let mut writer = BufWriter::new(File::create(path)?);
        writer.write_all(json.as_bytes())?;
        writer.write_all(b"\n")?;
        writer.flush()?;
        Ok(())
    }

    let notes_path = destination_path.join(RESERVED_NAME_NOTES_EXTRACTED);
    dump_json(&notes_path, &to_string_pretty(&self.notes)?)?;

    let settings_path = destination_path.join(RESERVED_NAME_SETTINGS_EXTRACTED);
    dump_json(&settings_path, &to_string_pretty(&self.settings)?)?;

    Ok(vec![notes_path, settings_path])
}
/// Inserts a file into the Pack, intercepting the reserved metadata names.
///
/// Files carrying the extracted notes/settings names, or the v2 dependencies
/// manager name, are not stored as regular files: their contents are parsed
/// back into the Pack's own fields and `Ok(None)` is returned. Any other file
/// is stored under its path, which is returned.
fn insert(&mut self, mut file: RFile) -> Result<Option<ContainerPath>> {
let path_container = file.path_in_container();
let path = file.path_in_container_raw();
if path == RESERVED_NAME_NOTES_EXTRACTED {
// NOTE(review): the unwrap assumes `encode(..., true)` always returns the
// encoded bytes when asked to — confirm against RFile::encode.
self.notes = PackNotes::load(&file.encode(&None, false, false, true)?.unwrap())?;
Ok(None)
} else if path == RESERVED_NAME_SETTINGS_EXTRACTED {
self.settings = PackSettings::load(&file.encode(&None, false, false, true)?.unwrap())?;
Ok(None)
} else if path == RESERVED_NAME_DEPENDENCIES_MANAGER_V2 {
self.dependencies = from_slice(&file.encode(&None, false, false, true)?.unwrap())?;
Ok(None)
}
else {
// Regular file: keep the folder->files cache in sync, then store it.
self.paths_cache_insert_path(path);
self.files.insert(path.to_owned(), file);
Ok(Some(path_container))
}
}
// --- Plain accessors required by the `Container` trait. ---

/// Path of the Pack on disk (empty when not file-backed).
fn disk_file_path(&self) -> &str {
&self.disk_file_path
}
/// Files in the Pack, keyed by in-container path.
fn files(&self) -> &HashMap<String, RFile> {
&self.files
}
/// Mutable access to the Pack's files.
fn files_mut(&mut self) -> &mut HashMap<String, RFile> {
&mut self.files
}
/// Byte offset of the Pack's data within the disk file.
fn disk_file_offset(&self) -> u64 {
self.disk_file_offset
}
/// Folder -> contained-file-paths cache.
fn paths_cache(&self) -> &HashMap<String, Vec<String>> {
&self.paths
}
/// Mutable access to the folder cache.
fn paths_cache_mut(&mut self) -> &mut HashMap<String, Vec<String>> {
&mut self.paths
}
/// Timestamp stored inside the Pack header.
fn internal_timestamp(&self) -> u64 {
self.header.internal_timestamp
}
/// Last-modified time of the disk file at read time.
fn local_timestamp(&self) -> u64 {
self.local_timestamp
}
/// Moves a file or a whole folder to a new path inside the Pack, returning
/// the (old, new) path pairs of everything that moved.
///
/// Mixed File/Folder combinations are considered caller bugs and hit
/// `unreachable!`. Destinations must be non-empty, and files may not be moved
/// onto one of the reserved metadata names.
fn move_path(&mut self, source_path: &ContainerPath, destination_path: &ContainerPath) -> Result<Vec<(ContainerPath, ContainerPath)>> {
match source_path {
ContainerPath::File(source_path) => match destination_path {
ContainerPath::File(destination_path) => {
if RESERVED_RFILE_NAMES.contains(&&**destination_path) {
return Err(RLibError::ReservedFiles);
}
if destination_path.is_empty() {
return Err(RLibError::EmptyDestiny);
}
// Drop the source from the folder cache before taking the file out.
self.paths_cache_remove_path(source_path);
let mut moved = self.files_mut()
.remove(source_path)
.ok_or_else(|| RLibError::FileNotFound(source_path.to_string()))?;
moved.set_path_in_container_raw(destination_path);
// `insert` returns None for reserved names it absorbed; map that to
// "nothing visibly moved".
self.insert(moved).map(|x| match x {
Some(x) => vec![(ContainerPath::File(source_path.to_string()), x); 1],
None => Vec::with_capacity(0),
})
},
ContainerPath::Folder(_) => unreachable!("move_path_pack_1"),
},
ContainerPath::Folder(source_path) => match destination_path {
ContainerPath::File(_) => unreachable!("move_path_pack_2"),
ContainerPath::Folder(destination_path) => {
if destination_path.is_empty() {
return Err(RLibError::EmptyDestiny);
}
// Normalize the source folder to end in '/' so prefix matching can't
// catch sibling folders that merely share the name prefix.
let mut source_path_end = source_path.to_owned();
if !source_path_end.ends_with('/') {
source_path_end.push('/');
}
// Collect affected paths first (parallel scan), then take them out.
let moved_paths = self.files()
.par_iter()
.filter_map(|(path, _)| if path.starts_with(&source_path_end) { Some(path.to_owned()) } else { None })
.collect::<Vec<_>>();
let moved = moved_paths.iter()
.filter_map(|x| {
self.paths_cache_remove_path(x);
self.files_mut().remove(x)
})
.collect::<Vec<_>>();
let mut new_paths = Vec::with_capacity(moved.len());
for mut moved in moved {
let old_path = moved.path_in_container();
// Only the first occurrence of the folder prefix is rewritten.
let new_path = moved.path_in_container_raw().replacen(source_path, destination_path, 1);
moved.set_path_in_container_raw(&new_path);
if let Some(new_path) = self.insert(moved)? {
new_paths.push((old_path, new_path));
}
}
Ok(new_paths)
},
},
}
}
}
/// Decoding entry point: delegates to the internal `read`, which needs the
/// extra data (game info, disk path, etc.) to be present.
impl Decodeable for Pack {
fn decode<R: ReadBytes>(data: &mut R, extra_data: &Option<DecodeableExtraData>) -> Result<Self> {
Self::read(data, extra_data)
}
}
/// Encoding entry point: delegates to the internal `write`.
impl Encodeable for Pack {
fn encode<W: WriteBytes>(&mut self, buffer: &mut W, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
self.write(buffer, extra_data)
}
}
impl Pack {
pub fn new_with_version(pfh_version: PFHVersion) -> Self {
let mut pack = Self::default();
pack.header.pfh_version = pfh_version;
pack
}
pub fn new_with_name_and_version(name: &str, pfh_version: PFHVersion) -> Self {
let mut pack = Self::default();
pack.header.pfh_version = pfh_version;
pack.disk_file_path = name.to_owned();
pack
}
/// Decodes a Pack from a byte source.
///
/// Requires extra data carrying at least the game info; an optional on-disk
/// path (validated to exist) enables lazy loading. After the version-specific
/// header/index read, the reserved metadata files are absorbed into the
/// Pack's fields, the path cache is built, and file types are guessed.
///
/// # Errors
/// Fails on missing extra data, a nonexistent disk path, a header shorter
/// than 24 bytes, an unknown PFH version/type, or a size mismatch between the
/// decoded index and the actual data length.
fn read<R: ReadBytes>(data: &mut R, extra_data: &Option<DecodeableExtraData>) -> Result<Self> {
let extra_data = extra_data.as_ref().ok_or(RLibError::DecodingMissingExtraData)?;
let game_info = match extra_data.game_info {
Some(game_info) => game_info,
None => return Err(RLibError::GameInfoMissingFromDecodingFunction),
};
// A provided disk path must point at an existing file; otherwise lazy
// loading against it would break later.
let disk_file_path = match extra_data.disk_file_path {
Some(path) => {
let file_path = PathBuf::from_str(path).map_err(|_|RLibError::DecodingMissingExtraDataField("disk_file_path".to_owned()))?;
if file_path.is_file() {
path.to_owned()
} else {
return Err(RLibError::DecodingMissingExtraData)
}
}
None => String::new()
};
let disk_file_offset = extra_data.disk_file_offset;
// Caller-provided size wins; fall back to the source's own length.
let disk_file_size = if extra_data.data_size > 0 { extra_data.data_size } else { data.len()? };
let timestamp = extra_data.timestamp;
let is_encrypted = extra_data.is_encrypted;
let skip_path_cache_generation = extra_data.skip_path_cache_generation;
// Lazy loading only works for unencrypted, file-backed Packs.
let lazy_load = !disk_file_path.is_empty() && !is_encrypted && extra_data.lazy_load;
let data_len = disk_file_size;
// Smallest valid header is 24 bytes.
if data_len < 24 {
return Err(RLibError::PackHeaderNotComplete);
}
// Detect the optional "MFH" preamble: peek 3 bytes, rewind, then position
// the cursor at offset 8 (past the preamble) or 0.
let start = if data.read_string_u8(3)? == MFH_PREAMBLE { 8 } else { 0 };
data.seek(SeekFrom::Current(-3))?;
data.seek(SeekFrom::Current(start))?;
let mut pack = Self {
disk_file_path,
disk_file_offset,
local_timestamp: timestamp,
..Default::default()
};
pack.header.pfh_version = PFHVersion::version(&data.read_string_u8(4)?)?;
// One u32 carries both the file type (low 4 bits) and the flags (the rest).
let pack_type = data.read_u32()?;
pack.header.pfh_file_type = PFHFileType::try_from(pack_type & 15)?;
pack.header.bitmask = PFHFlags::from_bits_truncate(pack_type & !15);
// The version-specific readers parse the rest of the header and the file
// index, and return the total size the Pack claims to occupy.
let expected_data_len = match pack.header.pfh_version {
PFHVersion::PFH6 => pack.read_pfh6(data, extra_data)?,
PFHVersion::PFH5 => pack.read_pfh5(data, extra_data)?,
PFHVersion::PFH4 => pack.read_pfh4(data, extra_data)?,
PFHVersion::PFH3 => pack.read_pfh3(data, extra_data)?,
PFHVersion::PFH2 => pack.read_pfh2(data, extra_data)?,
PFHVersion::PFH0 => pack.read_pfh0(data, extra_data)?,
};
// Absorb the reserved notes file. If it isn't valid JSON (old Packs),
// treat its raw bytes as the plain Pack-wide notes text.
if let Some(mut notes) = pack.files.remove(RESERVED_NAME_NOTES) {
notes.load()?;
let data = notes.cached()?;
match PackNotes::load(data) {
Ok(notes) => pack.notes = notes,
Err(_) => {
let len = data.len();
let mut data = Cursor::new(data);
pack.notes = PackNotes::default();
pack.notes.pack_notes = data.read_string_u8(len)?;
}
}
}
// Absorb the reserved settings file, merging it over the defaults.
if let Some(mut settings) = pack.files.remove(RESERVED_NAME_SETTINGS) {
settings.load()?;
let data = settings.cached()?;
pack.settings.load_and_update(data)?;
}
// Absorb the reserved v2 dependencies file.
if let Some(mut deps) = pack.files.remove(RESERVED_NAME_DEPENDENCIES_MANAGER_V2) {
deps.load()?;
let data = deps.cached()?;
pack.dependencies = from_slice(data)?;
}
if !skip_path_cache_generation {
pack.paths_cache_generate();
}
// NOTE(review): `pack.compress` can only be true here if one of the
// read_pfh* calls set it; in that case a missing stored format is replaced
// with the game's preferred one. Confirm against pack_versions.
let preferred_cf = game_info.compression_formats_supported().first().cloned().unwrap_or_default();
let current_cf_str = pack.settings().setting_string(SETTING_KEY_CF).cloned().unwrap_or_default();
let current_cf = CompressionFormat::from(&*current_cf_str);
if pack.compress && current_cf == CompressionFormat::None {
pack.settings_mut().set_setting_string(SETTING_KEY_CF, preferred_cf.to_string().as_str());
}
// The index must account for exactly the bytes we have.
if expected_data_len != data_len { return Err(RLibError::DecodingMismatchSizeError(data_len as usize, expected_data_len as usize)) }
pack.files.par_iter_mut().map(|(_, file)| file.guess_file_type()).collect::<Result<()>>()?;
// Without lazy loading, pull every file's data into memory now.
if !lazy_load {
pack.files.par_iter_mut().try_for_each(|(_, file)| file.load())?;
}
Ok(pack)
}
/// Encodes the Pack into `buffer`.
///
/// Outside of test mode, Mod/Movie Packs temporarily gain three reserved
/// files (notes, settings, dependencies serialized as pretty JSON) so the
/// metadata travels inside the Pack; they are removed again after writing so
/// the in-memory Pack stays clean.
fn write<W: WriteBytes>(&mut self, buffer: &mut W, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
let test_mode = if let Some(extra_data) = extra_data {
extra_data.test_mode
} else {
false
};
// Embed the metadata files (skipped in tests to keep output deterministic).
if !test_mode {
if self.header.pfh_file_type == PFHFileType::Mod || self.header.pfh_file_type == PFHFileType::Movie {
let mut data = vec![];
data.write_all(to_string_pretty(&self.notes)?.as_bytes())?;
let file = RFile::new_from_vec(&data, FileType::Text, 0, RESERVED_NAME_NOTES);
self.files.insert(RESERVED_NAME_NOTES.to_owned(), file);
let mut data = vec![];
data.write_all(to_string_pretty(&self.settings)?.as_bytes())?;
let file = RFile::new_from_vec(&data, FileType::Text, 0, RESERVED_NAME_SETTINGS);
self.files.insert(RESERVED_NAME_SETTINGS.to_owned(), file);
let mut data = vec![];
data.write_all(to_string_pretty(&self.dependencies)?.as_bytes())?;
let file = RFile::new_from_vec(&data, FileType::Text, 0, RESERVED_NAME_DEPENDENCIES_MANAGER_V2);
self.files.insert(RESERVED_NAME_DEPENDENCIES_MANAGER_V2.to_owned(), file);
}
}
// Write header + index + data in the Pack's own format version.
match self.header.pfh_version {
PFHVersion::PFH6 => self.write_pfh6(buffer, extra_data)?,
PFHVersion::PFH5 => self.write_pfh5(buffer, extra_data)?,
PFHVersion::PFH4 => self.write_pfh4(buffer, extra_data)?,
PFHVersion::PFH3 => self.write_pfh3(buffer, extra_data)?,
PFHVersion::PFH2 => self.write_pfh2(buffer, extra_data)?,
PFHVersion::PFH0 => self.write_pfh0(buffer, extra_data)?,
}
// Undo the temporary inserts (no-ops when test mode skipped them).
self.remove(&ContainerPath::File(RESERVED_NAME_NOTES.to_owned()));
self.remove(&ContainerPath::File(RESERVED_NAME_SETTINGS.to_owned()));
self.remove(&ContainerPath::File(RESERVED_NAME_DEPENDENCIES_MANAGER_V2.to_owned()));
Ok(())
}
/// Reads every vanilla (CA-provided) Pack of the game at `game_path` and
/// merges them into a single, lazily-loaded Pack of type Release, skipping
/// mod Packs.
pub fn read_and_merge_ca_packs(game: &GameInfo, game_path: &Path) -> Result<Self> {
    let ca_pack_paths = game.ca_packs_paths(game_path)?;
    let mut merged = Self::read_and_merge(&ca_pack_paths, game, true, true, false)?;
    merged.header_mut().set_pfh_file_type(PFHFileType::Release);
    Ok(merged)
}
pub fn read_and_merge(pack_paths: &[PathBuf], game: &GameInfo, lazy_load: bool, ignore_mods: bool, keep_order: bool) -> Result<Self> {
if pack_paths.is_empty() {
return Err(RLibError::NoPacksProvided);
}
let mut extra_data = DecodeableExtraData {
lazy_load,
game_info: Some(game),
..Default::default()
};
if pack_paths.len() == 1 {
let mut data = BufReader::new(File::open(&pack_paths[0])
.map_err(|error| RLibError::IOErrorPath(Box::new(RLibError::IOError(error)), pack_paths[0].to_path_buf()))?);
let path_str = pack_paths[0].to_string_lossy().replace('\\', "/");
extra_data.set_disk_file_path(Some(&path_str));
extra_data.set_timestamp(last_modified_time_from_file(data.get_ref()).unwrap());
return Self::read(&mut data, &Some(extra_data))
}
extra_data.set_skip_path_cache_generation(true);
let mut pack_new = Pack::default();
let mut packs = pack_paths.par_iter()
.map(|path| {
let mut data = BufReader::new(File::open(path)
.map_err(|error| RLibError::IOErrorPath(Box::new(RLibError::IOError(error)), pack_paths[0].to_path_buf()))?);
let path_str = path.to_string_lossy().replace('\\', "/");
let mut extra_data = extra_data.to_owned();
extra_data.set_disk_file_path(Some(&path_str));
extra_data.set_timestamp(last_modified_time_from_file(data.get_ref())?);
Self::read(&mut data, &Some(extra_data))
}).collect::<Result<Vec<Pack>>>()?;
packs.sort_by(|pack_a, pack_b| if pack_a.pfh_file_type() != pack_b.pfh_file_type() {
pack_a.pfh_file_type().cmp(&pack_b.pfh_file_type())
} else if !keep_order {
pack_a.disk_file_path.cmp(&pack_b.disk_file_path)
} else {
Ordering::Equal
});
packs.iter()
.chunk_by(|pack| pack.header.pfh_file_type)
.into_iter()
.for_each(|(pfh_type, packs)| {
if pfh_type != PFHFileType::Mod || !ignore_mods {
let mut packs = packs.collect::<Vec<_>>();
packs.reverse();
packs.iter()
.for_each(|pack| {
pack_new.files_mut().extend(pack.files().clone())
});
}
});
let pack_names = packs.iter().map(|pack| pack.disk_file_name()).collect::<Vec<_>>();
let mut dependencies = packs.iter()
.flat_map(|pack| pack.dependencies()
.iter()
.filter(|(_, dependency)| !pack_names.contains(dependency))
.cloned()
.collect::<Vec<_>>())
.collect::<Vec<_>>();
let mut set = HashSet::new();
dependencies.retain(|x| set.insert(x.clone()));
pack_new.set_dependencies(dependencies);
pack_new.set_pfh_file_type(packs[0].pfh_file_type());
pack_new.set_pfh_version(game.pfh_version_by_file_type(pack_new.pfh_file_type()));
pack_new.paths_cache_generate();
Ok(pack_new)
}
/// Merges the provided, already-loaded Packs into a new one.
///
/// Files are merged per PFH type group in reverse, so Packs earlier in
/// `packs` win on path collisions. External dependencies are carried over,
/// deduplicated in first-seen order. The merged Pack takes the first Pack's
/// type and version.
pub fn merge(packs: &[Self]) -> Result<Self> {
if packs.is_empty() {
return Err(RLibError::NoPacksProvided);
}
// Single Pack: nothing to merge.
if packs.len() == 1 {
return Ok(packs[0].clone());
}
let mut pack_new = Pack::default();
let mut pfh_types = packs.iter().map(|pack| pack.pfh_file_type()).collect::<Vec<_>>();
pfh_types.sort();
pfh_types.dedup();
// NOTE(review): this conditional assignment is overwritten unconditionally
// by the `set_pfh_file_type(packs[0]...)` call near the end, which makes it
// dead code (harmless when all Packs share a type, ignored otherwise).
// Confirm which behavior is intended.
if pfh_types.len() == 1 {
pack_new.set_pfh_file_type(pfh_types[0]);
}
packs.iter()
.chunk_by(|pack| pack.header.pfh_file_type)
.into_iter()
.for_each(|(_, packs)| {
// Reverse within each group so earlier Packs overwrite later ones.
let mut packs = packs.collect::<Vec<_>>();
packs.reverse();
packs.iter()
.for_each(|pack| {
pack_new.files_mut().extend(pack.files().clone())
});
});
// Keep only dependencies pointing outside the merged set, deduplicated
// while preserving first-seen order.
let pack_names = packs.iter().map(|pack| pack.disk_file_name()).collect::<Vec<_>>();
let mut dependencies = packs.iter()
.flat_map(|pack| pack.dependencies()
.iter()
.filter(|(_, dependency)| !pack_names.contains(dependency))
.cloned()
.collect::<Vec<_>>())
.collect::<Vec<_>>();
let mut set = HashSet::new();
dependencies.retain(|x| set.insert(x.clone()));
pack_new.set_dependencies(dependencies);
pack_new.set_pfh_file_type(packs[0].pfh_file_type());
pack_new.set_pfh_version(packs[0].pfh_version());
pack_new.paths_cache_generate();
Ok(pack_new)
}
/// Saves the Pack to disk, optionally at a new path (which becomes the Pack's
/// disk path). Every file is fully loaded first so lazy-loaded data survives
/// overwriting the source file. When no encode extra data is provided, a
/// default one is derived from `game_info`.
pub fn save(&mut self, path: Option<&Path>, game_info: &GameInfo, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
    if let Some(path) = path {
        self.disk_file_path = path.to_string_lossy().to_string();
    }

    // Pull all lazy-loaded data into memory before touching the disk file.
    for file in self.files.values_mut() {
        file.load()?;
    }

    let mut writer = BufWriter::new(File::create(&self.disk_file_path)?);
    let extra_data = extra_data
        .clone()
        .or_else(|| Some(EncodeableExtraData::new_from_game_info(game_info)));
    self.encode(&mut writer, &extra_data)
}
// --- Convenience by-value getters over the header fields. ---

/// PFH version of the Pack.
pub fn pfh_version(&self) -> PFHVersion {
*self.header.pfh_version()
}
/// PFH file type of the Pack.
pub fn pfh_file_type(&self) -> PFHFileType {
*self.header.pfh_file_type()
}
/// Header flags of the Pack.
pub fn bitmask(&self) -> PFHFlags {
*self.header.bitmask()
}
/// Timestamp stored inside the Pack header.
pub fn internal_timestamp(&self) -> u64 {
*self.header.internal_timestamp()
}
/// Game version recorded in the header.
pub fn game_version(&self) -> u32 {
*self.header.game_version()
}
/// Build number recorded in the header.
pub fn build_number(&self) -> u32 {
*self.header.build_number()
}
/// Authoring tool recorded in the header.
pub fn authoring_tool(&self) -> &str {
self.header.authoring_tool()
}
/// Uninterpreted subheader bytes.
pub fn extra_subheader_data(&self) -> &[u8] {
self.header.extra_subheader_data()
}
/// Compression format stored in the Pack's settings (None when unset or
/// unrecognized).
pub fn compression_format(&self) -> CompressionFormat {
let cf = self.settings().setting_string(SETTING_KEY_CF).map(|x| x.to_owned());
CompressionFormat::from(cf.unwrap_or_default().as_str())
}
// --- Convenience setters over the header fields. ---

/// Sets the PFH version of the Pack.
pub fn set_pfh_version(&mut self, version: PFHVersion) {
self.header.set_pfh_version(version);
}
/// Sets the PFH file type of the Pack.
pub fn set_pfh_file_type(&mut self, file_type: PFHFileType) {
self.header.set_pfh_file_type(file_type);
}
/// Sets the header flags of the Pack.
pub fn set_bitmask(&mut self, bitmask: PFHFlags) {
self.header.set_bitmask(bitmask);
}
/// Sets the timestamp stored inside the Pack header.
pub fn set_internal_timestamp(&mut self, timestamp: u64) {
self.header.set_internal_timestamp(timestamp);
}
/// Sets the game version recorded in the header.
pub fn set_game_version(&mut self, game_version: u32) {
self.header.set_game_version(game_version);
}
/// Sets the build number recorded in the header.
pub fn set_build_number(&mut self, build_number: u32) {
self.header.set_build_number(build_number);
}
/// Sets the authoring tool recorded in the header.
pub fn set_authoring_tool(&mut self, authoring_tool: &str) {
self.header.set_authoring_tool(authoring_tool.to_string());
}
/// Replaces the uninterpreted subheader bytes.
pub fn set_extra_subheader_data(&mut self, extra_subheader_data: &[u8]) {
self.header.set_extra_subheader_data(extra_subheader_data.to_vec());
}
/// Sets the compression format used when saving the Pack, falling back to
/// `None` (compression disabled) when `cf` is `None` or unsupported by the
/// game. Returns the format that was actually applied.
pub fn set_compression_format(&mut self, cf: CompressionFormat, game_info: &GameInfo) -> CompressionFormat {
    // Short-circuit: the supported-formats list is only consulted when a real
    // format was requested.
    let disabled = cf == CompressionFormat::None
        || !game_info.compression_formats_supported().contains(&cf);
    let applied = if disabled { CompressionFormat::None } else { cf };
    self.compress = !disabled;
    self.settings_mut().set_setting_string(SETTING_KEY_CF, applied.to_string().as_str());
    applied
}
/// Marks the Pack's authoring tool as CA's own tool (when `spoof` is true) or
/// back to RPFM's.
pub fn spoof_ca_authoring_tool(&mut self, spoof: bool) {
    let tool = if spoof { AUTHORING_TOOL_CA } else { AUTHORING_TOOL_RPFM };
    self.header.set_authoring_tool(tool.to_string());
}
/// Whether this Pack's format version supports compressed files (only PFH5
/// and PFH6 do).
pub fn is_compressible(&self) -> bool {
    match self.header.pfh_version {
        PFHVersion::PFH5 | PFHVersion::PFH6 => true,
        _ => false,
    }
}
/// In-Pack paths of the two generated "missing locs" files for this Pack:
/// `(existing-entries file, new-placeholders file)`.
pub fn missing_locs_paths(&self) -> (String, String) {
    let pack_name = self.disk_file_name();
    let existing = format!("{}{}.loc", MISSING_LOCS_PATH_START_EXISTING, pack_name);
    let new = format!("{}{}.loc", MISSING_LOCS_PATH_START_NEW, pack_name);
    (existing, new)
}
/// Scans the Pack's DB tables for localised fields without a matching Loc
/// entry and generates up to two Loc files with them.
///
/// Keys found in `existing_locs` go into the "existing" file with their known
/// value (unless disabled by the `do_not_generate_existing_locs` setting);
/// keys known to no decoded Loc table in the Pack go into the "new" file with
/// a "PLACEHOLDER" value. Returns the paths of the files inserted.
pub fn generate_missing_loc_data(&mut self, existing_locs: &HashMap<String, String>) -> Result<Vec<ContainerPath>> {
let mut new_files = vec![];
let (missing_locs_path_existing, missing_locs_path_new) = self.missing_locs_paths();
let db_tables = self.files_by_type(&[FileType::DB]);
let loc_tables = self.files_by_type(&[FileType::Loc]);
let mut missing_trads_file_new = Loc::new();
let mut missing_trads_file_overwritten = Loc::new();
// Collect every loc key already present in the Pack's decoded Loc tables,
// skipping previously-generated missing-locs files so reruns stay stable.
let loc_keys_from_memory = loc_tables.par_iter().filter_map(|rfile| {
if rfile.path_in_container_raw() != missing_locs_path_new && rfile.path_in_container_raw() != missing_locs_path_existing {
if let Ok(RFileDecoded::Loc(table)) = rfile.decoded() {
Some(table.data().iter().filter_map(|x| {
if let DecodedData::StringU16(data) = &x[0] {
Some(data.to_owned())
} else {
None
}
}).collect::<HashSet<String>>())
} else { None }
} else { None }
}).flatten().collect::<HashSet<String>>();
// Scan every decoded DB table with localised fields and build the rows of
// both Loc files in parallel.
let (missing_trads_new, missing_trads_overwritten) = db_tables.par_iter().filter_map(|rfile| {
if let Ok(RFileDecoded::DB(table)) = rfile.decoded() {
let definition = table.definition();
let loc_fields = definition.localised_fields();
if !loc_fields.is_empty() {
let table_data = table.data();
let table_name = table.table_name_without_tables();
let localised_order = definition.localised_key_order();
let mut new_rows_new = vec![];
let mut new_rows_overwritten = vec![];
for row in table_data.iter() {
for loc_field in loc_fields {
// Loc keys are "<table>_<field>_<concatenated key columns>".
let key = localised_order.iter().map(|pos| row[*pos as usize].data_to_string()).join("");
if !key.is_empty() {
let loc_key = format!("{}_{}_{}", table_name, loc_field.name(), key);
if let Some(value) = existing_locs.get(&loc_key) {
let mut new_row = missing_trads_file_overwritten.new_row();
new_row[0] = DecodedData::StringU16(loc_key);
new_row[1] = DecodedData::StringU16(value.to_owned());
new_rows_overwritten.push(new_row);
} else if !loc_keys_from_memory.contains(&*loc_key) {
let mut new_row = missing_trads_file_new.new_row();
new_row[0] = DecodedData::StringU16(loc_key);
new_row[1] = DecodedData::StringU16("PLACEHOLDER".to_owned());
new_rows_new.push(new_row);
}
}
}
}
return Some((new_rows_new, new_rows_overwritten))
}
}
None
}).collect::<(Vec<Vec<Vec<DecodedData>>>, Vec<Vec<Vec<DecodedData>>>)>();
let missing_trads_new = missing_trads_new.into_iter().flatten().collect::<Vec<_>>();
let missing_trads_overwritten = missing_trads_overwritten.into_iter().flatten().collect::<Vec<_>>();
if !missing_trads_new.is_empty() {
let _ = missing_trads_file_new.set_data(&missing_trads_new);
let packed_file = RFile::new_from_decoded(&RFileDecoded::Loc(missing_trads_file_new), 0, &missing_locs_path_new);
new_files.push(self.insert(packed_file)?.unwrap());
}
// The "existing" file can be disabled through a Pack setting.
if !missing_trads_overwritten.is_empty() && !self.settings.setting_bool("do_not_generate_existing_locs").unwrap_or(&false) {
let _ = missing_trads_file_overwritten.set_data(&missing_trads_overwritten);
let packed_file = RFile::new_from_decoded(&RFileDecoded::Loc(missing_trads_file_overwritten), 0, &missing_locs_path_existing);
new_files.push(self.insert(packed_file)?.unwrap());
}
Ok(new_files)
}
/// Applies the SiegeAI patch to the Terry map files of this Pack.
///
/// In every `bmd_data.bin`/`catchment_*.bin` under the Assembly Kit folder
/// that uses SiegeAI (contains an `AIH_SIEGE_AREA_NODE` marker), the first
/// `AIH_DEFENSIVE_HILL` marker is replaced in-place with
/// `AIH_FORT_PERIMETER`; leftover `.xml` exports are deleted. Returns a
/// human-readable report plus the paths of the deleted files.
pub fn patch_siege_ai(&mut self) -> Result<(String, Vec<ContainerPath>)> {
if self.files().is_empty() {
return Err(RLibError::PatchSiegeAIEmptyPack)
}
let mut files_patched = 0;
let mut files_to_delete: Vec<ContainerPath> = vec![];
let mut multiple_defensive_hill_hints = false;
for file in self.files_by_path_mut(&ContainerPath::Folder(TERRY_MAP_PATH.to_owned()), true) {
let path = file.path_in_container_raw();
// Extract the file name after the last '/'.
// NOTE(review): with no '/' in the path, `unwrap_or(0)` makes this drop
// the first character — unreachable in practice since every file here
// sits under TERRY_MAP_PATH, but worth confirming.
let idx = path.rfind('/').unwrap_or(0);
let name = if path.get(idx + 1..).is_some() {
&path[idx + 1..]
} else {
continue
};
if name == DEFAULT_BMD_DATA || (name.starts_with("catchment_") && name.ends_with(".bin")) {
file.load()?;
let data = file.cached_mut()?;
// Only SiegeAI maps carry the siege-area marker; others are untouched.
if data.windows(19).any(|window: &[u8]|window == SIEGE_AREA_NODE_HINT) {
// Swap only the FIRST defensive-hill marker (same length, so an
// in-place splice works).
if let Some(index) = data.windows(18).position(|window: &[u8]|window == DEFENSIVE_HILL_HINT) {
data.splice(index..index + 18, FORT_PERIMETER_HINT.iter().cloned());
files_patched += 1;
}
// Any marker still present means the map had more than one.
if data.windows(18).any(|window: &[u8]|window == DEFENSIVE_HILL_HINT) {
multiple_defensive_hill_hints = true;
}
}
}
else if name.ends_with(".xml") {
files_to_delete.push(ContainerPath::File(file.path_in_container_raw().to_string()));
}
}
files_to_delete.iter().for_each(|x| { self.remove(x); });
// Build the report depending on what was patched/deleted.
if files_patched == 0 && files_to_delete.is_empty() { Err(RLibError::PatchSiegeAINoPatchableFiles) }
else if files_patched == 0 {
Ok((format!("No file suitable for patching has been found.\n{} files deleted.", files_to_delete.len()), files_to_delete))
}
else if multiple_defensive_hill_hints {
if files_to_delete.is_empty() {
Ok((format!("{files_patched} files patched.\nNo file suitable for deleting has been found.\
\n\n\
WARNING: Multiple Defensive Hints have been found and we only patched the first one.\
If you are using SiegeAI, you should only have one Defensive Hill in the map (the \
one acting as the perimeter of your fort/city/castle). Due to SiegeAI being present, \
in the map, normal Defensive Hills will not work anyways, and the only thing they do \
is interfere with the patching process. So, if your map doesn't work properly after \
patching, delete all the extra Defensive Hill Hints. They are the culprit."), files_to_delete))
}
else {
Ok((format!("{} files patched.\n{} files deleted.\
\n\n\
WARNING: Multiple Defensive Hints have been found and we only patched the first one.\
If you are using SiegeAI, you should only have one Defensive Hill in the map (the \
one acting as the perimeter of your fort/city/castle). Due to SiegeAI being present, \
in the map, normal Defensive Hills will not work anyways, and the only thing they do \
is interfere with the patching process. So, if your map doesn't work properly after \
patching, delete all the extra Defensive Hill Hints. They are the culprit.",
files_patched, files_to_delete.len()), files_to_delete))
}
}
else if files_to_delete.is_empty() {
Ok((format!("{files_patched} files patched.\nNo file suitable for deleting has been found."), files_to_delete))
}
else {
Ok((format!("{} files patched.\n{} files deleted.", files_patched, files_to_delete.len()), files_to_delete))
}
}
}
impl PackNotes {
/// Deserializes a `PackNotes` from its JSON byte representation.
pub fn load(data: &[u8]) -> Result<Self> {
    let notes = from_slice(data)?;
    Ok(notes)
}
/// Returns clones of all notes applying to `path` (case-insensitive): notes
/// registered under an empty key apply to everything; otherwise a note
/// applies when its key is a prefix of the path (which includes exact
/// matches).
pub fn notes_by_path(&self, path: &str) -> Vec<Note> {
    let path_lower = path.to_lowercase();
    let mut found = Vec::new();
    for (note_path, notes) in self.file_notes() {
        if note_path.is_empty() || path_lower.starts_with(note_path.as_str()) {
            found.extend(notes.iter().cloned());
        }
    }
    found
}
/// Adds (or replaces) a note, returning the note as stored.
///
/// Paths are lowercased, and `db/<table>/<file>` paths are truncated to
/// `db/<table>` so notes attach to the table rather than a single file. A
/// note with id 0 gets the next free id for its path; a nonzero id replaces
/// any existing note with that id.
pub fn add_note(&mut self, mut note: Note) -> Note {
let mut path = note.path().to_lowercase();
if path.starts_with("db/") {
// Keep only "db/<table>" for three-segment DB paths.
let mut new_path = path.split('/').collect::<Vec<_>>();
if new_path.len() == 3 {
new_path.pop();
}
path = new_path.join("/");
}
note.set_path(path.to_owned());
match self.file_notes_mut().get_mut(&path) {
Some(notes) => {
if *note.id() == 0 {
// Safe as long as stored vecs are never empty: notes are inserted
// non-empty here and emptied vecs are removed in `delete_note`.
let id = notes.iter().map(|note| note.id()).max().unwrap();
note.set_id(*id + 1);
} else {
// Replacing: drop any previous note with the same id first.
notes.retain(|x| x.id() != note.id());
}
notes.push(note.clone());
note
},
None => {
let notes = vec![note.clone()];
self.file_notes_mut().insert(path.to_owned(), notes);
note
}
}
}
/// Deletes the note with `id` under `path` (case-insensitive), removing the
/// path's entry entirely once its last note is gone so no empty lists linger.
pub fn delete_note(&mut self, path: &str, id: u64) {
    let key = path.to_lowercase();
    let now_empty = match self.file_notes_mut().get_mut(&key) {
        Some(notes) => {
            notes.retain(|note| *note.id() != id);
            notes.is_empty()
        }
        None => false,
    };
    if now_empty {
        self.file_notes_mut().remove(&key);
    }
}
}
impl PackSettings {
/// Deserializes a `PackSettings` from its JSON byte representation.
pub fn load(data: &[u8]) -> Result<Self> {
    let settings = from_slice(data)?;
    Ok(settings)
}
pub fn load_and_update(&mut self, data: &[u8]) -> Result<()> {
let settings: Self = from_slice(data)?;
self.settings_bool.extend(settings.settings_bool);
self.settings_number.extend(settings.settings_number);
self.settings_string.extend(settings.settings_string);
self.settings_text.extend(settings.settings_text);
Ok(())
}
// --- Typed setting lookups; `None` when the key is absent. ---

/// Looks up a single-line string setting.
pub fn setting_string(&self, key: &str) -> Option<&String> {
self.settings_string.get(key)
}
/// Looks up a multiline text setting.
pub fn setting_text(&self, key: &str) -> Option<&String> {
self.settings_text.get(key)
}
/// Looks up a boolean setting.
pub fn setting_bool(&self, key: &str) -> Option<&bool> {
self.settings_bool.get(key)
}
/// Looks up a numeric setting.
pub fn setting_number(&self, key: &str) -> Option<&i32> {
self.settings_number.get(key)
}
// --- Typed setting writers; existing values are overwritten. ---

/// Stores a single-line string setting.
pub fn set_setting_string(&mut self, key: &str, value: &str) {
self.settings_string.insert(key.to_owned(), value.to_owned());
}
/// Stores a multiline text setting.
pub fn set_setting_text(&mut self, key: &str, value: &str) {
self.settings_text.insert(key.to_owned(), value.to_owned());
}
/// Stores a boolean setting.
pub fn set_setting_bool(&mut self, key: &str, value: bool) {
self.settings_bool.insert(key.to_owned(), value);
}
/// Stores a numeric setting.
pub fn set_setting_number(&mut self, key: &str, value: i32) {
self.settings_number.insert(key.to_owned(), value);
}
/// Parses the "diagnostics_files_to_ignore" text setting into structured
/// entries, or `None` when the setting is absent.
///
/// Each non-comment line (lines starting with '#' are skipped) has up to
/// three ';'-separated fields: a path, plus two optional comma-separated
/// lists; missing lists come back empty. Empty lines still produce an entry
/// with an empty path, matching the historical behavior.
pub fn diagnostics_files_to_ignore(&self) -> Option<Vec<(String, Vec<String>, Vec<String>)>> {

    // Splits a comma-separated field into its non-empty entries. Extracted
    // because the original inlined this chain three times.
    fn split_list(field: &str) -> Vec<String> {
        field.split(',')
            .filter(|entry| !entry.is_empty())
            .map(|entry| entry.to_owned())
            .collect()
    }

    self.settings_text.get("diagnostics_files_to_ignore").map(|files_to_ignore| {
        files_to_ignore
            .split('\n')
            .filter(|line| !line.starts_with('#'))
            .filter_map(|line| {
                // `splitn` always yields at least one field, so only the 1-3
                // arms are reachable; the catch-all is kept for safety.
                let fields = line.splitn(3, ';').collect::<Vec<&str>>();
                match fields.len() {
                    3 => Some((fields[0].to_string(), split_list(fields[1]), split_list(fields[2]))),
                    2 => Some((fields[0].to_string(), split_list(fields[1]), vec![])),
                    1 => Some((fields[0].to_string(), vec![], vec![])),
                    _ => None,
                }
            })
            .collect::<Vec<(String, Vec<String>, Vec<String>)>>()
    })
}
}
/// Manual `Default` impl: every field takes its derived default except
/// `authoring_tool`, which starts as "RPFM" so Packs created by this library
/// are tagged as ours.
impl Default for PackHeader {
fn default() -> Self {
Self {
pfh_version: Default::default(),
pfh_file_type: Default::default(),
bitmask: Default::default(),
internal_timestamp: Default::default(),
game_version: Default::default(),
build_number: Default::default(),
authoring_tool: AUTHORING_TOOL_RPFM.to_owned(),
extra_subheader_data: Default::default(),
}
}
}
/// Default header flags: none set.
impl Default for PFHFlags {
fn default() -> Self {
Self::empty()
}
}
/// Default Pack settings: the known keys are pre-registered with empty/false/
/// "None" values so the UI always has something to show.
impl Default for PackSettings {
    fn default() -> Self {
        let mut settings_text = BTreeMap::new();
        settings_text.insert("diagnostics_files_to_ignore".to_owned(), String::new());
        settings_text.insert("import_files_to_ignore".to_owned(), String::new());

        let mut settings_bool = BTreeMap::new();
        settings_bool.insert("disable_autosaves".to_owned(), false);
        settings_bool.insert("do_not_generate_existing_locs".to_owned(), false);

        let mut settings_string = BTreeMap::new();
        settings_string.insert(SETTING_KEY_CF.to_owned(), "None".to_owned());

        Self {
            settings_text,
            settings_string,
            settings_bool,
            settings_number: BTreeMap::new(),
        }
    }
}