use super::super::lspk::{CompressionMethod, FileTableEntry, LspkReader};
use super::decompression::decompress_data;
use super::helpers::get_part_path;
use crate::error::{Error, Result};
use rayon::prelude::*;
use std::collections::HashMap;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
/// LRU-bounded cache of parsed pak-archive file tables, so repeated bulk
/// reads against the same archives do not re-open and re-parse their tables.
pub struct PakReaderCache {
// Parsed file table for each cached pak archive, keyed by archive path.
tables: HashMap<PathBuf, Vec<FileTableEntry>>,
// Maximum number of tables kept at once; clamped to at least 1 in `new`.
max_paks: usize,
// Paths ordered least- to most-recently used; the front entry is evicted first.
access_order: Vec<PathBuf>,
}
impl PakReaderCache {
/// Creates a cache that holds the file tables of at most `max_paks` archives.
///
/// A `max_paks` of 0 is clamped to 1 so the cache can always retain the
/// table of the archive currently being read.
#[must_use]
pub fn new(max_paks: usize) -> Self {
Self {
tables: HashMap::new(),
max_paks: max_paks.max(1),
access_order: Vec::new(),
}
}
/// Loads the file table for `pak_path` into the cache if it is not already
/// present, evicting least-recently-used tables to stay within `max_paks`.
///
/// # Errors
/// Returns an error if the archive cannot be opened or its file table
/// cannot be parsed.
fn ensure_loaded(&mut self, pak_path: &Path) -> Result<()> {
if self.tables.contains_key(pak_path) {
self.update_access_order(pak_path);
return Ok(());
}
let file = File::open(pak_path)?;
let mut reader = LspkReader::with_path(file, pak_path);
let entries = reader.list_files()?;
// Evict from the least-recently-used end until there is room for the
// new table. The new path is not yet in `tables`, so it cannot be evicted.
while self.tables.len() >= self.max_paks && !self.access_order.is_empty() {
let to_evict = self.access_order.remove(0);
self.tables.remove(&to_evict);
}
self.tables.insert(pak_path.to_path_buf(), entries);
self.access_order.push(pak_path.to_path_buf());
Ok(())
}
/// Marks `pak_path` as the most recently used archive by moving it to the
/// back of the access order.
fn update_access_order(&mut self, pak_path: &Path) {
if let Some(pos) = self.access_order.iter().position(|p| p == pak_path) {
self.access_order.remove(pos);
}
self.access_order.push(pak_path.to_path_buf());
}
/// Reads and decompresses several files from `pak_path` in one pass.
///
/// Best-effort: requested paths missing from the archive, stored in an
/// absent part file, or failing to read/decompress are silently skipped,
/// so the returned map contains only the files that were successfully
/// extracted. I/O is sequential per archive part (entries sorted by offset
/// to keep reads forward-seeking); decompression runs in parallel via rayon.
///
/// # Errors
/// Returns an error if the archive's file table cannot be loaded.
pub fn read_files_bulk(
&mut self,
pak_path: &Path,
file_paths: &[&str],
) -> Result<HashMap<String, Vec<u8>>> {
use std::collections::HashSet;
self.ensure_loaded(pak_path)?;
let requested: HashSet<&str> = file_paths.iter().copied().collect();
let table = self
.tables
.get(pak_path)
.ok_or_else(|| Error::FileNotFoundInPak(pak_path.to_string_lossy().to_string()))?;
let entries_to_read: Vec<&FileTableEntry> = table
.iter()
.filter(|e| requested.contains(e.path.to_string_lossy().as_ref()))
.collect();
if entries_to_read.is_empty() {
return Ok(HashMap::new());
}
// Group entries by archive part so each part file is opened exactly once.
let mut entries_by_part: HashMap<u8, Vec<&FileTableEntry>> = HashMap::new();
for entry in entries_to_read {
entries_by_part
.entry(entry.archive_part)
.or_default()
.push(entry);
}
let mut compressed_files: Vec<(String, Vec<u8>, CompressionMethod, u32)> = Vec::new();
for (part, mut part_entries) in entries_by_part {
let part_path = match get_part_path(pak_path, part) {
Some(p) => p,
None => continue,
};
if !part_path.exists() {
tracing::warn!("Archive part file not found: {}", part_path.display());
continue;
}
// Read in offset order to avoid backwards seeks within the part.
part_entries.sort_by_key(|e| e.offset);
let mut part_file = match File::open(&part_path) {
Ok(f) => f,
Err(_) => continue,
};
for entry in part_entries {
if part_file.seek(SeekFrom::Start(entry.offset)).is_err() {
continue;
}
let mut compressed_data = vec![0u8; entry.size_compressed as usize];
if part_file.read_exact(&mut compressed_data).is_err() {
continue;
}
compressed_files.push((
entry.path.to_string_lossy().to_string(),
compressed_data,
entry.compression,
entry.size_decompressed,
));
}
}
// Decompress in parallel. Consuming the vec (`into_par_iter`) moves each
// path `String` and payload into the closure instead of cloning the path
// per file, and the results collect straight into the output map.
Ok(compressed_files
.into_par_iter()
.filter_map(|(path, data, compression, expected_size)| {
decompress_data(&data, compression, expected_size)
.ok()
.map(|bytes| (path, bytes))
})
.collect())
}
}