#![allow(clippy::cast_possible_truncation)]
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufReader, Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use super::{
CompressionMethod, FileTableEntry, LspkFooter, LspkHeader, MAGIC, MAX_VERSION, MIN_VERSION,
PATH_LENGTH, PakContents, PakFile, PakPhase, PakProgress, TABLE_ENTRY_SIZE,
};
use crate::error::{Error, Result};
/// Callback invoked with [`PakProgress`] snapshots while reading an archive;
/// the `Sync + Send` bounds allow the callback to be shared across threads.
pub type ProgressCallback<'a> = &'a (dyn Fn(&PakProgress) + Sync + Send);
/// Reader for LSPK `.pak` archives over any `Read + Seek` source.
///
/// Parsing is stateful: `read_header`, `read_footer`, and `read_file_table`
/// populate the optional fields below, in that order.
pub struct LspkReader<R: Read + Seek> {
/// Buffered stream for the primary archive (part 0).
reader: BufReader<R>,
/// Parsed header; `None` until `read_header` succeeds.
header: Option<LspkHeader>,
/// Parsed footer; `None` until `read_footer` succeeds.
footer: Option<LspkFooter>,
/// Entries parsed from the file table; empty until `read_file_table` runs.
file_table: Vec<FileTableEntry>,
/// Location of the base `.pak` file, used to derive sibling part paths.
pak_path: Option<PathBuf>,
/// Lazily opened readers for multi-part archives, keyed by part number
/// (part 0 is served by `reader`, never stored here).
part_readers: HashMap<u8, BufReader<File>>,
}
impl<R: Read + Seek> LspkReader<R> {
/// Creates a reader over `reader`, remembering `path` so sibling archive
/// parts (`<stem>_<n>.<ext>`) can be located later by `get_part_path`.
pub fn with_path(reader: R, path: impl AsRef<Path>) -> Self {
    let pak_path = path.as_ref().to_path_buf();
    Self {
        reader: BufReader::new(reader),
        header: None,
        footer: None,
        file_table: Vec::new(),
        pak_path: Some(pak_path),
        part_readers: HashMap::new(),
    }
}
/// Resolves the on-disk path for archive `part`.
///
/// Part 0 is the base `.pak` file itself; higher parts follow the
/// `<stem>_<part>.<ext>` naming scheme alongside the base file. Returns
/// `None` when no base path was recorded, or when the stem/extension are
/// not valid UTF-8, or the base path has no parent.
fn get_part_path(&self, part: u8) -> Option<PathBuf> {
    let base = self.pak_path.as_deref()?;
    if part == 0 {
        return Some(base.to_path_buf());
    }
    let stem = base.file_stem()?.to_str()?;
    let ext = base.extension()?.to_str()?;
    Some(base.parent()?.join(format!("{stem}_{part}.{ext}")))
}
fn get_part_reader(&mut self, part: u8) -> Result<&mut dyn ReadSeek> {
if part == 0 {
return Ok(&mut self.reader);
}
if !self.part_readers.contains_key(&part) {
let part_path = self
.get_part_path(part)
.ok_or(Error::ArchivePartNotFound { part })?;
if !part_path.exists() {
return Err(Error::ArchivePartMissing {
path: part_path.clone(),
});
}
let file = File::open(&part_path)?;
self.part_readers.insert(part, BufReader::new(file));
}
Ok(self
.part_readers
.get_mut(&part)
.expect("part reader just inserted"))
}
}
/// Object-safe combination of `Read + Seek`, used by `get_part_reader` so
/// the primary reader and on-disk part readers share one return type.
trait ReadSeek: Read + Seek {}
/// Blanket impl: every `Read + Seek` type is a `ReadSeek`.
impl<T: Read + Seek> ReadSeek for T {}
impl<R: Read + Seek> LspkReader<R> {
/// Reads and validates the archive header from the start of the stream.
///
/// Header layout (little-endian): 4-byte magic, 4-byte version,
/// 8-byte footer offset.
///
/// # Errors
/// * `Error::InvalidPakMagic` when the magic bytes do not match.
/// * `Error::InvalidFormat` when the version is outside
///   `MIN_VERSION..=MAX_VERSION`.
/// * Any I/O error from seeking or reading.
pub(crate) fn read_header(&mut self) -> Result<&LspkHeader> {
    self.reader.seek(SeekFrom::Start(0))?;
    let mut magic = [0u8; 4];
    self.reader.read_exact(&mut magic)?;
    if magic != MAGIC {
        return Err(Error::InvalidPakMagic);
    }
    let mut version_bytes = [0u8; 4];
    self.reader.read_exact(&mut version_bytes)?;
    let version = u32::from_le_bytes(version_bytes);
    if !(MIN_VERSION..=MAX_VERSION).contains(&version) {
        return Err(Error::InvalidFormat(format!(
            "unsupported PAK version: {version} (supported: {MIN_VERSION}-{MAX_VERSION})"
        )));
    }
    let mut offset_bytes = [0u8; 8];
    self.reader.read_exact(&mut offset_bytes)?;
    let footer_offset = u64::from_le_bytes(offset_bytes);
    // `Option::insert` stores the header and returns a reference to it in
    // one step, removing the `Some(...)` + `expect` round trip.
    Ok(&*self.header.insert(LspkHeader {
        magic,
        version,
        footer_offset,
    }))
}
/// Reads the footer at `header.footer_offset`: a little-endian u32 file
/// count followed by the u32 compressed size of the file table.
///
/// # Errors
/// * `Error::PakHeaderNotRead` if `read_header` has not succeeded yet.
/// * Any I/O error from seeking or reading.
pub(crate) fn read_footer(&mut self) -> Result<&LspkFooter> {
    let header = self.header.as_ref().ok_or(Error::PakHeaderNotRead)?;
    self.reader.seek(SeekFrom::Start(header.footer_offset))?;
    let mut num_files_bytes = [0u8; 4];
    self.reader.read_exact(&mut num_files_bytes)?;
    let num_files = u32::from_le_bytes(num_files_bytes);
    let mut table_size_bytes = [0u8; 4];
    self.reader.read_exact(&mut table_size_bytes)?;
    let table_size_compressed = u32::from_le_bytes(table_size_bytes);
    // `Option::insert` stores the footer and returns a reference to it,
    // replacing the `Some(...)` + `expect` round trip.
    Ok(&*self.footer.insert(LspkFooter {
        num_files,
        table_size_compressed,
    }))
}
pub fn read_file_table(&mut self) -> Result<&[FileTableEntry]> {
let footer = self.footer.as_ref().ok_or(Error::PakFooterNotRead)?;
let header = self.header.as_ref().ok_or(Error::PakHeaderNotRead)?;
let num_files = footer.num_files as usize;
let table_size_compressed = footer.table_size_compressed as usize;
let table_size_decompressed = num_files * TABLE_ENTRY_SIZE;
let mut compressed_table = vec![0u8; table_size_compressed];
self.reader.read_exact(&mut compressed_table)?;
let decompressed_table =
lz4_flex::block::decompress(&compressed_table, table_size_decompressed).map_err(
|e| Error::DecompressionError(format!("Failed to decompress file table: {e}")),
)?;
self.file_table.clear();
self.file_table.reserve(num_files);
for i in 0..num_files {
let entry_start = i * TABLE_ENTRY_SIZE;
let entry_bytes = &decompressed_table[entry_start..entry_start + TABLE_ENTRY_SIZE];
let entry = self.parse_file_entry(entry_bytes, header.version)?;
self.file_table.push(entry);
}
Ok(&self.file_table)
}
/// Decodes one `TABLE_ENTRY_SIZE`-byte file-table record.
///
/// Layout (little-endian): NUL-padded path in the first `PATH_LENGTH`
/// bytes, then offset low u32 (at 256) and high u16 (at 260), archive
/// part byte, flags byte, compressed size u32, decompressed size u32.
/// NOTE(review): the hard-coded offsets assume `PATH_LENGTH == 256` —
/// confirm against the constant's definition.
fn parse_file_entry(&self, bytes: &[u8], _version: u32) -> Result<FileTableEntry> {
    // The path field is NUL-terminated inside its fixed-width slot; a
    // missing terminator means the name fills the whole field.
    let name_len = bytes[..PATH_LENGTH]
        .iter()
        .position(|&b| b == 0)
        .unwrap_or(PATH_LENGTH);
    let path = PathBuf::from(String::from_utf8_lossy(&bytes[..name_len]).as_ref());
    let le_u32 =
        |at: usize| u32::from_le_bytes(bytes[at..at + 4].try_into().expect("fixed-size slice"));
    let offset_low = le_u32(256);
    let offset_high =
        u16::from_le_bytes(bytes[260..262].try_into().expect("fixed-size slice"));
    // 48-bit offset assembled from the low/high halves; the mask keeps
    // only those 48 bits.
    let offset =
        (u64::from(offset_low) | (u64::from(offset_high) << 32)) & 0x0000_FFFF_FFFF_FFFF;
    let archive_part = bytes[262];
    let flags = bytes[263];
    Ok(FileTableEntry {
        path,
        offset,
        size_compressed: le_u32(264),
        size_decompressed: le_u32(268),
        compression: CompressionMethod::from_flags(flags),
        flags,
        archive_part,
    })
}
/// Reads an entry's raw bytes from the correct archive part and
/// decompresses them according to `entry.compression`.
///
/// A `size_decompressed` of 0 returns the raw bytes unchanged
/// (presumably marking a stored/empty payload — TODO confirm against the
/// writer). The original code also re-checked `CompressionMethod::None`
/// before the match, leaving that match arm unreachable; the None case
/// is now handled only by the match, with identical behavior.
pub fn decompress_file(&mut self, entry: &FileTableEntry) -> Result<Vec<u8>> {
    let reader = self.get_part_reader(entry.archive_part)?;
    reader.seek(SeekFrom::Start(entry.offset))?;
    let mut compressed = vec![0u8; entry.size_compressed as usize];
    reader.read_exact(&mut compressed)?;
    if entry.size_decompressed == 0 {
        return Ok(compressed);
    }
    match entry.compression {
        CompressionMethod::None => Ok(compressed),
        CompressionMethod::Lz4 => {
            self.decompress_lz4(&compressed, entry.size_decompressed as usize, &entry.path)
        }
        CompressionMethod::Zlib => {
            self.decompress_zlib(&compressed, entry.size_decompressed as usize, &entry.path)
        }
    }
}
/// Decompresses LZ4 data, trying several layouts in a fixed order: raw
/// block at the expected size, raw block at a padded size, size-prepended
/// block, then LZ4 frame. The first successful decode wins; an error is
/// returned only when every attempt fails.
fn decompress_lz4(
    &self,
    compressed: &[u8],
    expected_size: usize,
    path: &Path,
) -> Result<Vec<u8>> {
    // Raw block attempts: the recorded size first, then a padded guess
    // for entries whose recorded size is unreliable.
    let padded_size = expected_size.saturating_mul(2).max(65536);
    for size in [expected_size, padded_size] {
        if let Ok(data) = lz4_flex::block::decompress(compressed, size) {
            return Ok(data);
        }
    }
    if let Ok(data) = lz4_flex::decompress_size_prepended(compressed) {
        return Ok(data);
    }
    let mut frame = lz4_flex::frame::FrameDecoder::new(compressed);
    let mut decompressed = Vec::with_capacity(expected_size);
    if frame.read_to_end(&mut decompressed).is_ok() && !decompressed.is_empty() {
        return Ok(decompressed);
    }
    let path_display = path.display();
    let compressed_len = compressed.len();
    Err(Error::DecompressionError(format!(
        "Failed to decompress LZ4 data for {path_display}: all methods failed (compressed: {compressed_len} bytes, expected: {expected_size} bytes)"
    )))
}
fn decompress_zlib(
&self,
compressed: &[u8],
expected_size: usize,
path: &Path,
) -> Result<Vec<u8>> {
use flate2::read::ZlibDecoder;
let mut decoder = ZlibDecoder::new(compressed);
let mut decompressed = Vec::with_capacity(expected_size);
decoder.read_to_end(&mut decompressed).map_err(|e| {
let path_display = path.display();
Error::DecompressionError(format!(
"Failed to decompress Zlib data for {path_display}: {e}"
))
})?;
Ok(decompressed)
}
/// Reads the whole archive: header, footer, file table, then every file,
/// reporting progress through `progress` (a no-op when `None`).
///
/// Per-file decompression failures are collected into `contents.errors`
/// rather than aborting the read.
///
/// # Errors
/// Returns an error only for failures in the header/footer/table phases
/// or I/O setup; individual file failures never abort the loop.
pub fn read_all(&mut self, progress: Option<ProgressCallback>) -> Result<PakContents> {
    // Substitute a no-op callback so the body can call it unconditionally.
    let progress = progress.unwrap_or(&|_| {});
    progress(&PakProgress {
        phase: PakPhase::ReadingHeader,
        current: 0,
        total: 1,
        current_file: None,
    });
    self.read_header()?;
    self.read_footer()?;
    progress(&PakProgress {
        phase: PakPhase::ReadingTable,
        current: 0,
        total: 1,
        current_file: None,
    });
    self.read_file_table()?;
    let version = self.header.as_ref().ok_or(Error::PakHeaderNotRead)?.version;
    let mut contents = PakContents::new(version);
    // Move the table out instead of deep-cloning every entry: the loop
    // needs `&mut self` for `decompress_file`, which a borrow of
    // `self.file_table` would forbid. The table is restored below, and
    // `decompress_file` never touches `self.file_table`.
    let entries = std::mem::take(&mut self.file_table);
    let total_files = entries.len();
    for (i, entry) in entries.iter().enumerate() {
        let file_name = entry.path.file_name().map_or_else(
            || entry.path.to_string_lossy().to_string(),
            |n| n.to_string_lossy().to_string(),
        );
        progress(&PakProgress {
            phase: PakPhase::DecompressingFiles,
            current: i + 1,
            total: total_files,
            current_file: Some(file_name),
        });
        match self.decompress_file(entry) {
            Ok(data) => {
                contents.files.push(PakFile {
                    path: entry.path.clone(),
                    data,
                });
            }
            Err(e) => {
                contents.errors.push((entry.path.clone(), e.to_string()));
            }
        }
    }
    self.file_table = entries;
    progress(&PakProgress {
        phase: PakPhase::Complete,
        current: total_files,
        total: total_files,
        current_file: None,
    });
    Ok(contents)
}
/// Returns a copy of the file table, lazily running any parsing phases
/// (header, footer, table — in dependency order) that have not happened
/// yet. Note that an archive with zero files re-reads the table on each
/// call, since an empty table is indistinguishable from an unread one.
pub fn list_files(&mut self) -> Result<Vec<FileTableEntry>> {
    if self.header.is_none() {
        self.read_header()?;
    }
    if self.footer.is_none() {
        self.read_footer()?;
    }
    if self.file_table.is_empty() {
        self.read_file_table()?;
    }
    let entries = self.file_table.clone();
    Ok(entries)
}
}