use crate::prng::Prng;
use crate::rvz::Group;
use crate::rvz::GroupExceptionRead;
use crate::rvz::Hashes;
use crate::rvz::Metadata;
use crate::rvz::Partition;
use crate::rvz::PartitionData;
use crate::rvz::RawData;
use std::cmp;
use std::io;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::ops::Range;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
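/// The kind of data a region maps to: either raw disc data or data that
/// belongs to a partition.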
#[derive(Clone, Copy, Debug)]
pub enum RegionType {
Raw(RawData),
Partition(Partition, PartitionData),
}
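/// A half-open interval `[start, end)` of disc offsets.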
#[derive(Clone, Copy, Debug)]
pub struct Bound {
pub start: u64,
pub end: u64,
}
impl Bound {
#[must_use]
pub const fn contains(&self, val: u64) -> bool {
(val >= self.start) && (val < self.end)
}
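    /// Returns the intersection of two half-open ranges, or `None` when they
    /// do not overlap.
    ///
    /// A minimal sketch of the expected behaviour (illustrative values only):
    ///
    /// ```ignore
    /// let a = Bound { start: 0, end: 10 };
    /// let b = Bound { start: 5, end: 20 };
    /// let o = a.overlap(&b).unwrap();
    /// assert_eq!((o.start, o.end), (5, 10));
    /// ```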
#[must_use]
    pub fn overlap(&self, other: &Self) -> Option<Self> {
        // Two half-open ranges overlap exactly when each starts before the
        // other ends; this also covers the case where one range fully
        // contains the other.
        if (self.start < other.end) && (other.start < self.end) {
            return Some(Self {
                start: cmp::max(self.start, other.start),
                end: cmp::min(self.end, other.end),
            });
        }
        None
    }
}
impl From<Range<u64>> for Bound {
fn from(val: Range<u64>) -> Self {
Self {
start: val.start,
end: val.end,
}
}
}
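/// A contiguous span of the decoded disc image together with the RVZ
/// metadata (raw data or partition) that backs it.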
#[derive(Clone, Debug)]
pub struct Region {
pub range: Bound,
pub type_: RegionType,
}
impl Region {
#[must_use]
pub const fn group_index(&self) -> u32 {
match self.type_ {
RegionType::Raw(raw) => raw.group_index,
RegionType::Partition(_, partition) => partition.group_index,
}
}
#[must_use]
pub const fn group_count(&self) -> u32 {
match self.type_ {
RegionType::Raw(raw) => raw.group_count,
RegionType::Partition(_, partition) => partition.group_count,
}
}
#[must_use]
pub const fn start(&self) -> u64 {
match self.type_ {
RegionType::Raw(raw) => raw.raw_data_offset & !0x7FFF,
RegionType::Partition(_, _) => self.range.start,
}
}
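    /// Start of the backing data on the decoded disc: the 0x8000-aligned raw
    /// data offset for raw regions, or the first sector of the partition for
    /// partition regions.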
#[must_use]
pub const fn partition_start(&self) -> u64 {
match self.type_ {
RegionType::Raw(raw) => raw.raw_data_offset & !0x7FFF,
RegionType::Partition(partition, _) => {
(partition.partition_data[0].first_sector as u64) * 0x8000
}
}
}
#[must_use]
pub const fn end(&self) -> u64 {
match self.type_ {
RegionType::Raw(raw) => {
let start = self.start();
let extra = raw.raw_data_offset & 0x7FFF;
let size = raw.raw_data_size + extra;
start + size
}
RegionType::Partition(_, _) => self.range.end,
}
}
#[must_use]
pub const fn size(&self) -> u64 {
self.end() - self.start()
}
#[must_use]
pub const fn contains(&self, position: u64) -> bool {
self.range.contains(position)
}
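    /// Maps an absolute disc position to the index of the group that stores
    /// it, or `None` if the position falls outside this region.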
#[must_use]
pub(crate) fn group_index_of_position(
&self,
metadata: &Metadata,
position: u64,
) -> Option<u32> {
if self.contains(position) {
let offset = self.region_offset(position);
let chunk_size = u64::from(metadata.disc.chunk_size);
let group_offset = offset / chunk_size;
let index = u32::try_from(u64::from(self.group_index()) + group_offset).unwrap();
return Some(index);
}
None
}
#[must_use]
pub(crate) fn group_of_position<'meta>(
&self,
metadata: &'meta Metadata,
position: u64,
) -> Option<(u32, &'meta Group)> {
self.group_index_of_position(metadata, position)
.map(|index| (index, &metadata.groups[index as usize]))
}
#[must_use]
pub(crate) const fn position_of_group_index(
&self,
metadata: &Metadata,
group_index: u32,
) -> Option<u64> {
if (self.group_index() <= group_index)
&& (group_index < (self.group_index() + self.group_count()))
{
let delta = (group_index - self.group_index()) as u64;
let chunk_size = metadata.disc.chunk_size as u64;
Some(self.start() + (delta * chunk_size))
} else {
None
}
}
#[must_use]
pub(crate) const fn region_offset(&self, position: u64) -> u64 {
position - self.start()
}
#[must_use]
pub(crate) const fn partition_offset(&self, position: u64) -> u64 {
match self.type_ {
RegionType::Raw(_) => position,
RegionType::Partition(partition, _) => {
position - (partition.partition_data[0].first_sector as u64) * 0x8000
}
}
}
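    /// Looks up the stored hash tables for the partition containing
    /// `position`; raw regions carry no hashes, and `None` is also returned
    /// when the metadata contains no hash tables at all.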
#[must_use]
pub(crate) fn partition_hashes<'meta>(
&self,
metadata: &'meta Metadata,
position: u64,
) -> Option<&'meta Hashes> {
match self.type_ {
RegionType::Raw(_) => None,
RegionType::Partition(_, _) => {
let sector = u32::try_from(position / 0x8000).unwrap();
let mut index = 0;
for (i, partition) in metadata.partitions.iter().enumerate() {
let start = partition.partition_data[0].first_sector;
let end = partition.partition_data[1].first_sector
+ partition.partition_data[1].sector_count;
                    if (start..end).contains(&sector) {
index = i;
break;
}
}
if metadata.hashes.is_empty() {
None
} else {
Some(&metadata.hashes[index])
}
}
}
}
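    /// Reads and reconstructs the group containing `position`: the stored
    /// payload is decompressed, unpacked if necessary and, for partition
    /// regions, re-interleaved with its hash blocks before being returned.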
pub(crate) fn read_group<T: Seek + Read>(
&self,
io: &mut T,
metadata: &Metadata,
position: u64,
) -> io::Result<Box<[u8]>> {
let Some((index, group)) = self.group_of_position(metadata, position) else {
return Err(io::Error::from(io::ErrorKind::InvalidInput));
};
let position = self.position_of_group_index(metadata, index).unwrap();
let chunk_size = metadata.disc.chunk_size;
if !(position - self.start()).is_multiple_of(chunk_size.into()) {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"position must be group aligned",
));
}
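        // The top bit of `data_size` marks a compressed group; the remaining
        // 31 bits hold the stored payload size.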
let data_size = group.data_size;
let compressed = 0x8000_0000 & data_size != 0;
let data_size = !0x8000_0000u32 & data_size;
if data_size == 0 {
return Ok(vec![0u8; chunk_size as usize].into_boxed_slice());
}
if data_size > chunk_size {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"group size is unexpectedly large",
));
}
let extracted = extract_compressed(io, metadata, group)?;
let data = match self.type_ {
RegionType::Raw(_) => process_raw(group, extracted, position, self, chunk_size)?,
RegionType::Partition(_, _) => process_partition(
metadata, group, extracted, position, self, chunk_size, compressed,
)?,
};
        let last_index = self.group_index() + self.group_count() - 1;
        if index == last_index {
let last_group_len = (self.end() - self.start()) % u64::from(chunk_size);
if last_group_len != (data.len() as u64) % u64::from(chunk_size) {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"unexpected data left",
));
}
} else if data.len() != (chunk_size as usize) {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"not enough data in group",
));
}
Ok(data)
}
}
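/// Expands RVZ "packed" data: a sequence of records, each a big-endian size
/// whose top bit selects between literal bytes that follow and pseudo-random
/// junk data regenerated from an embedded PRNG seed.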
fn process_packed(data: &[u8], offset: u64, chunk_size: u32) -> io::Result<Box<[u8]>> {
let cursor_size = data.len();
let mut current_position = offset;
let mut cursor = Cursor::new(data);
let mut output = vec![];
#[allow(clippy::cast_possible_truncation)]
while (cursor.position() as usize) < cursor_size {
let size: u32 = cursor.read_u32::<BigEndian>()?;
let use_algorithm = 0x8000_0000u32 & size != 0;
let size = !0x8000_0000u32 & size;
assert!(size <= chunk_size);
if use_algorithm {
assert!(cursor_size - (cursor.position() as usize) >= 68);
let mut prng = Prng::new(&mut cursor)?;
prng.seek(SeekFrom::Current(
i64::try_from(current_position % 0x8000).unwrap(),
))?;
prng.take(size.into()).read_to_end(&mut output)?;
} else {
cursor.by_ref().take(size.into()).read_to_end(&mut output)?;
}
current_position += u64::from(size);
}
Ok(output.into_boxed_slice())
}
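/// Reads a group's stored payload from the underlying reader and, when the
/// top bit of `data_size` is set, pipes it through the disc's configured
/// decompressor.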
fn extract_compressed<T: Read + Seek>(
io: &mut T,
metadata: &Metadata,
group: &Group,
) -> io::Result<Box<[u8]>> {
let data_size = group.data_size;
let compressed = 0x8000_0000u32 & data_size != 0;
let data_size = 0x7FFF_FFFFu32 & data_size;
let mut raw_group = Vec::with_capacity(data_size as usize);
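    // Group offsets are stored divided by four; scale back up to a byte offset.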
let group_position = u64::from(group.data_offset_div4) * 4;
io.seek(SeekFrom::Start(group_position))?;
let mut io = io.take(data_size.into());
io.read_to_end(&mut raw_group)?;
let result = if compressed {
let mut cursor = Cursor::new(raw_group);
let mut io = metadata.disc.compression_io(&mut cursor)?;
let mut out = vec![];
io.read_to_end(&mut out)?;
out
} else {
raw_group
};
Ok(result.into_boxed_slice())
}
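/// Post-processes a raw-data group: packed groups are expanded, plain groups
/// are returned as-is after a size sanity check (only the last group of a
/// region may be shorter than a full chunk).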
fn process_raw(
group: &Group,
decompressed: Box<[u8]>,
position: u64,
region: &Region,
chunk_size: u32,
) -> io::Result<Box<[u8]>> {
let packed = group.packed_size != 0;
if packed {
process_packed(&decompressed, position, chunk_size)
} else {
if (decompressed.len() != (chunk_size as usize))
&& (u64::from(region.group_index() + region.group_count() - 1)
!= (position / u64::from(chunk_size)))
{
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"raw group data is less than expected",
));
}
Ok(decompressed)
}
}
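/// Post-processes a partition group: strips the leading exception list,
/// expands packed data if needed, rebuilds every 0x8000-byte sector from a
/// 0x400-byte hash block plus 0x7C00 bytes of user data, and finally applies
/// any stored hash exceptions.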
fn process_partition(
metadata: &Metadata,
group: &Group,
decompressed: Box<[u8]>,
position: u64,
region: &Region,
chunk_size: u32,
compressed: bool,
) -> io::Result<Box<[u8]>> {
let packed = group.packed_size != 0;
let mut cursor = Cursor::new(&decompressed);
let start_pos = cursor.position();
let exception_list = cursor.read_rvz_exceptions()?;
    let mut exception_list_read: u16 = (cursor.position() - start_pos)
        .try_into()
        .expect("exception list is larger than expected");
if !compressed {
        // Uncompressed exception data is padded so that the payload that
        // follows starts on a 4-byte boundary; skip the padding only when the
        // list is not already aligned.
        let to_read = (4 - (exception_list_read % 4)) % 4;
        if to_read > 0 {
            assert!(to_read == 2);
            exception_list_read += to_read;
            cursor.seek_relative(to_read.into())?;
        }
}
let data_size = cursor.get_ref().len() - (exception_list_read as usize);
let mut output = if packed {
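        // Packed junk data is seeded by the offset within the partition's
        // hashless data stream, so drop 0x400 bytes of hashes for every full
        // 0x8000-byte sector preceding this position.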
let mut current_position = region.partition_offset(position);
current_position -= current_position / 0x8000 * 0x400;
let current_position = current_position;
process_packed(
&decompressed[(exception_list_read as usize)..],
current_position,
chunk_size,
)?
} else {
if data_size != ((chunk_size / 0x8000 * 0x7C00) as usize)
&& (region.group_index() + region.group_count() - 1)
!= region.group_index_of_position(metadata, position).unwrap()
{
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"less partition bytes extracted than expected",
));
}
let mut output = Vec::with_capacity(decompressed.len() - usize::from(exception_list_read));
output.extend_from_slice(&decompressed[(exception_list_read as usize)..]);
output.into_boxed_slice()
};
std::mem::drop(decompressed);
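    // Rebuild full 0x8000-byte sectors: each gets a 0x400-byte hash block
    // (reconstructed from the stored hash tables, or zeroed when none are
    // available) followed by 0x7C00 bytes of user data.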
let sectors = output.len() / 0x7C00;
let mut output_cursor = Cursor::new(&mut output);
let mut result = vec![];
let mut result_cursor = Cursor::new(&mut result);
    for sector_index in 0..sectors {
        let hashes = region.partition_hashes(metadata, position + 0x8000);
        let mut hash = hashes.map_or([0u8; 0x400], |hashes| {
            // Every 0x8000-byte sector has its own hash block, so advance the
            // sector index alongside the 0x7C00 bytes of user data consumed
            // per iteration below.
            let partition_offset = region.partition_offset(position)
                + u64::try_from(sector_index).unwrap() * 0x8000;
            let mut data = [0u8; 0x400];
            let sector = usize::try_from(partition_offset / 0x8000).unwrap();
            let h0 = &hashes.h0[sector];
            let h1 = &hashes.h1[sector / 8];
            let h2 = &hashes.h2[sector / 64];
for i in 0..31 {
data[i * 20..(i + 1) * 20].copy_from_slice(&h0[i]);
}
for i in 0..8 {
data[0x280 + i * 20..0x280 + (i + 1) * 20].copy_from_slice(&h1[i]);
data[0x340 + i * 20..0x340 + (i + 1) * 20].copy_from_slice(&h2[i]);
}
data
});
let mut hash_cursor = Cursor::new(&mut hash);
io::copy(&mut hash_cursor, &mut result_cursor)?;
io::copy(&mut output_cursor.by_ref().take(0x7C00), &mut result_cursor)?;
}
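    // Apply the stored hash exceptions: each entry patches 20 bytes at the
    // given offset inside its sector's 0x400-byte hash block.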
for exception in exception_list.0 {
let sector: usize = (exception.offset / 0x400).into();
let position: usize = sector * 0x8000 + usize::from(exception.offset) % 0x400;
result[position..position + 20].copy_from_slice(&exception.hash);
}
Ok(result.into_boxed_slice())
}