use crate::prng::Prng;
use crate::rvz::Group;
use crate::rvz::GroupExceptionRead;
use crate::rvz::Hashes;
use crate::rvz::Metadata;
use crate::rvz::Partition;
use crate::rvz::PartitionData;
use crate::rvz::RawData;
use std::cmp;
use std::io;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::ops::Range;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
/// How a `Region`'s bytes are stored in the image: either raw
/// (unpartitioned) data or data belonging to a disc partition.
#[derive(Clone, Copy, Debug)]
pub enum RegionType {
    Raw(RawData),
    Partition(Partition, PartitionData),
}
/// A half-open interval `[start, end)` of absolute disc offsets.
#[derive(Clone, Copy, Debug)]
pub struct Bound {
    start: u64,
    end: u64,
}
impl Bound {
    /// Whether `val` lies inside the half-open interval `[start, end)`.
    #[must_use]
    pub const fn contains(&self, val: u64) -> bool {
        (val >= self.start) && (val < self.end)
    }
    /// Intersection of two bounds, or `None` when they do not overlap.
    ///
    /// Fixed: the previous endpoint-containment test (`contains(other.start)
    /// || contains(other.end)`) missed the case where `other` fully encloses
    /// `self`, and `other.end == self.end` (half-open), wrongly returning
    /// `None` for genuinely overlapping ranges. Computing the clamped
    /// interval directly and checking it is non-empty covers all cases.
    #[must_use]
    pub fn overlap(&self, other: &Self) -> Option<Self> {
        let start = cmp::max(self.start, other.start);
        let end = cmp::min(self.end, other.end);
        if start < end {
            Some(Self { start, end })
        } else {
            None
        }
    }
}
impl From<Range<u64>> for Bound {
    /// Converts a `Range<u64>` into the equivalent half-open `Bound`.
    fn from(range: Range<u64>) -> Self {
        let Range { start, end } = range;
        Self { start, end }
    }
}
/// Number of bytes remaining between the cursor's current position and the
/// end of its backing slice.
fn cursor_left(cursor: &Cursor<&mut [u8]>) -> u64 {
    let total = u64::try_from(cursor.get_ref().len()).unwrap();
    total - cursor.position()
}
/// A contiguous span of the decoded disc image together with a description
/// of how its bytes are stored in the compressed file.
#[derive(Clone, Debug)]
pub struct Region {
    // Absolute disc offsets covered by this region.
    pub range: Bound,
    // Backing storage: raw data or partition data.
    pub type_: RegionType,
}
impl Region {
#[must_use]
pub const fn group_index(&self) -> u32 {
match self.type_ {
RegionType::Raw(raw) => raw.group_index,
RegionType::Partition(_, partition) => partition.group_index,
}
}
#[must_use]
pub const fn group_count(&self) -> u32 {
match self.type_ {
RegionType::Raw(raw) => raw.group_count,
RegionType::Partition(_, partition) => partition.group_count,
}
}
#[must_use]
pub const fn start(&self) -> u64 {
match self.type_ {
RegionType::Raw(raw) => raw.raw_data_offset & !0x7FFF,
RegionType::Partition(_, _) => self.range.start,
}
}
#[must_use]
pub const fn end(&self) -> u64 {
match self.type_ {
RegionType::Raw(raw) => {
let start = self.start();
let extra = raw.raw_data_offset & 0x7FFF;
let size = raw.raw_data_size + extra;
start + size
}
RegionType::Partition(_, _) => self.range.end,
}
}
#[must_use]
pub const fn size(&self) -> u64 {
self.end() - self.start()
}
#[must_use]
pub const fn contains(&self, position: u64) -> bool {
self.range.contains(position)
}
#[must_use]
fn group_of_position<'meta>(
&self,
metadata: &'meta Metadata,
position: u64,
) -> Option<(usize, &'meta Group)> {
if self.contains(position) {
let offset = position - self.start();
let chunk_size = u64::from(metadata.disc.chunk_size);
let group_offset = offset / chunk_size;
let index = usize::try_from(u64::from(self.group_index()) + group_offset).unwrap();
return Some((index, &metadata.groups[index]));
}
None
}
fn group_io<'io, T1: Read + Seek>(
metadata: &Metadata,
group: &Group,
io: &'io mut T1,
) -> io::Result<Box<dyn Read + 'io>> {
let data_size = u64::from(group.data_size);
let compressed = (data_size & 0x8000_0000) != 0;
let data_size = 0x7FFF_FFFF & data_size;
let offset = u64::from(group.data_offset_div4) * 4;
io.seek(SeekFrom::Start(offset))?;
let io = io.by_ref().take(data_size);
let io: Box<dyn Read> = if compressed {
Box::new(metadata.disc.compression.io(io)?)
} else {
Box::new(io)
};
Ok(io)
}
#[must_use]
const fn region_offset(&self, position: u64) -> u64 {
position - self.start()
}
#[must_use]
const fn partition_offset(&self, position: u64) -> u64 {
match self.type_ {
RegionType::Raw(_) => position,
RegionType::Partition(partition, _) => {
position - (partition.partition_data[0].first_sector as u64) * 0x8000
}
}
}
#[must_use]
fn partition_hashes<'meta>(
&self,
metadata: &'meta Metadata,
position: u64,
) -> Option<&'meta Hashes> {
match self.type_ {
RegionType::Raw(_) => None,
RegionType::Partition(_, _) => {
let sector = u32::try_from(position / 0x8000).unwrap();
let mut index = 0;
for (i, partition) in metadata.partitions.iter().enumerate() {
let start = partition.partition_data[0].first_sector;
let end = partition.partition_data[1].first_sector
+ partition.partition_data[1].sector_count;
if (start..end).contains(§or) {
index = i;
break;
}
}
if metadata.hashes.is_empty() {
None
} else {
Some(&metadata.hashes[index])
}
}
}
}
pub(crate) fn read_region<T: Seek + Read>(
&self,
io: &mut T,
metadata: &Metadata,
position: u64,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let mut position = position;
let mut amount_read = 0;
loop {
let Some((index, group)) = self.group_of_position(metadata, position) else {
return Err(io::Error::from(io::ErrorKind::InvalidInput));
};
let mut io = Self::group_io(metadata, group, io)?;
let mut io = io.as_mut();
let read = match self.type_ {
RegionType::Raw(_) => self.read_raw_data(&mut io, metadata, position, buf)?,
RegionType::Partition(_, _) => {
self.read_partition(&mut io, metadata, position, buf)?
}
};
amount_read += read;
position += read;
if u64::try_from(buf.get_ref().len()).unwrap() == buf.position()
|| position >= self.end()
{
return Ok(amount_read);
}
let last_index = self.group_index() + self.group_count() - 1;
if (read == 0) && (index == usize::try_from(last_index).unwrap()) {
let left = self.end() - position;
assert!(left < 0x8000);
let to_read = cmp::min(left, cursor_left(buf));
let read = io::copy(&mut io::repeat(0).take(to_read), buf)?;
return Ok(amount_read + read);
}
}
}
fn read_raw_data<T: Read + ?Sized>(
&self,
io: &mut T,
metadata: &Metadata,
position: u64,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let Some((_, group)) = self.group_of_position(metadata, position) else {
return Err(io::Error::from(io::ErrorKind::InvalidInput));
};
let group_size = metadata.disc.chunk_size;
let group_offset = position % u64::from(group_size);
if group.data_size == 0 {
let to_read = cmp::min(cursor_left(buf), u64::from(group_size) - group_offset);
io::copy(&mut io::repeat(0).take(to_read), buf)
} else if group.packed_size != 0 {
Self::read_raw_packed(io, position, group_size, group, buf)
} else {
Self::read_raw_data_normal(io, group_offset, buf)
}
}
fn read_raw_data_normal<T: Read + ?Sized>(
io: &mut T,
group_offset: u64,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
io::copy(&mut io.take(group_offset), &mut io::sink())?;
if u64::try_from(buf.get_ref().len()).unwrap() == buf.position() {
Ok(0)
} else {
let to_read = cursor_left(buf);
io::copy(&mut io.take(to_read), buf)
}
}
fn read_raw_packed<T: Read + ?Sized>(
mut io: &mut T,
position: u64,
group_size: u32,
group: &Group,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let mut amount = 0u64;
let mut to_skip = position % u64::from(group_size);
let mut current_position = position / u64::from(group_size) * u64::from(group_size);
let mut input_left: u64 = group.packed_size.into();
while input_left != 0 && cursor_left(buf) != 0 {
let run_size = io.read_u32::<BigEndian>()?;
input_left -= 4;
let use_algorithm = (run_size & 0x8000_0000) != 0;
let run_size = run_size & 0x7FFF_FFFF;
let read = if use_algorithm {
assert!(input_left >= 68);
input_left -= 68;
if to_skip >= run_size.into() {
io::copy(&mut io.take(68), &mut io::sink())?;
to_skip -= u64::from(run_size);
current_position += u64::from(run_size);
continue;
}
let mut prng = Prng::new(&mut io)?;
prng.seek(SeekFrom::Current(
i64::try_from(current_position % 0x8000).unwrap(),
))?;
prng.seek(SeekFrom::Current(to_skip.try_into().unwrap()))?;
current_position += to_skip;
let amount_left = run_size - u32::try_from(to_skip).unwrap();
to_skip = 0;
let to_read = cmp::min(u64::from(amount_left), cursor_left(buf));
let read = io::copy(&mut prng.take(to_read), buf)?;
current_position += read;
read
} else {
if to_skip >= run_size.into() {
let read = io::copy(&mut io.take(run_size.into()), &mut io::sink())?;
to_skip -= read;
input_left -= read;
current_position += read;
continue;
}
let read = if to_skip != 0 {
let read = io::copy(&mut io.take(to_skip), &mut io::sink())?;
to_skip -= read;
input_left -= read;
current_position += read;
read
} else {
0
};
assert!(to_skip == 0);
let to_read = cmp::min(u64::from(run_size) - read, cursor_left(buf));
let read = io::copy(&mut io.take(to_read), buf)?;
input_left -= read;
current_position += read;
read
};
amount += read;
}
Ok(amount)
}
fn read_partition<T: Read + ?Sized>(
&self,
mut io: &mut T,
metadata: &Metadata,
position: u64,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let Some((index, group)) = self.group_of_position(metadata, position) else {
return Err(io::Error::from(io::ErrorKind::InvalidInput));
};
let group_size = metadata.disc.chunk_size;
let group_offset = self.region_offset(position) % u64::from(group_size);
let RegionType::Partition(partition, _) = self.type_ else {
panic!("in read_partition but enum isn't Partition!")
};
let groups = u32::try_from(index).unwrap() - partition.partition_data[0].group_index;
let sectors = groups * (group_size / 0x8000);
let partition_offset = u64::from(sectors) * 0x7C00;
let hashes = self.partition_hashes(metadata, position);
if group.data_size == 0 {
let to_read = cmp::min(cursor_left(buf), u64::from(group_size) - group_offset);
self.read_partition_normal(
&mut io::repeat(0).take(to_read),
position,
group_offset,
group_size,
hashes,
buf,
)
} else {
let exception_list = io.read_rvz_exceptions()?; if group.data_size & 0x8000_0000 == 0 {
let _ = io.read_u16::<BigEndian>()?;
}
if group.packed_size != 0 {
self.read_partition_packed(
io,
position,
partition_offset,
group_offset,
group_size,
group,
hashes,
buf,
)
} else {
self.read_partition_normal(io, position, group_offset, group_size, hashes, buf)
}
}
}
fn read_partition_normal<T: Read + ?Sized>(
&self,
io: &mut T,
position: u64,
group_offset: u64,
group_size: u32,
hashes: Option<&Hashes>,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let sectors_to_skip = group_offset / 0x8000;
io::copy(&mut io.take(sectors_to_skip * 0x7C00), &mut io::sink())?;
let mut sector_offset = group_offset % 0x8000;
let mut amount_read = 0;
let sectors_left = u64::from(group_size / 0x8000) - sectors_to_skip;
for _ in 0..sectors_left {
let to_read = 0x8000 - 0x400;
let read =
self.read_sector(&mut io.take(to_read), position, sector_offset, hashes, buf)?;
if read == 0 {
break;
}
amount_read += read;
sector_offset = (sector_offset + amount_read) % 0x8000;
}
Ok(amount_read)
}
fn read_sector<T: Read + ?Sized>(
&self,
io: &mut T,
position: u64,
sector_offset: u64,
hashes: Option<&Hashes>,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let mut amount_read = 0;
let mut sector_offset = sector_offset;
if sector_offset < 0x400 && u64::try_from(buf.get_ref().len()).unwrap() != buf.position() {
let hash = hashes.map_or([0u8; 0x400], |hashes| {
let partition_offset = self.partition_offset(position);
let mut data = [0u8; 0x400];
let sector = usize::try_from(partition_offset / 0x8000).unwrap();
let h0 = &hashes.h0[sector];
let h1 = &hashes.h1[sector / 8];
let h2 = &hashes.h2[sector / 64];
for i in 0..31 {
data[i * 20..(i + 1) * 20].copy_from_slice(&h0[i]);
}
for i in 0..8 {
data[0x280 + i * 20..0x280 + (i + 1) * 20].copy_from_slice(&h1[i]);
data[0x340 + i * 20..0x340 + (i + 1) * 20].copy_from_slice(&h2[i]);
}
data
});
let mut hash_io = Cursor::new(&hash);
hash_io.seek(SeekFrom::Start(sector_offset))?;
let to_read = cmp::min(0x400 - sector_offset, cursor_left(buf));
let amount = io::copy(&mut hash_io.take(to_read), buf)?;
amount_read += amount;
sector_offset = (sector_offset + amount_read) % 8000;
}
if u64::try_from(buf.get_ref().len()).unwrap() == buf.position() {
return Ok(amount_read);
}
io::copy(&mut io.take(sector_offset - 0x400), &mut io::sink())?;
if u64::try_from(buf.get_ref().len()).unwrap() != buf.position() {
let to_read = cmp::min(0x8000 - sector_offset, cursor_left(buf));
let read = io::copy(&mut io.take(to_read), buf)?;
amount_read += read;
}
Ok(amount_read)
}
#[allow(clippy::too_many_arguments)]
fn read_partition_packed<T: Read + ?Sized>(
&self,
io: &mut T,
position: u64,
partition_offset: u64,
group_offset: u64,
group_size: u32,
group: &Group,
hashes: Option<&Hashes>,
buf: &mut Cursor<&mut [u8]>,
) -> io::Result<u64> {
let mut amount_read = 0u64;
let sectors_to_skip = group_offset / 0x8000;
let mut sector_offset = group_offset % 0x8000;
let data = Self::read_packed_all(io, partition_offset, group)?;
let mut cursor = Cursor::new(data);
let bytes_to_skip = 0x7C00 * sectors_to_skip;
cursor.seek(SeekFrom::Start(bytes_to_skip))?;
let sectors_left = u64::from(group_size / 0x8000) - sectors_to_skip;
for _ in 0..sectors_left {
let to_read = 0x8000 - 0x400;
let read = self.read_sector(
&mut (&mut cursor).take(to_read),
position,
sector_offset,
hashes,
buf,
)?;
if read == 0 {
break;
}
amount_read += read;
sector_offset = (sector_offset + amount_read) % 0x8000;
}
Ok(amount_read)
}
fn read_packed_all<T: Read + ?Sized>(
mut io: &mut T,
partition_offset: u64,
group: &Group,
) -> io::Result<Vec<u8>> {
let mut input_left: u64 = group.packed_size.into();
let mut buf = vec![];
let mut offset = 0u64;
while input_left != 0 {
let size = io.read_u32::<BigEndian>()?;
input_left -= 4;
let use_algorithm = (size & 0x8000_0000) != 0;
let size = size & 0x7FFF_FFFF;
if use_algorithm {
let mut prng = Prng::new(&mut io)?;
prng.seek(SeekFrom::Start((partition_offset + offset) % 0x8000))?;
let mut tmp = vec![0u8; size.try_into().unwrap()];
prng.take(size.into()).read_exact(&mut tmp)?;
input_left -= 68;
buf.append(&mut tmp);
} else {
let mut tmp = vec![0u8; size.try_into().unwrap()];
io.take(size.into()).read_exact(&mut tmp)?;
input_left -= u64::from(size);
buf.append(&mut tmp);
}
offset += u64::from(size);
}
Ok(buf)
}
}