use std::convert::TryFrom;
use std::io::{BufReader, Cursor, Read, Seek, SeekFrom};
use bitreader::BitReader;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::FromPrimitive;
use crate::const_assert;
use crate::error::{Error, Result};
use crate::header::{Header, HeaderV5};
use crate::huffman::{lookup_len, HuffmanDecoder};
/// Size in bytes of a V5 map entry when the CHD itself is uncompressed.
pub(crate) const V5_UNCOMPRESSED_MAP_ENTRY_SIZE: usize = 4;
/// Size in bytes of a V5 map entry when the CHD itself is compressed.
pub(crate) const V5_COMPRESSED_MAP_ENTRY_SIZE: usize = 12;
/// Size in bytes of a legacy V3/V4 map entry.
const V3_MAP_ENTRY_SIZE: usize = 16;
/// Size in bytes of a legacy V1/V2 map entry.
const V1_MAP_ENTRY_SIZE: usize = 8;
/// Low nibble of a legacy entry's flag byte holds the compression type.
const MAP_ENTRY_FLAG_TYPE_MASK: u8 = 0x0f;
/// Flag bit set on legacy entries that carry no CRC.
const MAP_ENTRY_FLAG_NO_CRC: u8 = 0x10;
/// Compression types for V5 map entries.
///
/// Values 0-8 appear in the final decoded map. Values 7-13 occur only inside
/// the compressed map stream: the RLE markers encode runs of the previous
/// type byte, and the Self*/Parent* delta forms are rewritten to plain
/// `CompressionSelf` / `CompressionParent` while the map is decoded
/// (see `read_map_v5`).
#[repr(u8)]
#[derive(FromPrimitive, ToPrimitive)]
pub enum CompressionTypeV5 {
    // Hunk is compressed with one of the four codec slots (0..=3) declared
    // by the header — NOTE(review): slot mapping assumed, confirm against header.
    CompressionType0 = 0,
    CompressionType1 = 1,
    CompressionType2 = 2,
    CompressionType3 = 3,
    // Hunk data is stored raw (always exactly one hunk long).
    CompressionNone = 4,
    // Hunk duplicates another hunk in this file; offset is a hunk index.
    CompressionSelf = 5,
    // Hunk comes from the parent CHD; offset is a unit index.
    CompressionParent = 6,
    // Map-stream-only: short run of the previous type byte.
    CompressionRleSmall = 7,
    // Map-stream-only: long run of the previous type byte.
    CompressionRleLarge = 8,
    // Map-stream-only: self-reference to the same offset as the last one.
    CompressionSelf0 = 9,
    // Map-stream-only: self-reference to the last self-offset + 1.
    CompressionSelf1 = 10,
    // Map-stream-only: parent reference derived from the hunk's own position.
    CompressionParentSelf = 11,
    // Map-stream-only: parent reference to the same offset as the last one.
    CompressionParent0 = 12,
    // Map-stream-only: last parent offset advanced by one hunk's worth of units.
    CompressionParent1 = 13,
}
/// Compression types for legacy (V1-V4) map entries, stored in the low
/// nibble of the entry's flag byte (see `MAP_ENTRY_FLAG_TYPE_MASK`).
#[repr(u8)]
#[derive(FromPrimitive, ToPrimitive)]
pub enum CompressionTypeLegacy {
    Invalid = 0,
    // Hunk data is compressed; offset/length point into this file.
    Compressed = 1,
    // Hunk data is stored raw; offset/length point into this file.
    Uncompressed = 2,
    // NOTE(review): presumably hunk data packed into the map entry itself
    // ("mini" hunks) — semantics not visible in this file, confirm elsewhere.
    Mini = 3,
    // NOTE(review): presumably a reference to another hunk in this file.
    SelfHunk = 4,
    // NOTE(review): presumably a reference to a hunk in the parent CHD.
    ParentHunk = 5,
    // NOTE(review): externally compressed — semantics not visible here.
    ExternalCompressed = 6,
}
/// Raw V5 map bytes, plus whether the owning CHD is compressed (which fixes
/// the entry stride) and the hunk size in bytes (see `read_map_v5`).
pub struct RawMapV5(Vec<u8>, bool, u32);
/// Parsed legacy (V1-V4) map entries, one per hunk.
pub struct RawMapLegacy(Vec<LegacyMapEntry>);
impl From<RawMapV5> for Vec<u8> {
fn from(map: RawMapV5) -> Self {
map.0
}
}
impl From<&RawMapV5> for Vec<u8> {
    /// Copies the map's raw backing bytes into a fresh vector.
    fn from(map: &RawMapV5) -> Self {
        map.0.to_vec()
    }
}
/// One entry of a legacy (V1-V4) hunk map.
pub struct LegacyMapEntry {
    // Absolute file offset of the hunk's data.
    offset: u64,
    // CRC32 of the hunk data when the format records one (V3/V4 only).
    crc: Option<u32>,
    // Stored length of the hunk data in bytes.
    length: u32,
    // Low nibble: compression type; 0x10 bit: no-CRC flag.
    flags: u8,
}
/// Borrowed view over one 12-byte entry of a compressed-CHD V5 map
/// (layout: type u8 | length u24 | offset u48 | crc u16, big-endian).
pub struct V5CompressedMapEntry<'a>(&'a [u8; V5_COMPRESSED_MAP_ENTRY_SIZE]);
/// Borrowed view over one 4-byte entry of an uncompressed-CHD V5 map,
/// paired with the hunk size in bytes used to scale the stored index.
pub struct V5UncompressedMapEntry<'a>(&'a [u8; V5_UNCOMPRESSED_MAP_ENTRY_SIZE], u32);
impl LegacyMapEntry {
pub fn hunk_type(&self) -> Result<CompressionTypeLegacy> {
CompressionTypeLegacy::from_u8(self.flags & MAP_ENTRY_FLAG_TYPE_MASK)
.ok_or(Error::UnsupportedFormat)
}
pub fn block_offset(&self) -> u64 {
self.offset
}
pub fn block_size(&self) -> u32 {
self.length
}
pub fn hunk_crc(&self) -> Option<u32> {
self.crc
}
#[inline(always)]
pub(crate) fn prove_compressed(&self) -> Result<CompressedEntryProof> {
match self.hunk_type()? {
CompressionTypeLegacy::Compressed => {
Ok(CompressedEntryProof(self.block_offset(), self.block_size()))
}
_ => Err(Error::InvalidParameter),
}
}
#[inline(always)]
pub(crate) fn prove_uncompressed(&self) -> Result<UncompressedEntryProof> {
match self.hunk_type()? {
CompressionTypeLegacy::Uncompressed => Ok(UncompressedEntryProof(
self.block_offset(),
self.block_size(),
)),
_ => Err(Error::InvalidParameter),
}
}
}
impl V5CompressedMapEntry<'_> {
pub fn hunk_type(&self) -> Result<CompressionTypeV5> {
CompressionTypeV5::from_u8(self.0[0]).ok_or(Error::UnsupportedFormat)
}
pub fn block_offset(&self) -> Result<u64> {
Ok(Cursor::new(&self.0[4..]).read_u48::<BigEndian>()?)
}
pub fn block_size(&self) -> Result<u32> {
Ok(Cursor::new(&self.0[1..]).read_u24::<BigEndian>()?)
}
pub fn hunk_crc(&self) -> Result<u16> {
Ok(Cursor::new(&self.0[10..]).read_u16::<BigEndian>()?)
}
#[inline(always)]
pub(crate) fn prove_compressed(&self) -> Result<CompressedEntryProof> {
match self.hunk_type()? {
CompressionTypeV5::CompressionType0
| CompressionTypeV5::CompressionType1
| CompressionTypeV5::CompressionType2
| CompressionTypeV5::CompressionType3 => Ok(CompressedEntryProof(
self.block_offset()?,
self.block_size()?,
)),
_ => Err(Error::InvalidParameter),
}
}
#[inline(always)]
pub(crate) fn prove_uncompressed(&self) -> Result<UncompressedEntryProof> {
match self.hunk_type()? {
CompressionTypeV5::CompressionNone => Ok(UncompressedEntryProof(
self.block_offset()?,
self.block_size()?,
)),
_ => Err(Error::InvalidParameter),
}
}
}
impl V5UncompressedMapEntry<'_> {
    /// Absolute file offset: the stored big-endian u32 hunk index scaled by
    /// the hunk size.
    pub fn block_offset(&self) -> Result<u64> {
        let index = u32::from_be_bytes(*self.0);
        Ok(u64::from(index) * u64::from(self.1))
    }

    /// Hunk size in bytes (every entry of an uncompressed map is one hunk).
    pub fn block_size(&self) -> u32 {
        self.1
    }

    /// Proof token: entries of an uncompressed map are always uncompressed,
    /// so this only fails if the offset computation does (it cannot here).
    #[inline(always)]
    pub(crate) fn prove_uncompressed(&self) -> Result<UncompressedEntryProof> {
        Ok(UncompressedEntryProof(
            self.block_offset()?,
            self.block_size(),
        ))
    }
}
/// A hunk map in either of the two on-disk families.
pub enum Map {
    /// V5 map, held as raw entry bytes (already expanded if the CHD is compressed).
    V5(RawMapV5),
    /// Parsed V1-V4 map.
    Legacy(RawMapLegacy),
}
/// A borrowed view of a single map entry, tagged by map flavor.
pub enum MapEntry<'a> {
    /// Entry from the V5 map of a compressed CHD (12-byte layout).
    V5Compressed(V5CompressedMapEntry<'a>),
    /// Entry from the V5 map of an uncompressed CHD (4-byte layout).
    V5Uncompressed(V5UncompressedMapEntry<'a>),
    /// Entry from a parsed legacy (V1-V4) map.
    LegacyEntry(&'a LegacyMapEntry),
}
/// Proof token: the wrapped (offset, length) pair was taken from an entry
/// verified to reference compressed data.
pub(crate) struct CompressedEntryProof(u64, u32);

impl CompressedEntryProof {
    /// File offset of the compressed block.
    pub fn block_offset(&self) -> u64 {
        let CompressedEntryProof(offset, _) = self;
        *offset
    }

    /// Length of the compressed block in bytes.
    pub fn block_size(&self) -> u32 {
        let CompressedEntryProof(_, size) = self;
        *size
    }
}
/// Proof token: the wrapped (offset, length) pair was taken from an entry
/// verified to reference uncompressed data.
pub(crate) struct UncompressedEntryProof(u64, u32);

impl UncompressedEntryProof {
    /// File offset of the uncompressed block.
    pub fn block_offset(&self) -> u64 {
        let UncompressedEntryProof(offset, _) = self;
        *offset
    }

    /// Length of the uncompressed block in bytes.
    pub fn block_size(&self) -> u32 {
        let UncompressedEntryProof(_, size) = self;
        *size
    }
}
/// Iterator over the entries of a [`Map`], in hunk order.
pub struct MapEntries<'a> {
    map: &'a Map,
    // Index of the next entry to yield.
    curr: usize,
}
impl<'a> Iterator for MapEntries<'a> {
    type Item = MapEntry<'a>;

    /// Yields map entries in order until the map is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        let index = self.curr;
        if index == self.map.len() {
            return None;
        }
        self.curr = index + 1;
        self.map.get_entry(index)
    }
}
impl Map {
    /// Number of hunk entries in the map.
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        match self {
            Map::V5(m) => {
                // Entry stride depends on whether the owning CHD is compressed.
                let map_entry_bytes = if m.1 {
                    V5_COMPRESSED_MAP_ENTRY_SIZE
                } else {
                    V5_UNCOMPRESSED_MAP_ENTRY_SIZE
                };
                m.0.len() / map_entry_bytes
            }
            Map::Legacy(m) => m.0.len(),
        }
    }

    /// Returns the entry for `hunk_num`, or `None` when it is out of range.
    pub fn get_entry(&self, hunk_num: usize) -> Option<MapEntry> {
        match self {
            Map::V5(m) => {
                // Use the named entry sizes (previously bare 12/4 literals) so
                // the stride always agrees with `len()` and the fixed-size
                // array views below.
                let map_entry_bytes = if m.1 {
                    V5_COMPRESSED_MAP_ENTRY_SIZE
                } else {
                    V5_UNCOMPRESSED_MAP_ENTRY_SIZE
                };
                let entry_slice =
                    m.0.get(hunk_num * map_entry_bytes..(hunk_num + 1) * map_entry_bytes)?;
                if m.1 {
                    <&[u8; V5_COMPRESSED_MAP_ENTRY_SIZE]>::try_from(entry_slice)
                        .map(|e| MapEntry::V5Compressed(V5CompressedMapEntry(e)))
                        .ok()
                } else {
                    <&[u8; V5_UNCOMPRESSED_MAP_ENTRY_SIZE]>::try_from(entry_slice)
                        .map(|e| MapEntry::V5Uncompressed(V5UncompressedMapEntry(e, m.2)))
                        .ok()
                }
            }
            Map::Legacy(m) => m.0.get(hunk_num).map(MapEntry::LegacyEntry),
        }
    }

    /// Iterator over all entries in hunk order.
    pub fn iter(&self) -> MapEntries {
        MapEntries { map: self, curr: 0 }
    }

    /// Reads the map appropriate for the header version from `file`.
    pub fn try_read_map<F: Read + Seek>(header: &Header, mut file: F) -> Result<Map> {
        match header {
            Header::V5Header(v5) => {
                Ok(Map::V5(read_map_v5(v5, &mut file, header.is_compressed())?))
            }
            // V3 and V4 share the 16-byte legacy entry layout.
            Header::V3Header(_) | Header::V4Header(_) => Ok(Map::Legacy(RawMapLegacy(
                read_map_legacy::<_, V3_MAP_ENTRY_SIZE>(header, file)?,
            ))),
            // V1 and V2 share the 8-byte legacy entry layout.
            Header::V2Header(_) | Header::V1Header(_) => Ok(Map::Legacy(RawMapLegacy(
                read_map_legacy::<_, V1_MAP_ENTRY_SIZE>(header, file)?,
            ))),
        }
    }
}
/// Reads a legacy (V1-V4) map of `MAP_ENTRY_SIZE`-byte entries that
/// immediately follows the header, validating the end-of-list cookie and
/// that referenced data fits inside the file.
fn read_map_legacy<F: Read + Seek, const MAP_ENTRY_SIZE: usize>(
    header: &Header,
    mut file: F,
) -> Result<Vec<LegacyMapEntry>> {
    // Only the two known legacy entry sizes are valid instantiations.
    const_assert!(MAP_ENTRY_SIZE: usize => V3_MAP_ENTRY_SIZE >=
    MAP_ENTRY_SIZE && (MAP_ENTRY_SIZE == V3_MAP_ENTRY_SIZE || MAP_ENTRY_SIZE == V1_MAP_ENTRY_SIZE));
    let mut map = Vec::with_capacity(header.hunk_count() as usize);
    // Highest end-of-data offset referenced by any entry; checked against
    // the file size below.
    let mut max_off = 0;
    let mut cookie = [0u8; MAP_ENTRY_SIZE];
    file.seek(SeekFrom::Start(0))?;
    let mut file = BufReader::new(file);
    // The map starts right after the header.
    file.seek(SeekFrom::Start(header.len() as u64))?;
    let mut entry_buf = [0u8; V3_MAP_ENTRY_SIZE];
    for _ in 0..header.hunk_count() {
        file.read_exact(&mut entry_buf[0..MAP_ENTRY_SIZE])?;
        let entry = match MAP_ENTRY_SIZE {
            V3_MAP_ENTRY_SIZE => read_map_entry_v3(&entry_buf)?,
            V1_MAP_ENTRY_SIZE => {
                let mut read = Cursor::new(entry_buf);
                let entry = read.read_u64::<BigEndian>()?;
                read_map_entry_v1(entry, header.hunk_size())
            }
            // Guarded by the const_assert above.
            _ => unreachable!(),
        };
        // Only entries pointing at data inside this file constrain its size.
        if let Some(CompressionTypeLegacy::Compressed) | Some(CompressionTypeLegacy::Uncompressed) =
            CompressionTypeLegacy::from_u8(entry.flags & MAP_ENTRY_FLAG_TYPE_MASK)
        {
            max_off = std::cmp::max(max_off, entry.offset + entry.length as u64);
        }
        map.push(entry);
    }
    file.read_exact(&mut cookie)?;
    // The map is terminated by the first MAP_ENTRY_SIZE bytes of the
    // end-of-list cookie (reference implementations memcmp exactly
    // entry-size bytes). The previous ordered comparison
    // (`&cookie[..] < b"EndOfListCookie\0"`) accepted corrupt cookies that
    // compared greater and rejected every valid 8-byte V1/V2 cookie, since
    // a strict prefix always compares less than the full literal.
    if &cookie[..] != &b"EndOfListCookie\0"[..MAP_ENTRY_SIZE] {
        return Err(Error::InvalidFile);
    }
    if max_off > file.seek(SeekFrom::End(0))? {
        return Err(Error::InvalidFile);
    }
    Ok(map)
}
/// Decodes a V1/V2 map entry: length in the top 20 bits of `val`, absolute
/// file offset in the low 44 bits.
#[inline]
fn read_map_entry_v1(val: u64, hunk_bytes: u32) -> LegacyMapEntry {
    let length = (val >> 44) as u32;
    // A hunk stored at exactly the hunk size is raw; anything else is compressed.
    let compression = if length == hunk_bytes {
        CompressionTypeLegacy::Uncompressed
    } else {
        CompressionTypeLegacy::Compressed
    };
    // Mask off the low 44 bits for the offset.
    let offset = val & 0x0000_0FFF_FFFF_FFFF;
    LegacyMapEntry {
        offset,
        // V1/V2 maps never record per-hunk CRCs.
        crc: None,
        length,
        flags: MAP_ENTRY_FLAG_NO_CRC | compression as u8,
    }
}
/// Decodes a V3/V4 map entry.
///
/// Layout (big-endian): offset u64 | crc u32 | length-low u16 | length-high
/// u8 | flags u8.
#[inline]
fn read_map_entry_v3(buf: &[u8; V3_MAP_ENTRY_SIZE]) -> Result<LegacyMapEntry> {
    let mut cursor = Cursor::new(buf);
    let offset = cursor.read_u64::<BigEndian>()?;
    let crc = cursor.read_u32::<BigEndian>()?;
    // The 24-bit length is split across a 16-bit word and a high byte.
    let length_lo = cursor.read_u16::<BigEndian>()? as u32;
    let length = (buf[14] as u32) << 16 | length_lo;
    Ok(LegacyMapEntry {
        offset,
        crc: Some(crc),
        length,
        flags: buf[15],
    })
}
/// Reads a V5 hunk map from `file`.
///
/// When the CHD is compressed, the on-disk map is itself compressed: a
/// Huffman-coded type byte per hunk followed by bit-packed
/// length/offset/CRC fields. This routine expands it into the uniform
/// 12-byte-per-entry layout consumed by `V5CompressedMapEntry`.
fn read_map_v5<F: Read + Seek>(
    header: &HeaderV5,
    mut file: F,
    is_compressed: bool,
) -> Result<RawMapV5> {
    let map_size = header.hunk_count as usize * header.map_entry_bytes as usize;
    let mut raw_map = vec![0u8; map_size];
    if !is_compressed {
        // Uncompressed CHD: the map is stored verbatim at map_offset.
        file.seek(SeekFrom::Start(header.map_offset))?;
        file.read_exact(&mut raw_map[..])?;
        return Ok(RawMapV5(raw_map, is_compressed, header.hunk_bytes));
    }
    // Compressed map header: u32 compressed size, u48 offset of the first
    // data block, u16 CRC of the decoded map, then the bit widths of the
    // length, self-offset and parent-offset fields.
    file.seek(SeekFrom::Start(header.map_offset))?;
    let map_bytes = file.read_u32::<BigEndian>()?;
    let first_offs = file.read_u48::<BigEndian>()?;
    let map_crc = file.read_u16::<BigEndian>()?;
    let length_bits = file.read_u8()?;
    let self_bits = file.read_u8()?;
    let parent_bits = file.read_u8()?;
    let mut compressed: Vec<u8> = vec![0u8; map_bytes as usize];
    // The bitstream begins right after the 16-byte map header.
    file.seek(SeekFrom::Start(header.map_offset + 16))?;
    file.read_exact(&mut compressed[..])?;
    let mut bitstream = BitReader::new(&compressed[..]);
    let decoder = HuffmanDecoder::<16, 8, { lookup_len::<8>() }>::from_tree_rle(&mut bitstream)?;
    // Pass 1: decode one compression-type byte per hunk into byte 0 of each
    // 12-byte entry. RleSmall/RleLarge are run markers repeating the
    // previously decoded type.
    let mut rep_count = 0;
    let mut last_cmp = 0;
    for map_slice in raw_map.chunks_exact_mut(V5_COMPRESSED_MAP_ENTRY_SIZE) {
        if rep_count > 0 {
            // Still inside a run: repeat the last type byte.
            map_slice[0] = last_cmp;
            rep_count -= 1;
        } else {
            let val = decoder.decode_one(&mut bitstream)? as u8;
            match CompressionTypeV5::from_u8(val).ok_or(Error::DecompressionError)? {
                CompressionTypeV5::CompressionRleSmall => {
                    // Short run; length comes from the next symbol.
                    map_slice[0] = last_cmp;
                    rep_count = 2 + decoder.decode_one(&mut bitstream)?;
                }
                CompressionTypeV5::CompressionRleLarge => {
                    // Long run; two further symbols form the count.
                    map_slice[0] = last_cmp;
                    rep_count = 2 + 16 + (decoder.decode_one(&mut bitstream)? << 4);
                    rep_count += decoder.decode_one(&mut bitstream)?;
                }
                _ => {
                    map_slice[0] = val;
                    last_cmp = val;
                }
            }
        }
    }
    // Pass 2: read each hunk's bit-packed fields and write bytes 1..12 of
    // its entry. Offsets of consecutive data blocks are implicit (running
    // curr_off); the Self*/Parent* delta forms are rewritten to plain
    // CompressionSelf/CompressionParent as they are resolved.
    let mut curr_off = first_offs;
    let mut last_self = 0;
    let mut last_parent = 0;
    for (hunk_num, map_slice) in raw_map
        .chunks_exact_mut(V5_COMPRESSED_MAP_ENTRY_SIZE)
        .enumerate()
    {
        let mut off = curr_off;
        let mut len: u32 = 0;
        let mut crc: u16 = 0;
        match CompressionTypeV5::from_u8(map_slice[0]).ok_or(Error::DecompressionError)? {
            CompressionTypeV5::CompressionType0
            | CompressionTypeV5::CompressionType1
            | CompressionTypeV5::CompressionType2
            | CompressionTypeV5::CompressionType3 => {
                // Codec-compressed block: explicit length, implicit offset.
                len = bitstream.read_u32(length_bits)?;
                curr_off += len as u64;
                crc = bitstream.read_u32(16)? as u16;
            }
            CompressionTypeV5::CompressionNone => {
                // Raw block: always exactly one hunk long.
                len = header.hunk_bytes;
                curr_off += len as u64;
                crc = bitstream.read_u32(16)? as u16;
            }
            CompressionTypeV5::CompressionSelf => {
                // Explicit self-reference.
                off = bitstream.read_u64(self_bits)?;
                last_self = off;
            }
            CompressionTypeV5::CompressionParent => {
                // Explicit parent reference.
                off = bitstream.read_u64(parent_bits)?;
                last_parent = off;
            }
            CompressionTypeV5::CompressionSelf1 => {
                // Self-reference to the hunk after the previous one.
                last_self += 1;
                map_slice[0] = CompressionTypeV5::CompressionSelf as u8;
                off = last_self;
            }
            CompressionTypeV5::CompressionSelf0 => {
                // Self-reference to the same hunk as last time.
                map_slice[0] = CompressionTypeV5::CompressionSelf as u8;
                off = last_self;
            }
            CompressionTypeV5::CompressionParentSelf => {
                // Parent reference to the unit matching this hunk's position.
                map_slice[0] = CompressionTypeV5::CompressionParent as u8;
                off = ((hunk_num * header.hunk_bytes as usize) / header.unit_bytes as usize) as u64;
                last_parent = off;
            }
            CompressionTypeV5::CompressionParent1 => {
                // Parent reference advanced by one hunk's worth of units.
                last_parent += (header.hunk_bytes / header.unit_bytes) as u64;
                map_slice[0] = CompressionTypeV5::CompressionParent as u8;
                off = last_parent;
            }
            CompressionTypeV5::CompressionParent0 => {
                // Parent reference to the same unit as last time.
                map_slice[0] = CompressionTypeV5::CompressionParent as u8;
                off = last_parent;
            }
            // RLE markers were fully consumed in pass 1; anything else here
            // means the stream is corrupt.
            _ => return Err(Error::DecompressionError),
        }
        // Entry layout: [type u8][length u24][offset u48][crc u16], big-endian.
        let mut cursor = Cursor::new(&mut map_slice[1..]);
        cursor.write_u24::<BigEndian>(len)?;
        cursor.write_u48::<BigEndian>(off)?;
        cursor.write_u16::<BigEndian>(crc)?;
    }
    // Verify the fully decoded map against the CRC16 from the map header.
    if crate::block_hash::CRC16.checksum(&raw_map[0..header.hunk_count as usize * 12]) != map_crc {
        return Err(Error::DecompressionError);
    }
    Ok(RawMapV5(raw_map, is_compressed, header.hunk_bytes))
}