use adc::AdcDecoder;
use bincode::Options;
use bzip2::read::BzDecoder;
use flate2::read::ZlibDecoder;
use itertools::Itertools;
use num_derive::FromPrimitive;
use num_traits::FromPrimitive;
use serde::{Deserialize, Serialize};
use std::cmp;
use std::cmp::Ordering;
use std::fmt;
use std::io;
use std::io::prelude::*;
use std::io::Cursor;
use std::io::SeekFrom;
mod crypto;
mod error;
use crypto::header::EncryptedDmgHeader;
#[cfg(feature = "crypto")]
pub use crypto::reader::EncryptedDmgReader;
pub use error::{Error, Result};
/// Output sector size in bytes; chunk sizes and padding are computed in
/// multiples of this.
const SECTOR_SIZE: usize = 512;
/// The UDIF "koly" trailer: a 512-byte, big-endian structure at the very end
/// of a DMG file (`from_reader` seeks to `End(-0x200)` to read it).
///
/// Field order must match the on-disk layout exactly — bincode deserializes
/// the fields in declaration order.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct KolyHeader {
    signature: [char; 4], // magic; must spell "koly"
    version: u32,
    header_size: u32,
    flags: u32,
    running_data_fork_offset: u64,
    data_fork_offset: u64, // file offset of the compressed data fork
    data_fork_length: u64, // length of the data fork; 0 is rejected by from_reader
    rsrc_fork_offset: u64,
    rsrc_fork_length: u64,
    segment_number: u32,
    segment_count: u32,
    segment_id: [u32; 4],
    data_fork_checksum_type: u32,
    data_fork_checksum_size: u32,
    data_fork_checksum: [u32; 32],
    xml_offset: u64, // file offset of the XML plist holding the partition table
    xml_length: u64,
    reserved4: [u64; 15],
    master_checksum_type: u32,
    master_checksum_size: u32,
    master_checksum: [u32; 32],
    image_variant: u32,
    sector_count: u64,
}
impl KolyHeader {
    /// Returns the four magic characters as an owned `String`
    /// (expected to be `"koly"` for a valid trailer).
    fn get_signature(&self) -> String {
        let mut sig = String::with_capacity(4);
        for ch in &self.signature {
            sig.push(*ch);
        }
        sig
    }
}
/// One partition entry parsed from the DMG's "blkx" plist array.
#[derive(Debug)]
pub struct Partition {
    /// Partition name, taken from the plist "Name" key, or "CFName" when
    /// "Name" is absent or empty.
    pub name: String,
    id: i64,               // plist "ID" value
    attributes: String,    // plist "Attributes" value
    blkx_table: BLKXTable, // parsed from the raw plist "Data" bytes
}
impl fmt::Display for Partition {
    /// Renders the partition as a small human-readable report:
    /// name, attributes, and the embedded blkx table.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "Partition:")?;
        writeln!(f, "\tName: {}", self.name)?;
        writeln!(f, "\tAttributes: {}", self.attributes)?;
        writeln!(f, "\tBlkx Table:")?;
        writeln!(f, "{}", self.blkx_table)
    }
}
/// Checksum record embedded in a blkx table: a type tag, the size of the
/// checksum, and up to 0x20 32-bit words of checksum data.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct UDIFChecksum {
    r#type: u32,
    size: u32,
    data: [u32; 0x20],
}
/// Chunk encodings that can appear in a blkx table. The discriminants are
/// the on-disk type tags; `from_u32` (via `FromPrimitive`) maps raw values
/// back to variants.
#[repr(u32)]
#[derive(FromPrimitive, Debug, PartialEq)]
pub enum ChunkType {
    Zero = 0x00000000,    // extracted as zero-filled sectors
    Raw = 0x00000001,     // uncompressed; copied verbatim
    Ignore = 0x00000002,  // extracted as zero-filled sectors
    Comment = 0x7ffffffe, // no payload; extracted as zero-filled sectors
    ADC = 0x80000004,     // ADC-compressed
    ZLIB = 0x80000005,    // zlib-compressed
    BZLIB = 0x80000006,   // bzip2-compressed
    LZFSE = 0x80000007,   // LZFSE-compressed
    Term = 0xffffffff,    // terminator; ends the chunk list
}
/// A single blkx table entry: where a chunk's compressed bytes live in the
/// data fork and which output sectors it expands to.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct BLKXChunk {
    r#type: u32,            // raw ChunkType discriminant
    comment: u32,
    sector_number: u64,     // first output sector covered by this chunk
    sector_count: u64,      // number of output sectors this chunk expands to
    compressed_offset: u64, // offset of the chunk within the data fork
    compressed_length: u64, // compressed length in bytes
}
impl fmt::Display for BLKXChunk {
    /// Renders the chunk as an indented multi-line report; unknown type tags
    /// are shown as "??".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let type_str = ChunkType::from_u32(self.r#type)
            .map(|t| format!("{:?}", t))
            .unwrap_or_else(|| String::from("??"));
        writeln!(f, "\t\t\tType: {:#010x} ({})", self.r#type, type_str)?;
        writeln!(f, "\t\t\tComment: {}", self.comment)?;
        writeln!(f, "\t\t\tsector num: {}", self.sector_number)?;
        writeln!(f, "\t\t\t# sectors: {}", self.sector_count)?;
        writeln!(f, "\t\t\tOffset: {:#010x}", self.compressed_offset)?;
        write!(f, "\t\t\tLength: {:#010x}", self.compressed_length)
    }
}
/// A parsed blkx ("mish") table describing every chunk of one partition.
/// Built from the raw plist "Data" bytes via the `From<Vec<u8>>` impl below.
#[derive(Debug)]
struct BLKXTable {
    signature: [char; 4], // NOTE(review): presumably "mish" — not validated here
    version: u32,
    sector_number: u64,
    sector_count: u64,
    data_offset: u64, // must be 0 for extraction to proceed (see extract_partition)
    buffers_needed: u32,
    block_descriptors: u32,
    reserved: [u32; 6],
    checksum: UDIFChecksum,
    num_chunks: u32, // number of chunk descriptors that follow the header
    chunks: Vec<BLKXChunk>,
}
impl std::convert::From<Vec<u8>> for BLKXTable {
    /// Parses a raw blkx ("mish") table: big-endian, fixed-width integers,
    /// a fixed-size header followed by `num_chunks` chunk descriptors
    /// back-to-back.
    ///
    /// # Panics
    /// Panics if `data` is truncated or malformed, mirroring the previous
    /// per-field `unwrap()` behaviour (the `From` interface cannot return
    /// an error).
    fn from(data: Vec<u8>) -> Self {
        let mut c = Cursor::new(data);
        // Configure the decoder once instead of re-applying
        // `with_fixint_encoding` on every single field read.
        let decoder = bincode::DefaultOptions::new()
            .with_big_endian()
            .with_fixint_encoding();
        let mut table = BLKXTable {
            signature: decoder.deserialize_from(&mut c).unwrap(),
            version: decoder.deserialize_from(&mut c).unwrap(),
            sector_number: decoder.deserialize_from(&mut c).unwrap(),
            sector_count: decoder.deserialize_from(&mut c).unwrap(),
            data_offset: decoder.deserialize_from(&mut c).unwrap(),
            buffers_needed: decoder.deserialize_from(&mut c).unwrap(),
            block_descriptors: decoder.deserialize_from(&mut c).unwrap(),
            reserved: decoder.deserialize_from(&mut c).unwrap(),
            checksum: decoder.deserialize_from(&mut c).unwrap(),
            num_chunks: decoder.deserialize_from(&mut c).unwrap(),
            chunks: vec![],
        };
        // The chunk descriptors immediately follow the fixed-size header.
        table.chunks = (0..table.num_chunks)
            .map(|_| decoder.deserialize_from(&mut c).unwrap())
            .collect();
        table
    }
}
impl fmt::Display for BLKXTable {
    /// Renders the table summary followed by every chunk, with chunks
    /// separated by an indented "----" divider.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "\t\tSector count: {}", self.sector_count)?;
        writeln!(f, "\t\tData offset: {}", self.data_offset)?;
        writeln!(f, "\t\tNumber chunks: {}", self.num_chunks)?;
        writeln!(f, "\t\tChunks:")?;
        writeln!(f, "{}", self.chunks.iter().format("\n\t\t\t----\n"))
    }
}
/// Controls how much progress/diagnostic output is printed to stdout.
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum Verbosity {
    /// No output at all.
    None,
    /// Progress messages only.
    Info,
    /// Progress messages plus header and per-chunk dumps.
    Debug,
}
// Prints via `println!` when `$self.verbosity` is `Info` or `Debug`.
// Takes the receiver explicitly because macros cannot capture `self`.
macro_rules! printInfo {
    ($self:ident, $($arg:tt)*) => ({
        if $self.verbosity == Verbosity::Info || $self.verbosity == Verbosity::Debug {
            println!($($arg)*);
        }
    })
}
// Prints via `println!` only when `$self.verbosity` is `Debug`.
macro_rules! printDebug {
    ($self:ident, $($arg:tt)*) => ({
        if $self.verbosity == Verbosity::Debug {
            println!($($arg)*);
        }
    })
}
/// Reader/extractor for (unencrypted) DMG images.
///
/// Construct with `DmgWiz::from_reader`, then use `extract_all` or
/// `extract_partition` to decompress partition data.
pub struct DmgWiz<R> {
    input: R, // the DMG image
    /// Partitions parsed from the image's XML plist, in plist order.
    pub partitions: Vec<Partition>,
    data_offset: u64, // start of the data fork within `input`
    /// Output verbosity for the print macros.
    pub verbosity: Verbosity,
}
impl<R> DmgWiz<R>
where
    R: Read + Seek,
{
    /// Builds a `DmgWiz` by parsing the trailing `koly` header and the XML
    /// property list that describes each partition's blkx (chunk) table.
    ///
    /// Returns `Error::Encrypted` when the input looks like an encrypted DMG,
    /// or `Error::InvalidInput` when the trailer or plist cannot be parsed.
    pub fn from_reader(mut input: R, verbosity: Verbosity) -> Result<DmgWiz<R>> {
        // The koly header is a 512-byte (0x200) trailer at the end of the file.
        input.seek(SeekFrom::End(-0x200))?;
        let header: KolyHeader = match bincode::DefaultOptions::new()
            .with_big_endian()
            .with_fixint_encoding()
            .deserialize_from(&mut input)
        {
            // A parse failure may mean the image is encrypted; check that
            // before surfacing the original error.
            Err(err) => return Err(DmgWiz::check_encrypted_or(input, err.into())),
            Ok(val) => val,
        };
        if header.get_signature() != "koly" {
            return Err(DmgWiz::check_encrypted_or(
                input,
                Error::InvalidInput("could not parse koly header".to_string()),
            ));
        }
        if verbosity == Verbosity::Debug {
            println!("{:#?}", header);
        }
        if header.data_fork_length == 0 {
            return Err(Error::InvalidInput("data fork length is 0".to_string()));
        }
        let data_offset = header.data_fork_offset;
        // The partition table lives in an XML plist pointed to by the header.
        input.seek(SeekFrom::Start(header.xml_offset))?;
        let mut plist = plist::Value::from_reader_xml(&mut input)?;
        // Expected structure: { "resource-fork": { "blkx": [ ... ] } }.
        let partitions_arr = plist
            .as_dictionary_mut()
            .and_then(|dict| dict.get("resource-fork"))
            .and_then(|rsfk| rsfk.as_dictionary())
            .and_then(|rsfk| rsfk.get("blkx"))
            .and_then(|blkx| blkx.as_array())
            .ok_or_else(|| Error::InvalidInput("invalid plist structure".to_string()))?;
        // Helper: look up a string value in a plist dictionary by key.
        fn get_string<'a>(dict: &'a plist::Dictionary, name: &str) -> Option<&'a str> {
            dict.get(name).and_then(|v| v.as_string())
        }
        let partitions: Vec<Partition> = partitions_arr
            .iter()
            .map(|part| part.as_dictionary())
            .map(|part| Partition {
                // Prefer a non-empty "Name"; fall back to "CFName".
                name: part
                    .and_then(|p| {
                        get_string(p, "Name")
                            .filter(|n| !n.is_empty())
                            .or_else(|| get_string(p, "CFName"))
                    })
                    .unwrap_or_default()
                    .to_string(),
                attributes: part
                    .and_then(|p| get_string(p, "Attributes"))
                    .unwrap_or_default()
                    .to_string(),
                // "Data" holds the raw blkx table bytes; `.into()` invokes
                // the `From<Vec<u8>> for BLKXTable` parser.
                blkx_table: part
                    .and_then(|p| p.get("Data"))
                    .and_then(|n| n.as_data())
                    .unwrap_or_default()
                    .to_vec()
                    .into(),
                id: part
                    .and_then(|p| p.get("ID"))
                    .and_then(|n| n.as_signed_integer())
                    .unwrap_or_default(),
            })
            .collect();
        Ok(DmgWiz {
            input,
            partitions,
            data_offset,
            verbosity,
        })
    }

    /// Extracts every partition in order into `output`, returning the total
    /// number of bytes written.
    pub fn extract_all<W>(&mut self, mut output: W) -> Result<usize>
    where
        W: Write + Seek,
    {
        let mut bytes_written = 0;
        for i in 0..self.partitions.len() {
            bytes_written += self.extract_partition(&mut output, i)?;
        }
        Ok(bytes_written)
    }

    /// Decompresses every chunk of partition `partition_num` into `output`,
    /// returning the number of bytes written.
    ///
    /// Fails with `Error::InvalidPartition` for an out-of-range index,
    /// `Error::InvalidInput` for malformed tables, and `Error::Decompress`
    /// when a chunk does not decompress to exactly its declared size.
    pub fn extract_partition<W>(&mut self, mut output: W, partition_num: usize) -> Result<usize>
    where
        W: Write,
    {
        let partition = self
            .partitions
            .get(partition_num)
            .ok_or(Error::InvalidPartition(partition_num))?;
        printInfo!(
            self,
            "extracting partition {} \"{}\"",
            partition_num,
            partition.name
        );
        printDebug!(self, "{}", partition);
        // NOTE(review): a non-zero table data_offset is treated as unsupported input.
        if partition.blkx_table.data_offset != 0 {
            return Err(Error::InvalidInput(format!(
                "invalid data offset of partition {}: {}",
                partition_num, partition.blkx_table.data_offset
            )));
        }
        let mut sectors_written = 0;
        for (chunk_num, chunk) in partition.blkx_table.chunks.iter().enumerate() {
            let chunk_type = ChunkType::from_u32(chunk.r#type).ok_or_else(|| {
                Error::InvalidInput(format!("unknown chunk type {:#010x}", chunk.r#type))
            })?;
            printDebug!(self,
                "chunk {}: type={:?} comment={} sector_number={} sector_count={} compressed_offset={} compressed_length={}",
                chunk_num,
                chunk_type,
                chunk.comment,
                chunk.sector_number,
                chunk.sector_count,
                chunk.compressed_offset,
                chunk.compressed_length
            );
            // A Term chunk marks the end of the chunk list.
            if chunk_type == ChunkType::Term {
                printInfo!(self, "done");
                return Ok(sectors_written as usize * SECTOR_SIZE);
            }
            // Chunks must appear in ascending sector order; gaps are filled
            // with zeroed sectors.
            match chunk.sector_number.cmp(&sectors_written) {
                Ordering::Less => {
                    return Err(Error::InvalidInput(format!(
                        "invalid sector number: {} (partition={} chunk={})",
                        chunk.sector_number, partition_num, chunk_num
                    )))
                }
                Ordering::Greater => {
                    let padding_sectors = chunk.sector_number - sectors_written;
                    let padding = vec![0; SECTOR_SIZE];
                    for _ in 0..padding_sectors {
                        output.write_all(&padding)?;
                    }
                }
                Ordering::Equal => (),
            }
            let in_len = chunk.compressed_length as usize;
            let out_len = chunk.sector_count as usize * SECTOR_SIZE;
            // Chunk offsets are relative to the start of the data fork.
            let chunk_offset = self.data_offset + chunk.compressed_offset;
            self.input.seek(SeekFrom::Start(chunk_offset))?;
            // Limit the decoder to exactly this chunk's compressed bytes.
            let mut chunk_input = BoundedReader {
                inner: &mut self.input,
                len: in_len,
            };
            let bytes_read = match chunk_type {
                ChunkType::Ignore | ChunkType::Zero | ChunkType::Comment => {
                    write_zero(&mut output, out_len)
                }
                ChunkType::Raw => copy(&mut chunk_input, &mut output),
                ChunkType::ADC => decode_adc(&mut chunk_input, &mut output),
                ChunkType::ZLIB => decode_zlib(&mut chunk_input, &mut output),
                ChunkType::BZLIB => decode_bzlib(&mut chunk_input, &mut output),
                ChunkType::LZFSE => decode_lzfse(&mut chunk_input, &mut output, out_len),
                ChunkType::Term => unreachable!(), // handled above
            };
            // Every chunk must decompress to exactly sector_count sectors.
            match bytes_read {
                Ok(val) if val == out_len => printDebug!(self, "decompressed {} bytes", val),
                _ => {
                    return Err(Error::Decompress {
                        partition_num,
                        chunk_num,
                        chunk_type,
                    })
                }
            };
            sectors_written += chunk.sector_count;
        }
        Ok(sectors_written as usize * SECTOR_SIZE)
    }

    /// Checks whether `input` is an encrypted DMG by re-reading its start and
    /// looking for the "encrcdsa" header. Returns `Error::Encrypted` if so,
    /// otherwise passes `err` through unchanged.
    fn check_encrypted_or(mut input: R, err: Error) -> Error {
        if let Err(err) = input.seek(SeekFrom::Start(0)) {
            return err.into();
        }
        match bincode::DefaultOptions::new()
            .with_big_endian()
            .with_fixint_encoding()
            .deserialize_from::<&mut R, EncryptedDmgHeader>(&mut input)
        {
            Ok(ref hdr) if hdr.get_signature() == "encrcdsa" => Error::Encrypted,
            _ => err,
        }
    }
}
/// A `Read` adapter that exposes at most `len` bytes of the wrapped reader,
/// used to confine each chunk decoder to that chunk's compressed bytes.
struct BoundedReader<R> {
    inner: R,   // the underlying reader
    len: usize, // bytes still allowed to be read
}
impl<R: Read> Read for BoundedReader<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let max_len = cmp::min(self.len, buf.len());
let read_len = self.inner.read(&mut buf[..max_len])?;
self.len -= read_len;
Ok(read_len)
}
}
/// Inflates a zlib-compressed chunk from `src` into `dest`, returning the
/// number of decompressed bytes written.
fn decode_zlib<R: Read, W: Write>(src: &mut R, dest: &mut W) -> Result<usize> {
    let mut decoder = ZlibDecoder::new(src);
    let copied = io::copy(&mut decoder, dest)?;
    Ok(copied as usize)
}
/// Decompresses a bzip2-compressed chunk from `src` into `dest`, returning
/// the number of decompressed bytes written.
fn decode_bzlib<R: Read, W: Write>(src: &mut R, dest: &mut W) -> Result<usize> {
    let mut decoder = BzDecoder::new(src);
    let copied = io::copy(&mut decoder, dest)?;
    Ok(copied as usize)
}
/// Decompresses an LZFSE-compressed chunk from `src` into `dest`.
///
/// `dest_size` is the expected decompressed size in bytes. Returns the
/// number of bytes the decoder actually produced, so the caller can verify
/// it matches `dest_size`.
fn decode_lzfse<R: Read, W: Write>(src: &mut R, dest: &mut W, dest_size: usize) -> Result<usize> {
    // Read the whole compressed chunk in one call; the previous
    // `src.bytes().collect()` pulled it in one byte at a time.
    let mut input = Vec::new();
    src.read_to_end(&mut input)?;
    // One spare byte so an oversized stream shows up as len == dest_size + 1
    // (which the caller rejects) rather than being indistinguishable from an
    // exact fit.
    let mut out_buf = vec![0; dest_size + 1];
    let len = lzfse::decode_buffer(&input, &mut out_buf)
        .map_err(|_| Error::InvalidInput("lzfse decompression failed".into()))?;
    // Emit only bytes the decoder actually produced (never the spare byte,
    // and no zero padding on a short decode — the caller treats any
    // len != dest_size as a decompression error anyway).
    dest.write_all(&out_buf[..cmp::min(len, dest_size)])?;
    Ok(len)
}
/// Decompresses an ADC-compressed chunk from `src` into `dest`, returning
/// the number of decompressed bytes written.
fn decode_adc<R: Read, W: Write>(src: &mut R, dest: &mut W) -> Result<usize> {
    let mut decoder = AdcDecoder::new(src);
    let copied = io::copy(&mut decoder, dest)?;
    Ok(copied as usize)
}
fn write_zero<W: Write>(w: &mut W, len: usize) -> Result<usize> {
let mut zeros = io::repeat(0).take(len as u64);
io::copy(&mut zeros, w)?;
Ok(len)
}
/// Streams all of `src` into `dest` unmodified and returns the byte count.
fn copy<R: Read, W: Write>(src: &mut R, dest: &mut W) -> Result<usize> {
    let copied = io::copy(src, dest)?;
    Ok(copied as usize)
}