pub mod attribute;
pub mod header;
use crate::io::*;
use ::smallvec::SmallVec;
use self::attribute::*;
use crate::block::chunk::{TileCoordinates, Block};
use crate::error::*;
use std::fs::File;
use std::io::{BufReader};
use crate::math::*;
use std::collections::{HashSet};
use std::convert::TryFrom;
use crate::meta::header::{Header};
/// The meta data of an exr image: the file-wide requirement flags
/// plus one header per layer.
#[derive(Debug, Clone, PartialEq)]
pub struct MetaData {
/// The feature flags and file format version that a reader must support to decode this file.
pub requirements: Requirements,
/// One header per layer, in the order they appear in the file.
pub headers: Headers,
}
/// The list of headers, one per layer; stored inline (no heap allocation) for up to 3 layers.
pub type Headers = SmallVec<[Header; 3]>;
/// One offset table per header.
pub type OffsetTables = SmallVec<[OffsetTable; 3]>;
/// One `u64` entry per chunk of the layer (read with `header.chunk_count` entries).
pub type OffsetTable = Vec<u64>;
/// The version number and feature flags stored in the four bytes
/// directly after the magic number. See `Requirements::read` for the exact bit layout.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub struct Requirements {
// the file format version; only version 2 passes `validate()`
file_format_version: u8,
// flag bit 9: the file is a single layer stored as tiles
is_single_layer_and_tiled: bool,
// flag bit 10: the file uses long attribute/layer names
has_long_names: bool,
// flag bit 11: the file contains deep data (unsupported, see `MetaData::validate`)
has_deep_data: bool,
// flag bit 12: the file contains multiple layers
has_multiple_layers: bool,
}
/// Identifies a single block or tile of pixel data.
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct TileIndices {
/// The coordinates of this tile within the layer.
pub location: TileCoordinates,
/// The pixel size of this tile — presumably smaller than the nominal
/// tile size for tiles at the right/bottom border (see `calculate_block_size`).
pub size: Vec2<usize>,
}
/// How the pixel data of a layer is split into blocks.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Blocks {
/// The layer is stored as blocks of scan lines.
ScanLines,
/// The layer is stored as tiles, with size and level mode in the description.
Tiles(TileDescription)
}
impl Blocks {
    /// Returns whether this layer is stored as tiles rather than scan line blocks.
    pub fn has_tiles(&self) -> bool {
        // `matches!` replaces the manual match-returning-bool
        // (clippy lint: `match_like_matches_macro`)
        matches!(self, Blocks::Tiles { .. })
    }
}
pub mod magic_number {
use super::*;
pub const BYTES: [u8; 4] = [0x76, 0x2f, 0x31, 0x01];
pub fn write(write: &mut impl Write) -> Result<()> {
u8::write_slice(write, &self::BYTES)
}
pub fn is_exr(read: &mut impl Read) -> Result<bool> {
let mut magic_num = [0; 4];
u8::read_slice(read, &mut magic_num)?;
Ok(magic_num == self::BYTES)
}
pub fn validate_exr(read: &mut impl Read) -> UnitResult {
if self::is_exr(read)? {
Ok(())
} else {
Err(Error::invalid("file identifier missing"))
}
}
}
/// A single null byte that terminates a sequence in the file.
pub mod sequence_end {
    use super::*;

    /// Number of bytes the sequence terminator occupies in the file.
    pub fn byte_size() -> usize {
        1
    }

    /// Write the single zero byte that ends a sequence.
    pub fn write<W: Write>(write: &mut W) -> UnitResult {
        let terminator: u8 = 0;
        terminator.write(write)
    }

    /// If the next byte is the terminator, consume it and return true;
    /// otherwise leave the stream untouched and return false.
    pub fn has_come(read: &mut PeekRead<impl Read>) -> Result<bool> {
        let terminator_found = read.skip_if_eq(0)?;
        Ok(terminator_found)
    }
}
fn missing_attribute(name: &str) -> Error {
Error::invalid(format!("missing or invalid {} attribute", name))
}
/// Number of blocks needed to cover `full_res` pixels with blocks of `tile_size` pixels.
/// Rounds up so that a partially filled trailing block is still counted.
pub fn compute_block_count(full_res: usize, tile_size: usize) -> usize {
    let rounding = RoundingMode::Up;
    rounding.divide(full_res, tile_size)
}
/// Compute the starting position and the actual size of the block with the
/// given index along one axis. Errors if the index lies outside the data.
#[inline]
pub fn calculate_block_position_and_size(total_size: usize, block_size: usize, block_index: usize) -> Result<(usize, usize)> {
    let block_position = block_index * block_size;
    let actual_size = calculate_block_size(total_size, block_size, block_position)?;
    Ok((block_position, actual_size))
}
/// The actual size of the block starting at `block_position`:
/// the nominal `block_size`, except clipped at the end of the data.
/// Errors if the position lies outside the data.
#[inline]
pub fn calculate_block_size(total_size: usize, block_size: usize, block_position: usize) -> Result<usize> {
    if block_position >= total_size {
        return Err(Error::invalid("block index"))
    }

    // either a full block fits, or only the remainder up to the border
    let remaining = total_size - block_position;
    Ok(remaining.min(block_size))
}
/// Number of mip/rip levels for a resolution: `log2(full_res) + 1`,
/// rounded according to the given mode.
pub fn compute_level_count(round: RoundingMode, full_res: usize) -> usize {
    let full_res_u32 = u32::try_from(full_res).unwrap();
    let highest_level_index = round.log2(full_res_u32);
    usize::try_from(highest_level_index).unwrap() + 1
}
/// Resolution of the level with the given index: the full resolution divided
/// by `2^level_index` (with the given rounding), but never smaller than one pixel.
pub fn compute_level_size(round: RoundingMode, full_res: usize, level_index: usize) -> usize {
    // shifting by the bit width of usize or more would overflow
    assert!(level_index < std::mem::size_of::<usize>() * 8, "largest level size exceeds maximum integer value");
    let level_divisor = 1_usize << level_index;
    round.divide(full_res, level_divisor).max(1)
}
/// Iterate all rip map levels as `(level_index, level_resolution)` pairs,
/// where x and y are downscaled independently.
pub fn rip_map_levels(round: RoundingMode, max_resolution: Vec2<usize>) -> impl Iterator<Item=(Vec2<usize>, Vec2<usize>)> {
    rip_map_indices(round, max_resolution).map(move |level_indices| {
        let level_size = Vec2(
            compute_level_size(round, max_resolution.width(), level_indices.x()),
            compute_level_size(round, max_resolution.height(), level_indices.y()),
        );
        (level_indices, level_size)
    })
}
/// Iterate all mip map levels as `(level_index, level_resolution)` pairs,
/// where x and y are downscaled together.
pub fn mip_map_levels(round: RoundingMode, max_resolution: Vec2<usize>) -> impl Iterator<Item=(usize, Vec2<usize>)> {
    mip_map_indices(round, max_resolution).map(move |level_index| {
        let level_size = Vec2(
            compute_level_size(round, max_resolution.width(), level_index),
            compute_level_size(round, max_resolution.height(), level_index),
        );
        (level_index, level_size)
    })
}
/// Iterate the 2D indices of all rip map levels,
/// visiting the x levels fastest (row-major order).
pub fn rip_map_indices(round: RoundingMode, max_resolution: Vec2<usize>) -> impl Iterator<Item=Vec2<usize>> {
    let x_level_count = compute_level_count(round, max_resolution.width());
    let y_level_count = compute_level_count(round, max_resolution.height());

    (0..y_level_count).flat_map(move |y_level| {
        (0..x_level_count).map(move |x_level| Vec2(x_level, y_level))
    })
}
/// Iterate the indices of all mip map levels,
/// based on the larger of the two dimensions.
pub fn mip_map_indices(round: RoundingMode, max_resolution: Vec2<usize>) -> impl Iterator<Item=usize> {
    let largest_dimension = max_resolution.width().max(max_resolution.height());
    0..compute_level_count(round, largest_dimension)
}
/// Total number of chunks a layer of the given size will occupy in the file,
/// summed over all resolution levels for tiled layers.
pub fn compute_chunk_count(compression: Compression, data_size: Vec2<usize>, blocks: Blocks) -> usize {
    match blocks {
        // scan line layers: one chunk per group of scan lines,
        // the group height depends on the compression method
        Blocks::ScanLines => compute_block_count(data_size.height(), compression.scan_lines_per_block()),

        Blocks::Tiles(tiles) => {
            let round = tiles.rounding_mode;
            let Vec2(tile_width, tile_height) = tiles.tile_size;

            // number of tiles needed to cover one level of the given resolution
            let count_tiles = move |level_size: Vec2<usize>| {
                compute_block_count(level_size.width(), tile_width)
                    * compute_block_count(level_size.height(), tile_height)
            };

            use crate::meta::attribute::LevelMode::*;
            match tiles.level_mode {
                Singular => count_tiles(data_size),
                MipMap => mip_map_levels(round, data_size).map(|(_, level_size)| count_tiles(level_size)).sum(),
                RipMap => rip_map_levels(round, data_size).map(|(_, level_size)| count_tiles(level_size)).sum(),
            }
        }
    }
}
impl MetaData {
#[must_use]
pub fn read_from_file(path: impl AsRef<::std::path::Path>, pedantic: bool) -> Result<Self> {
Self::read_from_unbuffered(File::open(path)?, pedantic)
}
#[must_use]
pub fn read_from_unbuffered(unbuffered: impl Read, pedantic: bool) -> Result<Self> {
Self::read_from_buffered(BufReader::new(unbuffered), pedantic)
}
#[must_use]
pub fn read_from_buffered(buffered: impl Read, pedantic: bool) -> Result<Self> {
let mut read = PeekRead::new(buffered);
MetaData::read_unvalidated_from_buffered_peekable(&mut read, pedantic)
}
#[must_use]
pub(crate) fn read_unvalidated_from_buffered_peekable(read: &mut PeekRead<impl Read>, pedantic: bool) -> Result<Self> {
magic_number::validate_exr(read)?;
let requirements = Requirements::read(read)?;
requirements.validate()?;
let headers = Header::read_all(read, &requirements, pedantic)?;
Ok(MetaData { requirements, headers })
}
#[must_use]
pub(crate) fn read_validated_from_buffered_peekable(
read: &mut PeekRead<impl Read>, pedantic: bool
) -> Result<Self> {
let meta_data = Self::read_unvalidated_from_buffered_peekable(read, !pedantic)?;
MetaData::validate(meta_data.headers.as_slice(), pedantic)?;
Ok(meta_data)
}
pub(crate) fn write_validating_to_buffered(write: &mut impl Write, headers: &[Header], pedantic: bool) -> UnitResult {
let minimal_requirements = Self::validate(headers, pedantic)?;
magic_number::write(write)?;
minimal_requirements.write(write)?;
Header::write_all(headers, write, minimal_requirements.has_multiple_layers)?;
Ok(())
}
pub fn read_offset_tables(read: &mut PeekRead<impl Read>, headers: &Headers) -> Result<OffsetTables> {
headers.iter()
.map(|header| u64::read_vec(read, header.chunk_count, u16::MAX as usize, None, "offset table size"))
.collect()
}
pub fn skip_offset_tables(read: &mut PeekRead<impl Read>, headers: &Headers) -> Result<usize> {
let chunk_count: usize = headers.iter().map(|header| header.chunk_count).sum();
crate::io::skip_bytes(read, chunk_count * u64::BYTE_SIZE)?;
Ok(chunk_count)
}
pub fn validate(headers: &[Header], pedantic: bool) -> Result<Requirements> {
if headers.len() == 0 {
return Err(Error::invalid("at least one layer is required"));
}
let deep = false;
let is_multilayer = headers.len() > 1;
let first_header_has_tiles = headers.iter().next()
.map_or(false, |header| header.blocks.has_tiles());
let mut minimal_requirements = Requirements {
file_format_version: 2,
has_long_names: false,
is_single_layer_and_tiled: !is_multilayer && first_header_has_tiles,
has_multiple_layers: is_multilayer,
has_deep_data: deep,
};
for header in headers {
if header.deep {
return Err(Error::unsupported("deep data not supported yet"));
}
header.validate(is_multilayer, &mut minimal_requirements.has_long_names, pedantic)?;
}
if pedantic {
let mut header_names = HashSet::with_capacity(headers.len());
for header in headers {
if !header_names.insert(&header.own_attributes.layer_name) {
return Err(Error::invalid(format!(
"duplicate layer name: `{}`",
header.own_attributes.layer_name.as_ref().expect("header validation bug")
)));
}
}
}
if pedantic {
let must_share = headers.iter().flat_map(|header| header.own_attributes.other.iter())
.any(|(_, value)| value.to_chromaticities().is_ok() || value.to_time_code().is_ok());
if must_share {
return Err(Error::invalid("chromaticities and time code attributes must must not exist in own attributes but shared instead"));
}
}
if pedantic && headers.len() > 1 {
let first_header = headers.first().expect("header count validation bug");
let first_header_attributes = &first_header.shared_attributes;
for header in &headers[1..] {
if &header.shared_attributes != first_header_attributes {
return Err(Error::invalid("display window, pixel aspect, chromaticities, and time code attributes must be equal for all headers"))
}
}
}
debug_assert!(minimal_requirements.validate().is_ok());
Ok(minimal_requirements)
}
}
impl Requirements {
/// Whether the file declares multiple layers (flag bit 12).
pub fn is_multilayer(&self) -> bool {
self.has_multiple_layers
}
/// Read the version number and feature flags from the four bytes
/// that directly follow the magic number.
pub fn read<R: Read>(read: &mut R) -> Result<Self> {
use ::bit_field::BitField;
let version_and_flags = u32::read(read)?;
// the version number lives in the lowest bits, the feature flags above it
let version = (version_and_flags & 0x000F) as u8;
// bit 9: single layer stored as tiles
let is_single_tile = version_and_flags.get_bit(9);
// bit 10: long names are used
let has_long_names = version_and_flags.get_bit(10);
// bit 11: deep data is present
let has_deep_data = version_and_flags.get_bit(11);
// bit 12: multiple layers are present
let has_multiple_layers = version_and_flags.get_bit(12);
// any set bit above 12 is a feature this implementation does not know
let unknown_flags = version_and_flags >> 13;
if unknown_flags != 0 {
return Err(Error::unsupported("too new file feature flags"));
}
let version = Requirements {
file_format_version: version,
is_single_layer_and_tiled: is_single_tile, has_long_names,
has_deep_data, has_multiple_layers,
};
Ok(version)
}
/// Write the version number and feature flags as a single little-endian `u32`,
/// using the same bit layout that `read` expects.
pub fn write<W: Write>(self, write: &mut W) -> UnitResult {
use ::bit_field::BitField;
let mut version_and_flags = self.file_format_version as u32;
version_and_flags.set_bit(9, self.is_single_layer_and_tiled);
version_and_flags.set_bit(10, self.has_long_names);
version_and_flags.set_bit(11, self.has_deep_data);
version_and_flags.set_bit(12, self.has_multiple_layers);
version_and_flags.write(write)?;
Ok(())
}
/// Check that the version is supported and the flag combination is consistent.
pub fn validate(&self) -> UnitResult {
if self.file_format_version == 2 {
// only these flag combinations are valid; note that the version ranges
// in the patterns are only reached for version 2 due to the check above
match (
self.is_single_layer_and_tiled, self.has_deep_data, self.has_multiple_layers,
self.file_format_version
) {
(false, false, false, 1..=2) => Ok(()),
(true, false, false, 1..=2) => Ok(()),
(false, false, true, 2) => Ok(()),
(false, true, false, 2) => Ok(()),
(false, true, true, 2) => Ok(()),
_ => Err(Error::invalid("file feature flags"))
}
}
else {
Err(Error::unsupported("file versions other than 2.0 are not supported"))
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::meta::header::{ImageAttributes, LayerAttributes};
// writing requirement flags and reading them back must reproduce the value
#[test]
fn round_trip_requirements() {
let requirements = Requirements {
file_format_version: 2,
is_single_layer_and_tiled: true,
has_long_names: false,
has_deep_data: true,
has_multiple_layers: false
};
let mut data: Vec<u8> = Vec::new();
requirements.write(&mut data).unwrap();
let read = Requirements::read(&mut data.as_slice()).unwrap();
assert_eq!(requirements, read);
}
// writing complete meta data and reading it back must reproduce the value
#[test]
fn round_trip(){
let header = Header {
channels: ChannelList::new(smallvec![
ChannelDescription {
name: Text::from("main"),
sample_type: SampleType::U32,
quantize_linearly: false,
sampling: Vec2(1, 1)
}
],
),
compression: Compression::Uncompressed,
line_order: LineOrder::Increasing,
deep_data_version: Some(1),
chunk_count: compute_chunk_count(Compression::Uncompressed, Vec2(2000, 333), Blocks::ScanLines),
max_samples_per_pixel: Some(4),
shared_attributes: ImageAttributes {
pixel_aspect: 3.0,
.. ImageAttributes::new(IntegerBounds {
position: Vec2(2,1),
size: Vec2(11, 9)
})
},
blocks: Blocks::ScanLines,
deep: false,
layer_size: Vec2(2000, 333),
own_attributes: LayerAttributes {
layer_name: Some(Text::from("test name lol")),
layer_position: Vec2(3, -5),
screen_window_center: Vec2(0.3, 99.0),
screen_window_width: 0.19,
.. Default::default()
}
};
let meta = MetaData {
requirements: Requirements {
file_format_version: 2,
is_single_layer_and_tiled: false,
has_long_names: false,
has_deep_data: false,
has_multiple_layers: false
},
headers: smallvec![ header ],
};
let mut data: Vec<u8> = Vec::new();
MetaData::write_validating_to_buffered(&mut data, meta.headers.as_slice(), true).unwrap();
let meta2 = MetaData::read_from_buffered(data.as_slice(), false).unwrap();
MetaData::validate(meta2.headers.as_slice(), true).unwrap();
assert_eq!(meta, meta2);
}
// a plain single-layer header must infer the smallest possible requirement flags
#[test]
fn infer_low_requirements() {
let header_version_1_short_names = Header {
channels: ChannelList::new(smallvec![
ChannelDescription {
name: Text::from("main"),
sample_type: SampleType::U32,
quantize_linearly: false,
sampling: Vec2(1, 1)
}
],
),
compression: Compression::Uncompressed,
line_order: LineOrder::Increasing,
deep_data_version: Some(1),
chunk_count: compute_chunk_count(Compression::Uncompressed, Vec2(2000, 333), Blocks::ScanLines),
max_samples_per_pixel: Some(4),
shared_attributes: ImageAttributes {
pixel_aspect: 3.0,
.. ImageAttributes::new(IntegerBounds {
position: Vec2(2,1),
size: Vec2(11, 9)
})
},
blocks: Blocks::ScanLines,
deep: false,
layer_size: Vec2(2000, 333),
own_attributes: LayerAttributes {
other: vec![
(Text::try_from("x").unwrap(), AttributeValue::F32(3.0)),
(Text::try_from("y").unwrap(), AttributeValue::F32(-1.0)),
].into_iter().collect(),
.. Default::default()
}
};
let low_requirements = MetaData::validate(
&[header_version_1_short_names], true
).unwrap();
assert_eq!(low_requirements.has_long_names, false);
assert_eq!(low_requirements.file_format_version, 2);
assert_eq!(low_requirements.has_deep_data, false);
assert_eq!(low_requirements.has_multiple_layers, false);
}
// long attribute names and a second layer must set the corresponding flags
#[test]
fn infer_high_requirements() {
let header_version_2_long_names = Header {
channels: ChannelList::new(
smallvec![
ChannelDescription {
name: Text::new_or_panic("main"),
sample_type: SampleType::U32,
quantize_linearly: false,
sampling: Vec2(1, 1)
}
],
),
compression: Compression::Uncompressed,
line_order: LineOrder::Increasing,
deep_data_version: Some(1),
chunk_count: compute_chunk_count(Compression::Uncompressed, Vec2(2000, 333), Blocks::ScanLines),
max_samples_per_pixel: Some(4),
shared_attributes: ImageAttributes {
pixel_aspect: 3.0,
.. ImageAttributes::new(IntegerBounds {
position: Vec2(2,1),
size: Vec2(11, 9)
})
},
blocks: Blocks::ScanLines,
deep: false,
layer_size: Vec2(2000, 333),
own_attributes: LayerAttributes {
layer_name: Some(Text::new_or_panic("oasdasoidfj")),
other: vec![
(Text::new_or_panic("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"), AttributeValue::F32(3.0)),
(Text::new_or_panic("y"), AttributeValue::F32(-1.0)),
].into_iter().collect(),
.. Default::default()
}
};
let mut layer_2 = header_version_2_long_names.clone();
layer_2.own_attributes.layer_name = Some(Text::new_or_panic("anythingelse"));
let low_requirements = MetaData::validate(
&[header_version_2_long_names, layer_2], true
).unwrap();
assert_eq!(low_requirements.has_long_names, true);
assert_eq!(low_requirements.file_format_version, 2);
assert_eq!(low_requirements.has_deep_data, false);
assert_eq!(low_requirements.has_multiple_layers, true);
}
}