use std::{collections::HashMap, error::Error, io::Write};
use crate::Tag;
use bytes::{Buf as _, BufMut};
use crate::{
GLYF, HEAD, HMTX, LOCA, Round4, compute_checksum,
error::{WuffErr, bail, bail_if, bail_with_msg_if},
woff::{
glyf_decoder::tranform_glyf_table,
headers::{
CollectionDirectory, CollectionDirectoryEntry, TableDirectory, TableDirectoryEntry,
WOFF2FontInfo, Woff2, WoffHeader, WoffVersion,
},
hmtx_decoder::{decode_hmtx_table, generate_hmtx_table},
},
write_table_directory_header,
};
/// Reject inputs whose claimed uncompressed size exceeds 100x the size of the
/// whole WOFF2 file; such ratios indicate a corrupt or malicious header.
const K_MAX_PLAUSIBLE_COMPRESSION_RATIO: f32 = 100.0;
/// Brotli-decompress `compressed_data` into a fresh buffer.
///
/// `size_hint` pre-sizes the output buffer (callers pass the expected
/// uncompressed sfnt size); it is only a capacity hint, not a limit.
#[cfg(feature = "brotli")]
fn decompress_brotli(compressed_data: &[u8], size_hint: usize) -> Result<Vec<u8>, Box<dyn Error>> {
    // NOTE: the previously-imported `BrotliResult` was unused; dropped.
    use brotli_decompressor::DecompressorWriter;
    let mut output: Vec<u8> = Vec::with_capacity(size_hint);
    {
        // Scope the writer so its mutable borrow of `output` ends before we
        // return the buffer; `close` flushes and surfaces trailing errors.
        let mut decompressor = DecompressorWriter::new(&mut output, 4096);
        decompressor.write_all(compressed_data)?;
        decompressor.close()?;
    }
    Ok(output)
}
/// Decompress a complete WOFF2 file to sfnt bytes using the built-in
/// brotli decompressor (enabled by the `brotli` feature).
#[cfg(feature = "brotli")]
pub fn decompress_woff2(raw_woff_data: &[u8]) -> Result<Vec<u8>, WuffErr> {
    decompress_woff2_with_custom_brotli(raw_woff_data, &mut decompress_brotli)
}
#[allow(clippy::type_complexity)]
pub fn decompress_woff2_with_custom_brotli(
raw_woff_data: &[u8],
decompress_brotli: &mut dyn FnMut(&[u8], usize) -> Result<Vec<u8>, Box<dyn Error>>,
) -> Result<Vec<u8>, WuffErr> {
let mut input = raw_woff_data;
let header = WoffHeader::parse(&mut input)?;
bail_if!(header.woff_version != WoffVersion::Woff2);
let table_directory = TableDirectory::parse_woff2(&mut input, header.num_tables as usize)?;
let mut collection_directory = if header.is_collection() {
CollectionDirectory::parse(&mut input, &table_directory)?
} else {
CollectionDirectory::generate_for_single_font(header.flavor, &table_directory)
};
collection_directory.sort_tables_within_each_font(&table_directory);
let num_fonts = collection_directory.fonts.len();
let compression_ratio: f32 = (header.total_sfnt_size as f32) / (raw_woff_data.len() as f32);
let compressed_data = &input[0..(header.total_compressed_size as usize)];
let decompressed_data =
decompress_brotli(compressed_data, header.total_sfnt_size as usize).unwrap();
bail_if!(header.total_sfnt_size < 1);
bail_with_msg_if!(
compression_ratio > K_MAX_PLAUSIBLE_COMPRESSION_RATIO,
"Implausible compression ratio {:.1}",
compression_ratio
);
let mut out: Vec<u8> = Vec::with_capacity(header.total_sfnt_size as usize);
let mut out_header = generate_header(&header, &table_directory, &collection_directory);
out.extend_from_slice(&out_header.data);
let mut table_metadata: Vec<Option<TableMetadata>> = vec![None; header.num_tables as usize];
for i in 0..num_fonts {
reconstruct_font(
&decompressed_data,
&header,
&table_directory,
&collection_directory.fonts[i],
&mut out_header,
&mut table_metadata,
&mut out,
i,
)?;
}
out[0..out_header.data.len()].copy_from_slice(&out_header.data);
Ok(out)
}
/// Yield `(directory_index, entry)` for every table belonging to `font_entry`,
/// in the order the font lists them.
fn iter_tables_for_font<'a>(
    font_entry: &'a CollectionDirectoryEntry,
    tables: &'a TableDirectory,
) -> impl Iterator<Item = (usize, &'a TableDirectoryEntry)> {
    font_entry.table_indices.iter().map(move |&table_idx| {
        let idx = table_idx as usize;
        (idx, &tables[idx])
    })
}
/// Reconstruct one font's tables from the decompressed WOFF2 table stream.
///
/// Tables are appended to `out` (padded to 4-byte alignment), each table's
/// checksum/offset/length is backfilled into `out_header`, and the font's
/// `head.checkSumAdjustment` field is patched last. `table_metadata` caches
/// per-table results so a table shared by several fonts in a collection is
/// emitted only once.
#[allow(clippy::too_many_arguments)]
fn reconstruct_font(
    woff_data: &[u8],
    header: &WoffHeader,
    tables: &TableDirectory,
    font_entry: &CollectionDirectoryEntry,
    out_header: &mut HeaderData,
    table_metadata: &mut [Option<TableMetadata>],
    out: &mut Vec<u8>,
    font_idx: usize,
) -> Result<(), WuffErr> {
    let glyf_idx = font_entry.glyf_idx.map(|idx| idx as usize);
    let loca_idx = font_entry.loca_idx.map(|idx| idx as usize);
    let hhea_idx = font_entry.hhea_idx.map(|idx| idx as usize);
    // glyf and loca are reconstructed together, so they must be present (and
    // transformed) as a pair.
    match (glyf_idx, loca_idx) {
        (Some(glyf_idx), Some(loca_idx)) => {
            bail_with_msg_if!(
                tables[glyf_idx].is_transformed() != tables[loca_idx].is_transformed(),
                "Cannot transform just one of glyf/loca"
            );
        }
        (Some(_), None) | (None, Some(_)) => {
            bail_with_msg_if!(true, "Cannot have just one of glyf/loca")
        }
        (None, None) => {}
    }
    // Seed the font checksum with the header contribution computed by
    // generate_header.
    let mut font_checksum: u32 = if header.is_collection() {
        out_header.checksum
    } else {
        out_header.font_infos[font_idx].header_checksum
    };
    // hhea's numberOfHMetrics is needed later to rebuild a transformed hmtx.
    let num_hmetrics = match hhea_idx {
        Some(hhea_idx) => {
            let hhea_table = &tables[hhea_idx];
            Some(read_num_hmetrics(hhea_table.data_as_slice(woff_data)?)?)
        }
        None => None,
    };
    let mut num_glyphs = None;
    let mut x_mins = None;
    for (table_idx, table) in iter_tables_for_font(font_entry, tables) {
        bail_if!(table.woff_offset as usize + table.woff_length as usize > woff_data.len());
        let metadata = if let Some(metadata) = table_metadata[table_idx] {
            // Already emitted: either a table shared with an earlier font in
            // a collection, or the loca produced alongside this font's glyf.
            // For the first font only loca can legitimately be pre-computed.
            bail_if!(font_idx == 0 && table.tag != LOCA);
            metadata
        }
        else if !table.is_transformed() {
            // Untransformed table: copy verbatim. head's checkSumAdjustment
            // (bytes 8..12) must not count towards the table checksum, so it
            // is subtracted back out.
            let check_sum_adjustment = if table.tag == HEAD {
                bail_if!(table.woff_length < 12);
                let checksum_slice =
                    &woff_data[(table.woff_offset as usize + 8)..(table.woff_offset as usize + 12)];
                // Exactly 4 bytes by construction of the slice above.
                let checksum_bytes: [u8; 4] = checksum_slice.try_into().unwrap();
                u32::from_be_bytes(checksum_bytes)
            } else {
                0
            };
            let table_data = table.data_as_slice(woff_data)?;
            let checksum = compute_checksum(table_data).wrapping_sub(check_sum_adjustment);
            let metadata = TableMetadata {
                dst_offset: out.len() as u32,
                dst_length: table.woff_length,
                checksum,
            };
            table_metadata[table_idx] = Some(metadata);
            out.extend_from_slice(table_data);
            out.resize(Round4!(out.len()), 0);
            metadata
        }
        else if table.tag == GLYF {
            // Transformed glyf: decoding yields both glyf and loca, plus the
            // glyph count and per-glyph xMins needed for a transformed hmtx.
            let loca_idx =
                loca_idx.expect("We already returned an error if glyf is present but loca isn't");
            let raw_glyf_table_data = table.data_as_slice(woff_data)?;
            let glyf_and_loca_data = tranform_glyf_table(raw_glyf_table_data)?;
            num_glyphs = Some(glyf_and_loca_data.num_glyphs);
            x_mins = Some(glyf_and_loca_data.x_mins);
            let glyf_dest_offset = out.len();
            out.extend_from_slice(&glyf_and_loca_data.glyf_table);
            out.resize(Round4!(out.len()), 0);
            let glyf_metadata = TableMetadata {
                checksum: glyf_and_loca_data.glyf_checksum,
                dst_offset: glyf_dest_offset as u32,
                dst_length: glyf_and_loca_data.glyf_table.len() as u32,
            };
            table_metadata[table_idx] = Some(glyf_metadata);
            let loca_dest_offset = out.len();
            out.extend_from_slice(&glyf_and_loca_data.loca_table);
            out.resize(Round4!(out.len()), 0);
            let loca_metadata = TableMetadata {
                checksum: glyf_and_loca_data.loca_checksum,
                dst_offset: loca_dest_offset as u32,
                dst_length: glyf_and_loca_data.loca_table.len() as u32,
            };
            table_metadata[loca_idx] = Some(loca_metadata);
            glyf_metadata
        } else if table.tag == LOCA {
            // Tables are sorted so glyf precedes loca and fills loca's
            // metadata; reaching here means the stream has a transformed
            // loca with no matching transformed glyf. Bail instead of
            // panicking on malformed input.
            bail!()
        }
        else if table.tag == HMTX {
            // A transformed hmtx depends on data recovered from a transformed
            // glyf earlier in this font; a stream with transformed hmtx but
            // no transformed glyf is malformed — error, don't panic.
            let (Some(num_glyphs), Some(num_hmetrics), Some(x_mins)) =
                (num_glyphs, num_hmetrics, x_mins.as_ref())
            else {
                bail!()
            };
            let mut raw_hmtx_table_data = table.data_as_slice(woff_data)?;
            let hmtx_data =
                decode_hmtx_table(&mut raw_hmtx_table_data, num_glyphs, num_hmetrics, x_mins)?;
            let hmtx_table = generate_hmtx_table(&hmtx_data)?;
            let checksum = compute_checksum(&hmtx_table);
            let dest_offset = out.len();
            out.extend_from_slice(&hmtx_table);
            out.resize(Round4!(out.len()), 0);
            let hmtx_metadata = TableMetadata {
                checksum,
                dst_offset: dest_offset as u32,
                dst_length: hmtx_table.len() as u32,
            };
            table_metadata[table_idx] = Some(hmtx_metadata);
            hmtx_metadata
        } else {
            // No other table transform is defined.
            bail!()
        };
        font_checksum = font_checksum.wrapping_add(metadata.checksum);
        out_header.update_table_entry(font_idx, table.tag, metadata);
        // The directory entry just written also contributes its three words
        // (checkSum, offset, length) to the font checksum.
        font_checksum = font_checksum.wrapping_add(metadata.header_checksum_contribution());
    }
    // checkSumAdjustment = 0xB1B0AFBA - (checksum of the entire font).
    let checksum_adjustment = 0xB1B0AFBA_u32.wrapping_sub(font_checksum);
    if let Some(head_table_idx) = font_entry.head_idx {
        let head_table_metadata = &table_metadata[head_table_idx as usize]
            .expect("Every table in the font should have metadata at this point");
        // Patch checkSumAdjustment at byte offset 8 of the emitted head table.
        let mut writer = &mut out[head_table_metadata.dst_offset as usize + 8..];
        writer.put_u32(checksum_adjustment);
    }
    Ok(())
}
fn read_num_hmetrics(mut hhea_data: &[u8]) -> Result<u16, WuffErr> {
hhea_data.advance(34); Ok(hhea_data.try_get_u16()?)
}
/// The in-progress sfnt header (collection header + per-font table
/// directories) plus the bookkeeping needed to patch it as tables are written.
struct HeaderData {
    // Raw header bytes; directory entries start zeroed and are backfilled
    // via update_table_entry.
    data: Vec<u8>,
    // Running checksum of the header bytes accumulated by generate_header.
    checksum: u32,
    // Per-font info: directory-entry offsets by tag and a running checksum.
    font_infos: Vec<WOFF2FontInfo>,
}
/// Placement and checksum of one reconstructed table in the output sfnt.
#[derive(Clone, Copy, Default)]
struct TableMetadata {
    checksum: u32,
    dst_offset: u32,
    dst_length: u32,
}
impl TableMetadata {
    /// A zero destination offset marks a slot that has not been filled yet
    /// (the header always precedes the first table, so 0 is never a real
    /// table offset).
    pub fn is_already_computed(&self) -> bool {
        self.dst_offset > 0
    }
    /// The amount this table's directory entry (checkSum + offset + length
    /// words) adds to the font checksum, with wrapping arithmetic.
    pub fn header_checksum_contribution(&self) -> u32 {
        [self.dst_offset, self.dst_length]
            .iter()
            .fold(self.checksum, |acc, &word| acc.wrapping_add(word))
    }
}
impl HeaderData {
    /// Backfill the checkSum/offset/length words of `tag`'s table-directory
    /// entry for font `font_idx`, and fold the same three words into that
    /// font's running header checksum.
    fn update_table_entry(&mut self, font_idx: usize, tag: Tag, metadata: TableMetadata) {
        let table_entry_offset = self.font_infos[font_idx].table_entry_by_tag[&tag];
        // Skip the 4-byte tag; overwrite the next 12 bytes.
        let mut out = &mut self.data[(table_entry_offset + 4)..(table_entry_offset + 16)];
        out.put_u32(metadata.checksum);
        out.put_u32(metadata.dst_offset);
        out.put_u32(metadata.dst_length);
        // Reuse the shared helper instead of duplicating the three wrapping
        // adds, so the header and font checksums cannot drift apart.
        let info = &mut self.font_infos[font_idx];
        info.header_checksum = info
            .header_checksum
            .wrapping_add(metadata.header_checksum_contribution());
    }
}
/// Total byte size of the output header: one table directory per font, plus
/// the collection header when emitting a TTC.
fn compute_header_size(collection_directory: &CollectionDirectory, is_collection: bool) -> usize {
    let mut size = collection_directory.table_directories_required_size();
    if is_collection {
        size += collection_directory.collection_header_required_size();
    }
    size
}
/// Build the initial sfnt header block: the collection header (TTC output
/// only) followed by one table directory per font, with each entry's
/// checkSum/offset/length left zeroed for reconstruct_font to backfill.
/// Also records, per font, where each tag's directory entry lives and a
/// running checksum of the directory bytes.
fn generate_header(
    header: &WoffHeader,
    tables: &TableDirectory,
    collection_directory: &CollectionDirectory,
) -> HeaderData {
    let num_fonts = collection_directory.fonts.len();
    let size_of_header = compute_header_size(collection_directory, header.is_collection());
    let mut output: Vec<u8> = Vec::with_capacity(size_of_header);
    let mut font_infos: Vec<WOFF2FontInfo> = vec![WOFF2FontInfo::default(); num_fonts];
    let mut checksum: u32 = 0;
    if header.is_collection() {
        // Collection header: tag, version, numFonts (3 x u32 = 12 bytes).
        output.put_u32(u32::from_be_bytes(header.flavor.to_be_bytes())); output.put_u32(collection_directory.version); output.put_u32(num_fonts as u32);
        // Directory offsets start after the 12-byte header plus one u32 per
        // font; version 2.0 inserts an extra 12-byte record (presumably the
        // DSIG fields — see the three zero words written below).
        let first_table_directory_offset = match collection_directory.version {
            0x00010000 => 12 + (4 * num_fonts as u32),
            0x00020000 => 12 + 12 + (4 * num_fonts as u32),
            _ => unreachable!("Only 1.0 and 2.0 are supported versions"),
        };
        let mut table_directory_offset = first_table_directory_offset;
        for font in collection_directory.fonts.iter() {
            output.put_u32(table_directory_offset);
            table_directory_offset += font.table_directory_size() as u32;
        }
        if collection_directory.version == 0x00020000 {
            // The extra 2.0 record, emitted as three zero words.
            output.put_u32(0); output.put_u32(0); output.put_u32(0); }
        checksum = checksum.wrapping_add(compute_checksum(&output));
    }
    for (font, info) in collection_directory.fonts.iter().zip(font_infos.iter_mut()) {
        let start_offset = output.len();
        write_table_directory_header(&mut output, font.flavor, font.table_indices.len() as u16);
        for &table_index in &font.table_indices {
            let tag = tables[table_index as usize].tag;
            // Remember where this entry starts so update_table_entry can
            // backfill its checkSum/offset/length once the table is written.
            info.table_entry_by_tag.insert(tag, output.len());
            write_empty_offset_table_entry(&mut output, tag);
        }
        // Checksum of this font's directory only (from start_offset on).
        info.header_checksum = compute_checksum(&output[start_offset..]);
        checksum = checksum.wrapping_add(info.header_checksum);
    }
    HeaderData {
        data: output,
        font_infos,
        checksum,
    }
}
/// Emit a 16-byte table-directory entry: the tag followed by zeroed
/// checkSum, offset, and length placeholders (filled in later).
fn write_empty_offset_table_entry(output: &mut impl BufMut, tag: Tag) {
    output.put_u32(u32::from_be_bytes(tag.to_be_bytes()));
    for _ in 0..3 {
        output.put_u32(0);
    }
}