use crate::parser::{read_i16, read_i8, read_u16, read_u32, read_u8};
use crate::Error;
/// Sanity cap on tupleVariationCount per glyph (low 12 bits allow up to 4095).
const MAX_TUPLES_PER_GLYPH: u16 = 4096;
/// Sanity cap on the point count accepted by `glyph_deltas`.
const MAX_POINTS_PER_GLYPH: usize = 0xFFFF;
/// gvar header flag: per-glyph offsets are 32-bit (otherwise 16-bit, stored halved).
const FLAG_LONG_OFFSETS: u16 = 0x0001;
/// tupleIndex flag: the tuple header embeds its own peak tuple.
const TI_EMBEDDED_PEAK: u16 = 0x8000;
/// tupleIndex flag: the header carries intermediate start/end region tuples.
const TI_INTERMEDIATE: u16 = 0x4000;
/// tupleIndex flag: the tuple's serialized data begins with private point numbers.
const TI_PRIVATE_POINTS: u16 = 0x2000;
/// Mask extracting the shared-tuple index from the tupleIndex field.
const TI_TUPLE_INDEX_MASK: u16 = 0x0FFF;
/// Borrowed view over an OpenType `gvar` (glyph variations) table.
///
/// Holds the raw table bytes plus the decoded header fields and the
/// materialized per-glyph offset array needed to locate variation data.
#[derive(Debug, Clone)]
pub struct GvarTable<'a> {
    // Raw bytes of the entire gvar table.
    bytes: &'a [u8],
    // Number of variation axes (checked against coord vectors in glyph_deltas).
    axis_count: u16,
    // Number of entries in the shared tuples array.
    shared_tuple_count: u16,
    // Byte offset from the table start to the shared tuples array.
    shared_tuples_offset: usize,
    // Number of glyphs covered by the offset array.
    glyph_count: u16,
    // glyph_count + 1 offsets into the glyph variation data area,
    // already rescaled to bytes (short offsets are stored halved).
    offsets: Vec<u32>,
    // Byte offset from the table start to the glyph variation data area.
    glyph_data_array_offset: usize,
}
impl<'a> GvarTable<'a> {
pub fn parse(bytes: &'a [u8]) -> Result<Self, Error> {
if bytes.len() < 20 {
return Err(Error::UnexpectedEof);
}
let major = read_u16(bytes, 0)?;
if major != 1 {
return Err(Error::BadStructure("gvar version not 1.x"));
}
let axis_count = read_u16(bytes, 4)?;
let shared_tuple_count = read_u16(bytes, 6)?;
let shared_tuples_offset = read_u32(bytes, 8)? as usize;
let glyph_count = read_u16(bytes, 12)?;
let flags = read_u16(bytes, 14)?;
let long_offsets = flags & FLAG_LONG_OFFSETS != 0;
let glyph_data_array_offset = read_u32(bytes, 16)? as usize;
let entry = if long_offsets { 4 } else { 2 };
let off_array_start = 20usize;
let off_array_end = off_array_start
.checked_add(entry * (glyph_count as usize + 1))
.ok_or(Error::BadOffset)?;
if bytes.len() < off_array_end {
return Err(Error::UnexpectedEof);
}
let mut offsets = Vec::with_capacity(glyph_count as usize + 1);
for i in 0..=glyph_count as usize {
let off = off_array_start + i * entry;
let v = if long_offsets {
read_u32(bytes, off)?
} else {
read_u16(bytes, off)? as u32 * 2
};
offsets.push(v);
}
Ok(Self {
bytes,
axis_count,
shared_tuple_count,
shared_tuples_offset,
glyph_count,
offsets,
glyph_data_array_offset,
})
}
pub fn axis_count(&self) -> u16 {
self.axis_count
}
pub fn glyph_count(&self) -> u16 {
self.glyph_count
}
fn shared_tuple(&self, i: u16) -> Result<Vec<f32>, Error> {
if i >= self.shared_tuple_count {
return Err(Error::BadStructure("gvar shared tuple index out of range"));
}
let stride = self.axis_count as usize * 2;
let off = self
.shared_tuples_offset
.checked_add(i as usize * stride)
.ok_or(Error::BadOffset)?;
if off + stride > self.bytes.len() {
return Err(Error::UnexpectedEof);
}
let mut t = Vec::with_capacity(self.axis_count as usize);
for ai in 0..self.axis_count as usize {
t.push(f2dot14(read_i16(self.bytes, off + ai * 2)?));
}
Ok(t)
}
pub fn glyph_deltas(
&self,
glyph_id: u16,
num_points: u16,
coords: &[f32],
) -> Result<Vec<(i32, i32)>, Error> {
let np = num_points as usize;
if np > MAX_POINTS_PER_GLYPH {
return Err(Error::BadStructure("gvar point count exceeds cap"));
}
let mut out = vec![(0i32, 0i32); np];
if glyph_id >= self.glyph_count {
return Err(Error::GlyphOutOfRange(glyph_id));
}
if coords.len() != self.axis_count as usize {
return Err(Error::BadStructure(
"gvar coord vector length != fvar axis count",
));
}
let start = self.offsets[glyph_id as usize] as usize;
let end = self.offsets[glyph_id as usize + 1] as usize;
if end <= start {
return Ok(out);
}
let block_off = self
.glyph_data_array_offset
.checked_add(start)
.ok_or(Error::BadOffset)?;
let block_len = end - start;
if block_off + block_len > self.bytes.len() {
return Err(Error::UnexpectedEof);
}
let block = &self.bytes[block_off..block_off + block_len];
if block.len() < 4 {
return Ok(out);
}
let tuple_count = read_u16(block, 0)?;
let n_tuples = tuple_count & 0x0FFF; if n_tuples > MAX_TUPLES_PER_GLYPH {
return Err(Error::BadStructure("gvar tupleVariationCount > cap"));
}
let data_offset = read_u16(block, 2)? as usize;
if data_offset > block.len() {
return Err(Error::BadOffset);
}
let mut hdr_off = 4usize;
let mut data_cursor = data_offset;
let total_points = np + 4;
let shared_points: Option<Vec<u16>> = if data_offset < block.len() {
let shared_slice = &block[data_offset..];
let (pts, used) = decode_packed_points(shared_slice, total_points as u16)?;
data_cursor = data_offset + used;
Some(pts)
} else {
None
};
for _ in 0..n_tuples {
if hdr_off + 4 > block.len() {
return Err(Error::BadStructure("gvar tuple header truncated"));
}
let var_data_size = read_u16(block, hdr_off)? as usize;
let tuple_index = read_u16(block, hdr_off + 2)?;
hdr_off += 4;
let peak = if tuple_index & TI_EMBEDDED_PEAK != 0 {
let need = self.axis_count as usize * 2;
if hdr_off + need > block.len() {
return Err(Error::BadStructure("gvar embedded peak truncated"));
}
let mut p = Vec::with_capacity(self.axis_count as usize);
for ai in 0..self.axis_count as usize {
p.push(f2dot14(read_i16(block, hdr_off + ai * 2)?));
}
hdr_off += need;
p
} else {
let idx = tuple_index & TI_TUPLE_INDEX_MASK;
self.shared_tuple(idx)?
};
let (start_t, end_t) = if tuple_index & TI_INTERMEDIATE != 0 {
let need = self.axis_count as usize * 4;
if hdr_off + need > block.len() {
return Err(Error::BadStructure("gvar intermediate region truncated"));
}
let mut s = Vec::with_capacity(self.axis_count as usize);
let mut e = Vec::with_capacity(self.axis_count as usize);
for ai in 0..self.axis_count as usize {
s.push(f2dot14(read_i16(block, hdr_off + ai * 2)?));
}
for ai in 0..self.axis_count as usize {
e.push(f2dot14(read_i16(
block,
hdr_off + self.axis_count as usize * 2 + ai * 2,
)?));
}
hdr_off += need;
(Some(s), Some(e))
} else {
(None, None)
};
let scalar = tuple_scalar(coords, &peak, start_t.as_deref(), end_t.as_deref());
if data_cursor + var_data_size > block.len() {
return Err(Error::BadStructure("gvar tuple data overruns"));
}
let tuple_data = &block[data_cursor..data_cursor + var_data_size];
data_cursor += var_data_size;
if scalar == 0.0 {
continue;
}
let mut td_off = 0usize;
let points = if tuple_index & TI_PRIVATE_POINTS != 0 {
let (pts, used) = decode_packed_points(tuple_data, total_points as u16)?;
td_off += used;
pts
} else {
shared_points.clone().unwrap_or_else(|| {
(0..total_points as u16).collect()
})
};
let n_pts = points.len();
let dxs = decode_packed_deltas(tuple_data, &mut td_off, n_pts)?;
let dys = decode_packed_deltas(tuple_data, &mut td_off, n_pts)?;
for (i, &p_idx) in points.iter().enumerate() {
let pi = p_idx as usize;
if pi >= np {
continue;
}
let dx = (dxs[i] as f32 * scalar).round() as i32;
let dy = (dys[i] as f32 * scalar).round() as i32;
out[pi].0 += dx;
out[pi].1 += dy;
}
}
Ok(out)
}
}
/// Converts a raw F2Dot14 fixed-point value (2 integer bits, 14 fractional
/// bits) to an `f32`.
#[inline]
fn f2dot14(raw: i16) -> f32 {
    // 1/16384 is a power of two, so multiplying by the reciprocal is exact
    // and identical to dividing by 16384.0.
    f32::from(raw) * (1.0 / 16384.0)
}
/// Computes the scalar weight of one tuple variation at the given
/// normalized design-space coordinates.
///
/// `peak` gives the tuple's peak position per axis; `start`/`end`, when
/// both present, describe an intermediate region. Returns `0.0` when the
/// coordinates fall outside the tuple's region of influence, otherwise the
/// product of the per-axis interpolation factors.
fn tuple_scalar(coords: &[f32], peak: &[f32], start: Option<&[f32]>, end: Option<&[f32]>) -> f32 {
    let mut scalar = 1.0f32;
    for (axis, &coord) in coords.iter().enumerate() {
        let pk = peak.get(axis).copied().unwrap_or(0.0);
        // A zero peak means this axis does not constrain the tuple, and a
        // coordinate sitting exactly on the peak contributes a factor of 1.
        if pk == 0.0 || coord == pk {
            continue;
        }
        // Nonzero coordinate on the opposite side of the default: no effect.
        if coord != 0.0 && (coord < 0.0) != (pk < 0.0) {
            return 0.0;
        }
        if let (Some(st), Some(en)) = (start, end) {
            // Intermediate region: interpolate within [lo, pk] or [pk, hi].
            let lo = st.get(axis).copied().unwrap_or(0.0);
            let hi = en.get(axis).copied().unwrap_or(0.0);
            if coord < lo || coord > hi {
                return 0.0;
            }
            let factor = if coord < pk {
                if (pk - lo).abs() < f32::EPSILON {
                    return 0.0;
                }
                (coord - lo) / (pk - lo)
            } else {
                if (hi - pk).abs() < f32::EPSILON {
                    return 0.0;
                }
                (hi - coord) / (hi - pk)
            };
            scalar *= factor;
        } else {
            // Default region: linear ramp from 0 at the origin to 1 at the peak.
            if coord.abs() > pk.abs() || pk.abs() < f32::EPSILON {
                return 0.0;
            }
            scalar *= coord / pk;
        }
    }
    scalar
}
/// Decodes a gvar packed point-number list.
///
/// Returns the decoded point numbers and how many bytes of `bytes` were
/// consumed. A leading zero byte is the "all points" shorthand, expanded to
/// `0..total_points`. Otherwise a 7- or 15-bit count is followed by runs of
/// byte- or word-sized deltas that accumulate into point numbers.
pub(crate) fn decode_packed_points(
    bytes: &[u8],
    total_points: u16,
) -> Result<(Vec<u16>, usize), Error> {
    if bytes.is_empty() {
        return Err(Error::BadStructure("gvar packed points truncated"));
    }
    let mut cursor = 0usize;
    let control = read_u8(bytes, cursor)?;
    cursor += 1;
    // Sentinel: every point in the glyph is referenced.
    if control == 0 {
        let all: Vec<u16> = (0..total_points).collect();
        return Ok((all, cursor));
    }
    // High bit set: the count is 15 bits spread over two bytes.
    let count = if control & 0x80 == 0 {
        u16::from(control)
    } else {
        let low = read_u8(bytes, cursor)?;
        cursor += 1;
        (u16::from(control & 0x7F) << 8) | u16::from(low)
    };
    let mut points = Vec::with_capacity(count as usize);
    let mut prev: u16 = 0;
    while points.len() < count as usize {
        let run_header = read_u8(bytes, cursor)?;
        cursor += 1;
        // Bit 7 selects word-sized deltas; low 7 bits are run length - 1.
        let as_words = run_header & 0x80 != 0;
        let run_len = usize::from(run_header & 0x7F) + 1;
        for _ in 0..run_len {
            if points.len() >= count as usize {
                break;
            }
            let step = if as_words {
                let w = read_u16(bytes, cursor)?;
                cursor += 2;
                w
            } else {
                let b = u16::from(read_u8(bytes, cursor)?);
                cursor += 1;
                b
            };
            // Point numbers are cumulative; reject u16 wrap-around.
            prev = prev
                .checked_add(step)
                .ok_or(Error::BadStructure("gvar packed point overflow"))?;
            points.push(prev);
        }
    }
    Ok((points, cursor))
}
/// Decodes `n` deltas from a gvar packed-delta stream starting at `*off`.
///
/// Advances `*off` past the consumed bytes. Each run header selects one of
/// three encodings: all-zero (bit 7), signed words (bit 6), or signed bytes;
/// the low 6 bits give the run length minus one.
pub(crate) fn decode_packed_deltas(
    bytes: &[u8],
    off: &mut usize,
    n: usize,
) -> Result<Vec<i32>, Error> {
    let mut deltas = Vec::with_capacity(n);
    while deltas.len() < n {
        if *off >= bytes.len() {
            return Err(Error::BadStructure("gvar packed deltas truncated"));
        }
        let control = read_u8(bytes, *off)?;
        *off += 1;
        let run_is_zero = control & 0x80 != 0;
        let run_is_words = control & 0x40 != 0;
        let run_len = usize::from(control & 0x3F) + 1;
        for _ in 0..run_len {
            // A run may promise more deltas than remain to be read.
            if deltas.len() >= n {
                break;
            }
            let value = if run_is_zero {
                // Zero runs carry no payload bytes.
                0
            } else if run_is_words {
                let v = i32::from(read_i16(bytes, *off)?);
                *off += 2;
                v
            } else {
                let v = i32::from(read_i8(bytes, *off)?);
                *off += 1;
                v
            };
            deltas.push(value);
        }
    }
    Ok(deltas)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Builds a minimal gvar table: version 1.0, one axis, one glyph,
    // short offsets (both zero), and an empty glyph-variation data area
    // starting at byte 24.
    fn build_empty_one_glyph() -> Vec<u8> {
        let mut b = vec![0u8; 20 + 4];
        b[0..2].copy_from_slice(&1u16.to_be_bytes()); b[4..6].copy_from_slice(&1u16.to_be_bytes()); b[12..14].copy_from_slice(&1u16.to_be_bytes()); b[16..20].copy_from_slice(&24u32.to_be_bytes()); b
    }
    // A glyph with no variation data must yield all-zero deltas for any coords.
    #[test]
    fn gvar_zero_coords_yields_static_outline() {
        let raw = build_empty_one_glyph();
        let g = GvarTable::parse(&raw).expect("parse");
        let deltas = g.glyph_deltas(0, 5, &[0.5]).expect("deltas");
        assert_eq!(deltas.len(), 5);
        assert!(deltas.iter().all(|&(x, y)| x == 0 && y == 0));
        let deltas0 = g.glyph_deltas(0, 5, &[0.0]).expect("deltas");
        assert!(deltas0.iter().all(|&(x, y)| x == 0 && y == 0));
    }
    // A leading zero byte is the "all points" sentinel: only 1 byte consumed,
    // trailing bytes untouched.
    #[test]
    fn gvar_packed_points_all_sentinel() {
        let (pts, used) = decode_packed_points(&[0x00, 0xff, 0xff], 5).unwrap();
        assert_eq!(pts, vec![0, 1, 2, 3, 4]);
        assert_eq!(used, 1);
    }
    // Count 3, one byte-run of three +1 deltas -> cumulative points 1,2,3.
    #[test]
    fn gvar_packed_points_short_run() {
        let raw = [3u8, 0x02, 1, 1, 1];
        let (pts, used) = decode_packed_points(&raw, 100).unwrap();
        assert_eq!(pts, vec![1, 2, 3]);
        assert_eq!(used, 5);
    }
    // 0x41 = word run of 2 (bit 6 set), then 0x82 = zero run of 3 (bit 7 set).
    #[test]
    fn gvar_packed_deltas_words_then_zeros() {
        let mut raw = vec![0x41u8];
        raw.extend_from_slice(&10i16.to_be_bytes());
        raw.extend_from_slice(&(-3i16).to_be_bytes());
        raw.push(0x82);
        let mut off = 0usize;
        let d = decode_packed_deltas(&raw, &mut off, 5).unwrap();
        assert_eq!(d, vec![10, -3, 0, 0, 0]);
        assert_eq!(off, 6);
    }
    // 0x03 = byte run of 4 signed bytes; 0xFF/0xFE decode as -1/-2.
    #[test]
    fn gvar_packed_deltas_byte_run() {
        let raw = [0x03u8, 1, 0xFF, 2, 0xFE];
        let mut off = 0usize;
        let d = decode_packed_deltas(&raw, &mut off, 4).unwrap();
        assert_eq!(d, vec![1, -1, 2, -2]);
    }
    // Coordinate exactly at the peak contributes a factor of 1.
    #[test]
    fn gvar_tuple_scalar_at_peak_is_one() {
        let coords = [0.5];
        let peak = [0.5];
        assert!((tuple_scalar(&coords, &peak, None, None) - 1.0).abs() < 1e-6);
    }
    // Coordinate at the default position contributes zero influence.
    #[test]
    fn gvar_tuple_scalar_at_zero_is_zero() {
        let coords = [0.0];
        let peak = [1.0];
        assert!(tuple_scalar(&coords, &peak, None, None).abs() < 1e-6);
    }
    // Default region: linear ramp toward the peak; wrong sign kills the tuple.
    #[test]
    fn gvar_tuple_scalar_default_region_linear() {
        assert!((tuple_scalar(&[0.5], &[1.0], None, None) - 0.5).abs() < 1e-6);
        assert!((tuple_scalar(&[-0.25], &[-1.0], None, None) - 0.25).abs() < 1e-6);
        assert_eq!(tuple_scalar(&[-0.5], &[1.0], None, None), 0.0);
    }
    // Intermediate region [0,2] with peak 1: symmetric ramp, zero outside.
    #[test]
    fn gvar_tuple_scalar_intermediate_region() {
        let s = tuple_scalar(&[0.5], &[1.0], Some(&[0.0]), Some(&[2.0]));
        assert!((s - 0.5).abs() < 1e-6);
        let s = tuple_scalar(&[1.5], &[1.0], Some(&[0.0]), Some(&[2.0]));
        assert!((s - 0.5).abs() < 1e-6);
        let s = tuple_scalar(&[2.5], &[1.0], Some(&[0.0]), Some(&[2.0]));
        assert_eq!(s, 0.0);
    }
}