use crate::parser::{read_i16, read_u16};
use crate::Error;
// Cap on axisValueMap pairs accepted per axis. `parse` rejects larger
// counts (BadStructure) to bound allocation for untrusted font data.
const MAX_SEGMENTS: u16 = 256;
/// Parsed `avar` (axis variations) table.
///
/// The `Default` value has no segment maps, which `remap_normalised`
/// treats as the identity mapping.
#[derive(Debug, Clone, Default)]
pub struct AvarTable {
    // One (fromCoord, toCoord) list per axis, in axis order.
    // `parse` guarantees fromCoords are non-descending within a list.
    segments: Vec<Vec<(f32, f32)>>,
}
impl AvarTable {
pub fn parse(bytes: &[u8]) -> Result<Self, Error> {
if bytes.len() < 8 {
return Err(Error::UnexpectedEof);
}
let major = read_u16(bytes, 0)?;
if major != 1 {
return Ok(Self::default());
}
let axis_count = read_u16(bytes, 6)?;
let mut off = 8usize;
let mut segments = Vec::with_capacity(axis_count as usize);
for _ in 0..axis_count {
if off + 2 > bytes.len() {
return Err(Error::UnexpectedEof);
}
let n = read_u16(bytes, off)?;
off += 2;
if n > MAX_SEGMENTS {
return Err(Error::BadStructure("avar segment count exceeds cap"));
}
let need = (n as usize).checked_mul(4).ok_or(Error::BadOffset)?;
if off + need > bytes.len() {
return Err(Error::UnexpectedEof);
}
let mut list = Vec::with_capacity(n as usize);
let mut prev_from = f32::NEG_INFINITY;
for _ in 0..n {
let from = f2dot14(read_i16(bytes, off)?);
let to = f2dot14(read_i16(bytes, off + 2)?);
off += 4;
if from < prev_from {
return Err(Error::BadStructure("avar fromCoord not ascending"));
}
prev_from = from;
list.push((from, to));
}
segments.push(list);
}
Ok(Self { segments })
}
pub fn remap_normalised(&self, axis_index: usize, n: f32) -> f32 {
let n = n.clamp(-1.0, 1.0);
let segs = match self.segments.get(axis_index) {
Some(s) if !s.is_empty() => s,
_ => return n,
};
if n <= segs[0].0 {
return segs[0].1;
}
if n >= segs[segs.len() - 1].0 {
return segs[segs.len() - 1].1;
}
for w in segs.windows(2) {
let (f0, t0) = w[0];
let (f1, t1) = w[1];
if n >= f0 && n <= f1 {
if (f1 - f0).abs() < f32::EPSILON {
return t0;
}
let alpha = (n - f0) / (f1 - f0);
return t0 + alpha * (t1 - t0);
}
}
n
}
pub fn axis_count(&self) -> usize {
self.segments.len()
}
}
/// Convert a raw F2DOT14 value (2 integer bits, 14 fractional bits)
/// to an `f32`; 16384 maps to 1.0, so the range is [-2.0, 2.0).
#[inline]
fn f2dot14(raw: i16) -> f32 {
    f32::from(raw) / 16384.0
}
#[cfg(test)]
mod tests {
    use super::*;

    // Build a version-1 avar table with `axis_count` axes whose
    // positionMapCounts are all zero (the buffer is zero-filled, so
    // each per-axis u16 count reads as 0).
    fn build_empty(axis_count: u16) -> Vec<u8> {
        // 8-byte header + one u16 count per axis.
        let mut b = vec![0u8; 8 + (axis_count as usize) * 2];
        // majorVersion = 1 at offset 0; axisCount at offset 6.
        b[0..2].copy_from_slice(&1u16.to_be_bytes());
        b[6..8].copy_from_slice(&axis_count.to_be_bytes());
        b
    }

    // Axes with zero segment pairs — and out-of-range axis indices —
    // must behave as the identity map.
    #[test]
    fn avar_remap_identity_when_no_segments() {
        let raw = build_empty(2);
        let a = AvarTable::parse(&raw).expect("parse");
        for &v in &[-1.0f32, -0.5, 0.0, 0.25, 1.0] {
            assert_eq!(a.remap_normalised(0, v), v);
            assert_eq!(a.remap_normalised(1, v), v);
        }
        // Unknown axis index passes the value through unchanged.
        assert_eq!(a.remap_normalised(99, 0.5), 0.5);
    }

    // A three-pair map {-1 -> -1, 0 -> 0, 1 -> 1} is the identity
    // within floating-point tolerance.
    #[test]
    fn avar_remap_identity_segments() {
        // Header (8) + positionMapCount (2) + 3 pairs * 4 bytes (12).
        let mut b = vec![0u8; 8 + 2 + 12];
        b[0..2].copy_from_slice(&1u16.to_be_bytes());
        b[6..8].copy_from_slice(&1u16.to_be_bytes());
        b[8..10].copy_from_slice(&3u16.to_be_bytes());
        // F2DOT14 raw values: -16384 = -1.0, 0 = 0.0, 16384 = 1.0,
        // laid out as alternating from/to i16s starting at offset 10.
        for (i, &v) in [-16384i16, -16384, 0, 0, 16384, 16384].iter().enumerate() {
            let off = 10 + i * 2;
            b[off..off + 2].copy_from_slice(&v.to_be_bytes());
        }
        let a = AvarTable::parse(&b).unwrap();
        for &v in &[-1.0f32, -0.5, 0.0, 0.25, 1.0] {
            assert!((a.remap_normalised(0, v) - v).abs() < 1e-6);
        }
    }

    // Linear interpolation between pairs: a breakpoint at (0.5, 0.25)
    // makes the map piecewise-linear on [0, 0.5] and [0.5, 1].
    #[test]
    fn avar_remap_piecewise_linear() {
        // Header (8) + positionMapCount (2) + 4 pairs * 4 bytes (16).
        let mut b = vec![0u8; 8 + 2 + 16];
        b[0..2].copy_from_slice(&1u16.to_be_bytes());
        b[6..8].copy_from_slice(&1u16.to_be_bytes());
        b[8..10].copy_from_slice(&4u16.to_be_bytes());
        let pairs: [(i16, i16); 4] = [
            (-16384, -16384),
            (0, 0),
            (16384 / 2, 16384 / 4),
            (16384, 16384),
        ];
        for (i, (f, t)) in pairs.iter().enumerate() {
            let off = 10 + i * 4;
            b[off..off + 2].copy_from_slice(&f.to_be_bytes());
            b[off + 2..off + 4].copy_from_slice(&t.to_be_bytes());
        }
        let a = AvarTable::parse(&b).unwrap();
        assert!(a.remap_normalised(0, 0.0).abs() < 1e-6);
        // 0.25 is halfway between (0, 0) and (0.5, 0.25) -> 0.125.
        assert!((a.remap_normalised(0, 0.25) - 0.125).abs() < 1e-4);
        // 0.75 is halfway between (0.5, 0.25) and (1, 1) -> 0.625.
        assert!((a.remap_normalised(0, 0.75) - 0.625).abs() < 1e-4);
        assert!((a.remap_normalised(0, 1.0) - 1.0).abs() < 1e-4);
    }

    // Major versions other than 1 (e.g. avar 2) are not parsed; the
    // table degrades to the identity mapping instead of erroring.
    #[test]
    fn avar_v2_falls_back_to_identity() {
        let mut b = vec![0u8; 8];
        b[0..2].copy_from_slice(&2u16.to_be_bytes());
        let a = AvarTable::parse(&b).expect("parse");
        assert_eq!(a.remap_normalised(0, 0.5), 0.5);
    }
}