use anyhow::Result;
use bytemuck::cast_slice;
use oxihuman_mesh::MeshBuffers;
use serde_json::json;
use std::io::Write;
use std::path::Path;
// GLB container header constants: the magic is ASCII "glTF" read as a
// little-endian u32; version 2 is the binary glTF 2.0 container format.
const GLB_MAGIC: u32 = 0x46546C67; const GLB_VERSION: u32 = 2;
// Chunk type identifiers: ASCII "JSON" and "BIN\0" as little-endian u32s.
const CHUNK_JSON: u32 = 0x4E4F534A; const CHUNK_BIN: u32 = 0x004E4942;
/// A single animation keyframe: a timestamp plus the full vertex position set.
#[allow(dead_code)]
pub struct AnimFrame {
    /// Keyframe time in seconds.
    pub time: f32,
    /// Absolute vertex positions at this time.
    pub positions: Vec<[f32; 3]>,
}

impl AnimFrame {
    /// Creates a keyframe from a timestamp and its vertex positions.
    #[allow(dead_code)]
    pub fn new(time: f32, positions: Vec<[f32; 3]>) -> Self {
        Self { time, positions }
    }

    /// Per-vertex offsets of this frame relative to `base` positions.
    /// Vertices are paired by index; extra entries on either side are
    /// dropped (zip semantics).
    #[allow(dead_code)]
    pub fn deltas_from_base(&self, base: &[[f32; 3]]) -> Vec<[f32; 3]> {
        self.positions
            .iter()
            .zip(base)
            .map(|(frame_pos, base_pos)| {
                std::array::from_fn(|axis| frame_pos[axis] - base_pos[axis])
            })
            .collect()
    }
}
/// A vertex animation described by absolute positions per keyframe.
#[allow(dead_code)]
pub struct VertexAnimation {
    /// Human-readable animation name (written into the glTF document).
    pub name: String,
    /// Ordered keyframes; frame 0 serves as the base pose for deltas.
    pub frames: Vec<AnimFrame>,
    /// Nominal playback rate in frames per second.
    pub fps: f32,
}

impl VertexAnimation {
    /// Creates an empty animation with the given name and frame rate.
    #[allow(dead_code)]
    pub fn new(name: impl Into<String>, fps: f32) -> Self {
        Self {
            name: name.into(),
            frames: Vec::new(),
            fps,
        }
    }

    /// Appends a keyframe at the end of the sequence.
    #[allow(dead_code)]
    pub fn add_frame(&mut self, frame: AnimFrame) {
        self.frames.push(frame);
    }

    /// Number of keyframes currently stored.
    #[allow(dead_code)]
    pub fn frame_count(&self) -> usize {
        self.frames.len()
    }

    /// Time span between the first and last keyframe; 0.0 when fewer than
    /// two frames exist.
    #[allow(dead_code)]
    pub fn duration(&self) -> f32 {
        match (self.frames.first(), self.frames.last()) {
            (Some(first), Some(last)) if self.frames.len() >= 2 => last.time - first.time,
            _ => 0.0,
        }
    }

    /// Whether no keyframes have been added yet.
    #[allow(dead_code)]
    pub fn is_empty(&self) -> bool {
        self.frames.is_empty()
    }

    /// Per-frame position deltas relative to the first frame (so frame 0
    /// produces all-zero deltas). Empty when there are no frames.
    #[allow(dead_code)]
    pub fn frame_deltas(&self) -> Vec<Vec<[f32; 3]>> {
        let Some(base_frame) = self.frames.first() else {
            return Vec::new();
        };
        let base = &base_frame.positions;
        self.frames
            .iter()
            .map(|frame| frame.deltas_from_base(base))
            .collect()
    }
}
/// Appends `data` to `buf` and zero-pads to the next 4-byte boundary
/// (glTF buffer views must be 4-byte aligned). Returns the byte offset at
/// which `data` starts and its unpadded length.
fn append_aligned(buf: &mut Vec<u8>, data: &[u8]) -> (usize, usize) {
    let offset = buf.len();
    buf.extend_from_slice(data);
    // Pad with zeros in one step rather than byte-by-byte.
    let aligned_len = buf.len().next_multiple_of(4);
    buf.resize(aligned_len, 0x00);
    (offset, data.len())
}
/// Serializes a position list as tightly-packed little-endian f32 triples
/// (12 bytes per vertex), matching the layout glTF accessors expect.
fn positions_to_bytes(positions: &[[f32; 3]]) -> Vec<u8> {
    positions
        .iter()
        .flat_map(|vertex| vertex.iter().flat_map(|component| component.to_le_bytes()))
        .collect()
}
/// Writes a binary glTF (GLB v2) container to `path`: a 12-byte header
/// followed by a JSON chunk and a BIN chunk.
///
/// Per the GLB spec, the JSON chunk is padded to 4 bytes with spaces and the
/// BIN chunk with zero bytes; `total_len` covers header plus both chunks
/// (each with its own 8-byte chunk header).
///
/// # Errors
/// Returns an error if the file cannot be created or any write fails.
fn write_glb(path: &Path, json_bytes: &[u8], bin_data: &[u8]) -> Result<()> {
    let mut json_padded = json_bytes.to_vec();
    while !json_padded.len().is_multiple_of(4) {
        json_padded.push(b' ');
    }
    let mut bin_padded = bin_data.to_vec();
    while !bin_padded.len().is_multiple_of(4) {
        bin_padded.push(0x00);
    }
    let json_chunk_len = json_padded.len() as u32;
    let bin_chunk_len = bin_padded.len() as u32;
    // 12-byte GLB header + two 8-byte chunk headers + padded payloads.
    let total_len = 12 + 8 + json_chunk_len + 8 + bin_chunk_len;
    // Buffer the many small header writes into few syscalls.
    let mut file = std::io::BufWriter::new(std::fs::File::create(path)?);
    file.write_all(&GLB_MAGIC.to_le_bytes())?;
    file.write_all(&GLB_VERSION.to_le_bytes())?;
    file.write_all(&total_len.to_le_bytes())?;
    file.write_all(&json_chunk_len.to_le_bytes())?;
    file.write_all(&CHUNK_JSON.to_le_bytes())?;
    file.write_all(&json_padded)?;
    file.write_all(&bin_chunk_len.to_le_bytes())?;
    file.write_all(&CHUNK_BIN.to_le_bytes())?;
    file.write_all(&bin_padded)?;
    // Flush explicitly: BufWriter's Drop silently swallows flush errors.
    file.flush()?;
    Ok(())
}
#[allow(dead_code)]
pub fn export_vertex_anim_glb(
base_mesh: &MeshBuffers,
anim: &VertexAnimation,
path: &Path,
) -> Result<()> {
let n_verts = base_mesh.positions.len();
let n_idx = base_mesh.indices.len();
let n_frames = anim.frame_count();
let pos_bytes: &[u8] = cast_slice(&base_mesh.positions);
let norm_bytes: &[u8] = cast_slice(&base_mesh.normals);
let uv_bytes: &[u8] = cast_slice(&base_mesh.uvs);
let idx_bytes: &[u8] = cast_slice(&base_mesh.indices);
let mut bin: Vec<u8> = Vec::new();
let (pos_offset, pos_len) = append_aligned(&mut bin, pos_bytes);
let (norm_offset, norm_len) = append_aligned(&mut bin, norm_bytes);
let (uv_offset, uv_len) = append_aligned(&mut bin, uv_bytes);
let (idx_offset, idx_len) = append_aligned(&mut bin, idx_bytes);
let all_deltas = anim.frame_deltas();
let mut delta_sections: Vec<(usize, usize)> = Vec::with_capacity(n_frames);
for deltas in &all_deltas {
let bytes = positions_to_bytes(deltas);
let sec = append_aligned(&mut bin, &bytes);
delta_sections.push(sec);
}
let mut times_bytes: Vec<u8> = Vec::with_capacity(n_frames * 4);
for frame in &anim.frames {
times_bytes.extend_from_slice(&frame.time.to_le_bytes());
}
let (times_offset, times_len) = append_aligned(&mut bin, ×_bytes);
let n_weights = n_frames * n_frames;
let mut weights_bytes: Vec<u8> = Vec::with_capacity(n_weights * 4);
for k in 0..n_frames {
for t in 0..n_frames {
let w: f32 = if t == k { 1.0 } else { 0.0 };
weights_bytes.extend_from_slice(&w.to_le_bytes());
}
}
let (weights_offset, weights_len) = append_aligned(&mut bin, &weights_bytes);
let mut accessors: Vec<serde_json::Value> = Vec::new();
let mut buffer_views: Vec<serde_json::Value> = Vec::new();
let push_bv_acc = |buffer_views: &mut Vec<serde_json::Value>,
accessors: &mut Vec<serde_json::Value>,
offset: usize,
byte_len: usize,
component_type: u32,
count: usize,
type_str: &str| {
let bv_idx = buffer_views.len();
buffer_views.push(json!({
"buffer": 0,
"byteOffset": offset,
"byteLength": byte_len
}));
accessors.push(json!({
"bufferView": bv_idx,
"componentType": component_type,
"count": count,
"type": type_str
}));
};
push_bv_acc(
&mut buffer_views,
&mut accessors,
pos_offset,
pos_len,
5126,
n_verts,
"VEC3",
);
push_bv_acc(
&mut buffer_views,
&mut accessors,
norm_offset,
norm_len,
5126,
n_verts,
"VEC3",
);
push_bv_acc(
&mut buffer_views,
&mut accessors,
uv_offset,
uv_len,
5126,
n_verts,
"VEC2",
);
push_bv_acc(
&mut buffer_views,
&mut accessors,
idx_offset,
idx_len,
5125,
n_idx,
"SCALAR",
);
let mut morph_targets: Vec<serde_json::Value> = Vec::with_capacity(n_frames);
for &(d_offset, d_len) in &delta_sections {
let acc_idx = accessors.len();
push_bv_acc(
&mut buffer_views,
&mut accessors,
d_offset,
d_len,
5126,
n_verts,
"VEC3",
);
morph_targets.push(json!({ "POSITION": acc_idx }));
}
let times_acc_idx = accessors.len();
push_bv_acc(
&mut buffer_views,
&mut accessors,
times_offset,
times_len,
5126,
n_frames,
"SCALAR",
);
let weights_acc_idx = accessors.len();
push_bv_acc(
&mut buffer_views,
&mut accessors,
weights_offset,
weights_len,
5126,
n_frames * n_frames,
"SCALAR",
);
let initial_weights: Vec<f32> = vec![0.0_f32; n_frames];
let animations = if n_frames > 0 {
json!([{
"name": anim.name,
"samplers": [{
"input": times_acc_idx,
"output": weights_acc_idx,
"interpolation": "LINEAR"
}],
"channels": [{
"sampler": 0,
"target": { "node": 0, "path": "weights" }
}]
}])
} else {
json!([])
};
let gltf = json!({
"asset": { "version": "2.0", "generator": "oxihuman-export" },
"scene": 0,
"scenes": [{ "nodes": [0] }],
"nodes": [{ "mesh": 0 }],
"meshes": [{
"name": anim.name,
"weights": initial_weights,
"primitives": [{
"attributes": {
"POSITION": 0,
"NORMAL": 1,
"TEXCOORD_0": 2
},
"indices": 3,
"targets": morph_targets
}]
}],
"accessors": accessors,
"bufferViews": buffer_views,
"buffers": [{ "byteLength": bin.len() }],
"animations": animations
});
let json_bytes = serde_json::to_vec(&gltf)?;
write_glb(path, &json_bytes, &bin)
}
#[allow(dead_code)]
/// Exports a GLB that morphs `mesh_a` into `mesh_b` over `duration` seconds.
///
/// `mesh_a` supplies the base geometry; a single morph target stores the
/// per-vertex position deltas `b - a`, and a two-keyframe `weights`
/// animation linearly ramps the target weight from 0 to 1.
///
/// Assumes both meshes share topology and vertex count — TODO confirm with
/// callers; a shorter `mesh_b` would silently truncate the delta list
/// (zip semantics).
///
/// # Errors
/// Returns an error if JSON serialization or file I/O fails.
pub fn export_morph_pair_glb(
    mesh_a: &MeshBuffers,
    mesh_b: &MeshBuffers,
    duration: f32,
    path: &Path,
) -> Result<()> {
    // Component-wise bounds; the glTF 2.0 spec requires `min`/`max` on
    // POSITION accessors (base and morph target) and on animation inputs.
    fn vec3_bounds(points: &[[f32; 3]]) -> ([f32; 3], [f32; 3]) {
        if points.is_empty() {
            return ([0.0; 3], [0.0; 3]);
        }
        let mut lo = [f32::INFINITY; 3];
        let mut hi = [f32::NEG_INFINITY; 3];
        for p in points {
            for axis in 0..3 {
                lo[axis] = lo[axis].min(p[axis]);
                hi[axis] = hi[axis].max(p[axis]);
            }
        }
        (lo, hi)
    }

    let n_verts = mesh_a.positions.len();
    let n_idx = mesh_a.indices.len();
    let deltas: Vec<[f32; 3]> = mesh_a
        .positions
        .iter()
        .zip(mesh_b.positions.iter())
        .map(|(a, b)| [b[0] - a[0], b[1] - a[1], b[2] - a[2]])
        .collect();

    // --- Binary payload ---
    let mut bin: Vec<u8> = Vec::new();
    let (pos_offset, pos_len) = append_aligned(&mut bin, cast_slice(&mesh_a.positions));
    let (norm_offset, norm_len) = append_aligned(&mut bin, cast_slice(&mesh_a.normals));
    let (uv_offset, uv_len) = append_aligned(&mut bin, cast_slice(&mesh_a.uvs));
    let (idx_offset, idx_len) = append_aligned(&mut bin, cast_slice(&mesh_a.indices));
    let (delta_offset, delta_len) = append_aligned(&mut bin, &positions_to_bytes(&deltas));

    // Keyframe times [0, duration] and matching target weights [0, 1].
    let mut times_bytes: Vec<u8> = Vec::with_capacity(8);
    times_bytes.extend_from_slice(&0.0_f32.to_le_bytes());
    times_bytes.extend_from_slice(&duration.to_le_bytes());
    let (times_offset, times_len) = append_aligned(&mut bin, &times_bytes);
    let mut weights_bytes: Vec<u8> = Vec::with_capacity(8);
    weights_bytes.extend_from_slice(&0.0_f32.to_le_bytes());
    weights_bytes.extend_from_slice(&1.0_f32.to_le_bytes());
    let (weights_offset, weights_len) = append_aligned(&mut bin, &weights_bytes);

    // --- Buffer views and accessors ---
    let mut accessors: Vec<serde_json::Value> = Vec::new();
    let mut buffer_views: Vec<serde_json::Value> = Vec::new();
    // `bounds`, when given, becomes the accessor's `min`/`max` pair.
    let push_bv_acc = |buffer_views: &mut Vec<serde_json::Value>,
                       accessors: &mut Vec<serde_json::Value>,
                       offset: usize,
                       byte_len: usize,
                       component_type: u32,
                       count: usize,
                       type_str: &str,
                       bounds: Option<(serde_json::Value, serde_json::Value)>| {
        let bv_idx = buffer_views.len();
        buffer_views.push(json!({
            "buffer": 0,
            "byteOffset": offset,
            "byteLength": byte_len
        }));
        let mut acc = json!({
            "bufferView": bv_idx,
            "componentType": component_type,
            "count": count,
            "type": type_str
        });
        if let Some((min_v, max_v)) = bounds {
            acc["min"] = min_v;
            acc["max"] = max_v;
        }
        accessors.push(acc);
    };

    let (pos_min, pos_max) = vec3_bounds(&mesh_a.positions);
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        pos_offset,
        pos_len,
        5126, // FLOAT
        n_verts,
        "VEC3",
        Some((json!(pos_min), json!(pos_max))),
    );
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        norm_offset,
        norm_len,
        5126,
        n_verts,
        "VEC3",
        None,
    );
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        uv_offset,
        uv_len,
        5126,
        n_verts,
        "VEC2",
        None,
    );
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        idx_offset,
        idx_len,
        5125, // UNSIGNED_INT
        n_idx,
        "SCALAR",
        None,
    );
    let (d_min, d_max) = vec3_bounds(&deltas);
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        delta_offset,
        delta_len,
        5126,
        n_verts,
        "VEC3",
        Some((json!(d_min), json!(d_max))),
    );
    let times_acc_idx = accessors.len();
    // Min/max ordered correctly even if `duration` is negative.
    let t_min = duration.min(0.0);
    let t_max = duration.max(0.0);
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        times_offset,
        times_len,
        5126,
        2,
        "SCALAR",
        Some((json!([t_min]), json!([t_max]))),
    );
    let weights_acc_idx = accessors.len();
    push_bv_acc(
        &mut buffer_views,
        &mut accessors,
        weights_offset,
        weights_len,
        5126,
        2,
        "SCALAR",
        None,
    );

    let gltf = json!({
        "asset": { "version": "2.0", "generator": "oxihuman-export" },
        "scene": 0,
        "scenes": [{ "nodes": [0] }],
        "nodes": [{ "mesh": 0 }],
        "meshes": [{
            "name": "morph_pair",
            "weights": [0.0_f32],
            "primitives": [{
                "attributes": {
                    "POSITION": 0,
                    "NORMAL": 1,
                    "TEXCOORD_0": 2
                },
                "indices": 3,
                "targets": [{ "POSITION": 4 }]
            }]
        }],
        "accessors": accessors,
        "bufferViews": buffer_views,
        "buffers": [{ "byteLength": bin.len() }],
        "animations": [{
            "name": "morph",
            "samplers": [{
                "input": times_acc_idx,
                "output": weights_acc_idx,
                "interpolation": "LINEAR"
            }],
            "channels": [{
                "sampler": 0,
                "target": { "node": 0, "path": "weights" }
            }]
        }]
    });
    let json_bytes = serde_json::to_vec(&gltf)?;
    write_glb(path, &json_bytes, &bin)
}
#[cfg(test)]
mod tests {
    use super::*;
    use oxihuman_mesh::MeshBuffers;
    use oxihuman_morph::engine::MeshBuffers as MB;
    use std::path::PathBuf;

    /// Builds a single-triangle `MeshBuffers` with flat normals and zero UVs.
    fn make_mesh(positions: Vec<[f32; 3]>) -> MeshBuffers {
        let n = positions.len();
        MeshBuffers::from_morph(MB {
            positions,
            normals: vec![[0.0, 0.0, 1.0]; n],
            uvs: vec![[0.0, 0.0]; n],
            indices: vec![0, 1, 2],
            has_suit: true,
        })
    }

    fn triangle_a() -> MeshBuffers {
        make_mesh(vec![[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
    }

    fn triangle_b() -> MeshBuffers {
        make_mesh(vec![[0.1, 0.0, 0.0], [1.1, 0.0, 0.0], [0.1, 1.0, 0.0]])
    }

    /// Scratch file path in the OS temp dir — portable, unlike the previous
    /// hard-coded `/tmp` prefix (which does not exist on Windows).
    fn tmp_path(name: &str) -> PathBuf {
        std::env::temp_dir().join(name)
    }

    #[test]
    fn anim_frame_new_fields() {
        let positions = vec![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];
        let frame = AnimFrame::new(0.5, positions.clone());
        assert_eq!(frame.time, 0.5);
        assert_eq!(frame.positions, positions);
    }

    #[test]
    fn anim_frame_deltas_from_base_zero_when_same() {
        let positions = vec![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];
        let frame = AnimFrame::new(0.0, positions.clone());
        let deltas = frame.deltas_from_base(&positions);
        for d in &deltas {
            assert_eq!(d, &[0.0_f32, 0.0, 0.0]);
        }
    }

    #[test]
    fn anim_frame_deltas_correct_offset() {
        let base = vec![[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]];
        let positions = vec![[0.5, 0.1, 0.2], [1.5, 0.1, 0.2]];
        let frame = AnimFrame::new(1.0, positions);
        let deltas = frame.deltas_from_base(&base);
        assert!((deltas[0][0] - 0.5).abs() < 1e-6);
        assert!((deltas[0][1] - 0.1).abs() < 1e-6);
        assert!((deltas[0][2] - 0.2).abs() < 1e-6);
        assert!((deltas[1][0] - 0.5).abs() < 1e-6);
    }

    #[test]
    fn vertex_animation_frame_count() {
        let mut anim = VertexAnimation::new("walk", 24.0);
        assert_eq!(anim.frame_count(), 0);
        anim.add_frame(AnimFrame::new(0.0, vec![[0.0, 0.0, 0.0]]));
        anim.add_frame(AnimFrame::new(1.0, vec![[1.0, 0.0, 0.0]]));
        assert_eq!(anim.frame_count(), 2);
    }

    #[test]
    fn vertex_animation_duration() {
        let mut anim = VertexAnimation::new("run", 30.0);
        assert_eq!(anim.duration(), 0.0);
        anim.add_frame(AnimFrame::new(0.5, vec![[0.0, 0.0, 0.0]]));
        assert_eq!(anim.duration(), 0.0);
        anim.add_frame(AnimFrame::new(2.5, vec![[1.0, 0.0, 0.0]]));
        assert!((anim.duration() - 2.0).abs() < 1e-6);
    }

    #[test]
    fn vertex_animation_frame_deltas_length() {
        let mut anim = VertexAnimation::new("test", 24.0);
        anim.add_frame(AnimFrame::new(0.0, vec![[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]]));
        anim.add_frame(AnimFrame::new(1.0, vec![[0.1, 0.0, 0.0], [1.1, 0.0, 0.0]]));
        anim.add_frame(AnimFrame::new(2.0, vec![[0.2, 0.0, 0.0], [1.2, 0.0, 0.0]]));
        let deltas = anim.frame_deltas();
        assert_eq!(deltas.len(), 3);
        for d in &deltas {
            assert_eq!(d.len(), 2);
        }
        // Frame 0 is the base, so its deltas are all zero.
        for v in &deltas[0] {
            assert_eq!(v, &[0.0_f32, 0.0, 0.0]);
        }
    }

    #[test]
    fn export_morph_pair_creates_file() {
        let mesh_a = triangle_a();
        let mesh_b = triangle_b();
        let path = tmp_path("test_vertex_anim_pair.glb");
        export_morph_pair_glb(&mesh_a, &mesh_b, 1.0, &path).expect("export failed");
        assert!(path.exists(), "GLB file was not created");
        let meta = std::fs::metadata(&path).expect("should succeed");
        assert!(meta.len() > 0, "GLB file is empty");
        std::fs::remove_file(&path).ok();
    }

    #[test]
    fn export_morph_pair_valid_glb_header() {
        let mesh_a = triangle_a();
        let mesh_b = triangle_b();
        let path = tmp_path("test_vertex_anim_pair_header.glb");
        export_morph_pair_glb(&mesh_a, &mesh_b, 2.0, &path).expect("export failed");
        let bytes = std::fs::read(&path).expect("should succeed");
        assert!(bytes.len() >= 12, "file too short");
        // "glTF" magic in little-endian byte order.
        assert_eq!(&bytes[0..4], &[0x67, 0x6C, 0x54, 0x46], "wrong GLB magic");
        let version = u32::from_le_bytes(bytes[4..8].try_into().expect("should succeed"));
        assert_eq!(version, 2);
        std::fs::remove_file(&path).ok();
    }

    #[test]
    fn export_vertex_anim_glb_creates_file() {
        let base_mesh = triangle_a();
        let mut anim = VertexAnimation::new("test_anim", 24.0);
        anim.add_frame(AnimFrame::new(
            0.0,
            vec![[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
        ));
        anim.add_frame(AnimFrame::new(
            1.0,
            vec![[0.1, 0.0, 0.0], [1.1, 0.0, 0.0], [0.1, 1.0, 0.0]],
        ));
        let path = tmp_path("test_vertex_anim_seq.glb");
        export_vertex_anim_glb(&base_mesh, &anim, &path).expect("export failed");
        assert!(path.exists(), "GLB file was not created");
        let meta = std::fs::metadata(&path).expect("should succeed");
        assert!(meta.len() > 0, "GLB file is empty");
        std::fs::remove_file(&path).ok();
    }

    #[test]
    fn export_vertex_anim_glb_valid_header() {
        let base_mesh = triangle_a();
        let mut anim = VertexAnimation::new("hdr_test", 24.0);
        anim.add_frame(AnimFrame::new(
            0.0,
            vec![[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
        ));
        anim.add_frame(AnimFrame::new(
            0.5,
            vec![[0.2, 0.0, 0.0], [1.2, 0.0, 0.0], [0.2, 1.0, 0.0]],
        ));
        let path = tmp_path("test_vertex_anim_seq_header.glb");
        export_vertex_anim_glb(&base_mesh, &anim, &path).expect("export failed");
        let bytes = std::fs::read(&path).expect("should succeed");
        assert!(bytes.len() >= 12);
        assert_eq!(&bytes[0..4], &[0x67, 0x6C, 0x54, 0x46], "wrong GLB magic");
        let version = u32::from_le_bytes(bytes[4..8].try_into().expect("should succeed"));
        assert_eq!(version, 2);
        std::fs::remove_file(&path).ok();
    }

    #[test]
    fn export_vertex_anim_larger_than_base_glb() {
        let base_mesh = triangle_a();
        let mesh_b = triangle_b();
        let pair_path = tmp_path("test_vertex_anim_larger_pair.glb");
        export_morph_pair_glb(&base_mesh, &mesh_b, 1.0, &pair_path).expect("pair export failed");
        let pair_size = std::fs::metadata(&pair_path).expect("should succeed").len();
        let mut anim = VertexAnimation::new("multi", 24.0);
        for i in 0..3 {
            anim.add_frame(AnimFrame::new(
                i as f32 * 0.5,
                vec![
                    [i as f32 * 0.1, 0.0, 0.0],
                    [1.0 + i as f32 * 0.1, 0.0, 0.0],
                    [0.0, 1.0 + i as f32 * 0.1, 0.0],
                ],
            ));
        }
        let seq_path = tmp_path("test_vertex_anim_larger_seq.glb");
        export_vertex_anim_glb(&base_mesh, &anim, &seq_path).expect("seq export failed");
        let seq_size = std::fs::metadata(&seq_path).expect("should succeed").len();
        assert!(
            seq_size > pair_size,
            "3-frame seq ({seq_size}) should be larger than morph pair ({pair_size})"
        );
        std::fs::remove_file(&pair_path).ok();
        std::fs::remove_file(&seq_path).ok();
    }
}