1use super::codec::SmplCodec;
2use gltf_json::{validation::Checked::Valid, Root};
3use ndarray as nd;
4use std::{fs::File, io::Read};
/// Intrinsic parameters and optional per-frame pose track of a scene camera,
/// extracted from the first camera of a GLTF document.
#[derive(Debug, Clone)]
pub struct CameraTrack {
    /// Vertical field of view in radians (defaults to FRAC_PI_2 when the
    /// GLTF perspective block is missing).
    pub yfov: f32,
    /// Near clipping-plane distance (defaults to 0.1 when unspecified).
    pub znear: f32,
    /// Far clipping-plane distance; `None` when the GLTF does not specify one.
    pub zfar: Option<f32>,
    /// Aspect ratio; `None` when the GLTF does not specify one.
    pub aspect_ratio: Option<f32>,
    /// Per-frame camera translations with shape `(num_frames, 3)`, read from
    /// GLTF animation channels targeting the camera node.
    pub per_frame_translations: Option<nd::Array2<f32>>,
    /// Per-frame camera rotations with shape `(num_frames, 4)` — presumably
    /// quaternions, as is conventional for glTF rotation channels.
    pub per_frame_rotations: Option<nd::Array2<f32>>,
}
15impl Default for CameraTrack {
16 fn default() -> Self {
17 Self {
18 yfov: 1.0,
19 znear: 0.1,
20 zfar: None,
21 aspect_ratio: None,
22 per_frame_translations: None,
23 per_frame_rotations: None,
24 }
25 }
26}
/// A multi-body motion-capture scene decoded from a GLTF document's
/// `MC_scene_description` scene extension.
#[derive(Debug, Clone)]
pub struct McsCodec {
    /// Total frame count, from the extension's `num_frames` field (0 when absent).
    pub num_frames: usize,
    /// Playback frame rate: taken from the first SMPL body's codec, or 30.0
    /// when no body declares one.
    pub frame_rate: f32,
    /// All SMPL bodies embedded in the scene extension.
    pub smpl_bodies: Vec<SmplBody>,
    /// Track of the document's first camera; `None` when there are no cameras.
    pub camera_track: Option<CameraTrack>,
}
/// One SMPL body embedded in the scene extension.
#[derive(Debug, Clone)]
pub struct SmplBody {
    /// Values of the extension's `frame_presence` array — presumably the
    /// frame indices during which this body is present; confirm against the
    /// writer of the MCS files.
    pub frame_presence: Vec<usize>,
    /// Decoded SMPL payload for this body.
    pub codec: SmplCodec,
}
41#[allow(clippy::cast_possible_truncation)]
43impl McsCodec {
44 pub fn from_file(path: &str) -> Self {
46 let mut file = File::open(path).expect("Failed to open GLTF file");
47 let mut json_data = String::new();
48 file.read_to_string(&mut json_data).expect("Failed to read GLTF file");
49 let gltf: Root = serde_json::from_str(&json_data).expect("Failed to parse GLTF JSON");
50 Self::from_gltf(&gltf)
51 }
52 pub fn from_gltf(gltf: &Root) -> Self {
54 let mut num_frames = 0;
55 let mut smpl_bodies = Vec::new();
56 if let Some(scene) = gltf.scenes.first() {
57 if let Some(extensions) = &scene.extensions {
58 if let Some(extension_value) = extensions.others.get("MC_scene_description") {
59 let extension = extension_value.as_object().expect("Expected extension to be an object");
60 if let Some(nf) = extension.get("num_frames").and_then(gltf_json::Value::as_u64) {
61 num_frames = nf as usize;
62 }
63 if let Some(smpl_bodies_data) = extension.get("smpl_bodies").and_then(|v| v.as_array()) {
64 smpl_bodies = Self::extract_smpl_bodies(gltf, smpl_bodies_data);
65 }
66 }
67 }
68 Self {
69 num_frames,
70 frame_rate: smpl_bodies.first().and_then(|b| b.codec.frame_rate).unwrap_or(30.0),
71 smpl_bodies,
72 camera_track: Self::extract_camera_track(gltf),
73 }
74 } else {
75 panic!("Not able to find GLTF root! Check the GLTF file format!")
76 }
77 }
78 fn extract_smpl_bodies(gltf: &Root, smpl_bodies_data: &[serde_json::Value]) -> Vec<SmplBody> {
80 smpl_bodies_data
81 .iter()
82 .filter_map(|smpl_body_data| {
83 smpl_body_data
84 .get("bufferView")
85 .and_then(gltf_json::Value::as_u64)
86 .map(|buffer_view_index| {
87 let buffer = Self::read_smpl_buffer(gltf, buffer_view_index as usize);
88 let frame_presence = smpl_body_data
89 .get("frame_presence")
90 .and_then(|v| v.as_array())
91 .map_or_else(Vec::new, |arr| arr.iter().filter_map(|v| v.as_u64().map(|n| n as usize)).collect());
92 let codec = SmplCodec::from_buf(&buffer);
93 SmplBody { frame_presence, codec }
94 })
95 })
96 .collect()
97 }
98 fn read_smpl_buffer(gltf: &Root, buffer_index: usize) -> Vec<u8> {
100 let buffer = &gltf.buffers[buffer_index];
101 buffer
102 .uri
103 .as_ref()
104 .and_then(|uri| {
105 if uri.starts_with("data:") {
106 uri.split(',')
107 .nth(1)
108 .map(|encoded_data| base64::decode(encoded_data).expect("Failed to decode Base64 data"))
109 } else {
110 panic!("The data buffers must not be separate files!")
111 }
112 })
113 .unwrap_or_default()
114 }
115 fn extract_camera_track(gltf: &Root) -> Option<CameraTrack> {
117 if let Some(camera) = gltf.cameras.first() {
118 let (yfov, znear, zfar, aspect_ratio) = match camera.type_.unwrap() {
119 gltf_json::camera::Type::Perspective => (
120 camera.perspective.as_ref().map_or(std::f32::consts::FRAC_PI_2, |p| p.yfov),
121 camera.perspective.as_ref().map_or(0.1, |p| p.znear),
122 camera.perspective.as_ref().and_then(|p| p.zfar),
123 camera.perspective.as_ref().and_then(|p| p.aspect_ratio),
124 ),
125 gltf_json::camera::Type::Orthographic => {
126 panic!("Orthographic camera not supported!")
127 }
128 };
129 if gltf.animations.is_empty() {
130 return Some(CameraTrack {
131 yfov,
132 znear,
133 zfar,
134 aspect_ratio,
135 per_frame_translations: None,
136 per_frame_rotations: None,
137 });
138 }
139 let mut per_frame_translations = None;
140 let mut per_frame_rotations = None;
141 for animation in &gltf.animations {
142 for channel in &animation.channels {
143 let target = &channel.target;
144 let Some(node) = gltf.nodes.get(target.node.value()) else { continue };
145 if node.camera.is_none() {
146 continue;
147 }
148 let Some(sampler) = animation.samplers.get(channel.sampler.value()) else {
149 continue;
150 };
151 let Some(output_accessor) = gltf.accessors.get(sampler.output.value()) else {
152 continue;
153 };
154 let Some(buffer_view) = output_accessor.buffer_view.as_ref().and_then(|bv| gltf.buffer_views.get(bv.value())) else {
155 continue;
156 };
157 let Some(buffer) = gltf.buffers.get(buffer_view.buffer.value()) else {
158 continue;
159 };
160 let Some(uri) = &buffer.uri else { continue };
161 if !uri.starts_with("data:") {
162 continue;
163 }
164 let encoded_data = uri.split(',').nth(1).expect("Invalid data URI");
165 let buffer_data = base64::decode(encoded_data).expect("Failed to decode Base64 data");
166 let start = buffer_view.byte_offset.map_or(0, |x| x.0 as usize);
167 let length = buffer_view.byte_length.0 as usize;
168 let data = &buffer_data[start..start + length];
169 let floats: Vec<f32> = data.chunks(4).map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]])).collect();
170 if let Valid(path) = &target.path {
171 match path {
172 gltf_json::animation::Property::Translation => {
173 let num_frames = floats.len() / 3;
174 per_frame_translations = Some(nd::Array2::from_shape_vec((num_frames, 3), floats).unwrap());
175 }
176 gltf_json::animation::Property::Rotation => {
177 let num_frames = floats.len() / 4;
178 per_frame_rotations = Some(nd::Array2::from_shape_vec((num_frames, 4), floats).unwrap());
179 }
180 _ => {}
181 }
182 }
183 }
184 }
185 Some(CameraTrack {
186 yfov,
187 znear,
188 zfar,
189 aspect_ratio,
190 per_frame_translations,
191 per_frame_rotations,
192 })
193 } else {
194 None
195 }
196 }
197}