perigee/animation/
asset.rs

1// Much of this code is adapted from
// https://github.com/aunyks/gltf-animation
3
4use crate::perigee_gltf::util::access_gltf_bytes;
5use crate::time::PassiveClock;
6use gltf::{
7    accessor::DataType as GltfDataType,
8    animation::{Interpolation, Property as GltfProperty},
9    Gltf,
10};
11use rapier3d::na::{Quaternion, UnitQuaternion, Vector3};
12use std::collections::HashMap;
13use thiserror::Error;
14
/// Returns the interpolation parameter `t` for which a linear
/// interpolation from `a` to `b` would produce `value`.
///
/// Returns `0.0` when `a` and `b` are (nearly) equal, since no
/// meaningful parameter exists in that case.
pub fn inverse_lerp(a: f32, b: f32, value: f32) -> f32 {
    let span = b - a;
    if span.abs() <= f32::EPSILON {
        0.0
    } else {
        (value - a) / span
    }
}
21
22#[derive(Error, Debug)]
23pub enum AnimationCreationError {
24    #[error("could not find name in glTF document")]
25    NameNotFound,
26    #[error("theres not an even amount of keyframe timestamps for each keyframe property")]
27    MismatchedKeyframes,
28    #[error("could not find a binary blob in glTF document")]
29    NoBinaryBlob,
30    #[error("could not get accessor bytes")]
31    CouldntAccessBytes,
32}
33
/// The kind of node transform a single animation channel drives.
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub enum ChannelType {
    Translation,
    Scale,
    Rotation,
}
40
/// A sampled value for one animated transform property at a single
/// point in time.
#[derive(Debug, Clone, Copy)]
pub enum AnimatedProperty {
    Translation(Vector3<f32>),
    Scale(Vector3<f32>),
    Rotation(UnitQuaternion<f32>),
}
47
48impl AnimatedProperty {
49    pub fn inner_vector(&self) -> Option<Vector3<f32>> {
50        match self {
51            Self::Translation(vec) => Some(*vec),
52            Self::Scale(vec) => Some(*vec),
53            _ => None,
54        }
55    }
56
57    pub fn inner_quaternion(&self) -> Option<UnitQuaternion<f32>> {
58        match self {
59            Self::Rotation(quat) => Some(*quat),
60            _ => None,
61        }
62    }
63
64    pub fn lerp(&self, other: &Self, t: f32) -> Self {
65        match (self, other) {
66            (Self::Translation(lhs_vec3), Self::Translation(rhs_vec3)) => {
67                Self::Translation(lhs_vec3.lerp(rhs_vec3, t))
68            }
69            (Self::Scale(lhs_vec3), Self::Scale(rhs_vec3)) => {
70                Self::Scale(lhs_vec3.lerp(rhs_vec3, t))
71            }
72            (Self::Rotation(lhs_quat), Self::Rotation(rhs_quat)) => {
73                Self::Rotation(lhs_quat.slerp(rhs_quat, t))
74            }
75            _ => {
76                panic!("Can't lerp between two different animation properties!")
77            }
78        }
79    }
80}
81
/// A single sampled property value paired with the timeline instant at
/// which it applies.
#[derive(Debug, Clone, Copy)]
pub struct Keyframe {
    // Seconds from the start of the animation.
    timestamp: f32,
    // The property value at that instant.
    property: AnimatedProperty,
}
87
88impl Keyframe {
89    pub fn new(timestamp: f32, property: AnimatedProperty) -> Self {
90        Self {
91            timestamp,
92            property,
93        }
94    }
95
96    pub fn timestamp(&self) -> f32 {
97        self.timestamp
98    }
99
100    pub fn property(&self) -> AnimatedProperty {
101        self.property
102    }
103
104    pub fn lerp_property(&self, other: &Self, timestamp: f32) -> AnimatedProperty {
105        let t = inverse_lerp(self.timestamp, other.timestamp(), timestamp);
106        self.property.lerp(&other.property(), t)
107    }
108}
109
/// A sequence of keyframes driving one property (translation, scale,
/// or rotation) of a single target node.
#[derive(Debug, Clone)]
pub struct AnimationChannel {
    // Keyframes sorted by ascending timestamp; assumed non-empty
    // (duration() relies on this — see its expect message).
    keyframes: Vec<Keyframe>,
    // How to interpolate between neighboring keyframes.
    interpolation_method: Interpolation,
}
115
116impl AnimationChannel {
117    pub fn new(keyframes: Vec<Keyframe>, interpolation_method: Interpolation) -> Self {
118        Self {
119            keyframes,
120            interpolation_method,
121        }
122    }
123
124    pub fn keyframes(&self) -> &Vec<Keyframe> {
125        &self.keyframes
126    }
127
128    pub fn duration(&self) -> f32 {
129        // This should never panic since we guarantee tha ta channel
130        // always has at least one keyframe
131        self.keyframes
132            .last()
133            .expect("Could not get last keyframe of AnimationChannel. Channel has no keyframes")
134            .timestamp()
135    }
136
137    pub fn interpolation_method(&self) -> Interpolation {
138        self.interpolation_method
139    }
140
141    fn find_boundary_keyframes(&self, timestamp: f32) -> (Option<&Keyframe>, Option<&Keyframe>) {
142        let first_keyframe = self
143            .keyframes
144            .first()
145            .expect("Could not get first keyframe");
146        let last_keyframe = self.keyframes.last().expect("Could not get last keyframe");
147        if timestamp < first_keyframe.timestamp() {
148            return (None, Some(&first_keyframe));
149        }
150        if timestamp > last_keyframe.timestamp() {
151            return (Some(&last_keyframe), None);
152        }
153        let mut early_index: usize = 0;
154        let mut late_index = self.keyframes.len() - 1;
155        // Binary search
156        while early_index <= late_index || (late_index - early_index) > 1 {
157            if (late_index - early_index) <= 1 {
158                return (
159                    Some(&self.keyframes[early_index]),
160                    Some(&self.keyframes[late_index]),
161                );
162            }
163            let midpoint_index = (early_index + late_index) / 2;
164            let midpoint_timestamp = self.keyframes[midpoint_index].timestamp();
165            if timestamp > midpoint_timestamp {
166                if midpoint_index == 0 {
167                    return (
168                        Some(&self.keyframes[midpoint_index]),
169                        Some(&self.keyframes[1]),
170                    );
171                }
172                early_index = midpoint_index + 1;
173            } else if timestamp < midpoint_timestamp {
174                late_index = midpoint_index - 1;
175            } else {
176                return (
177                    Some(&self.keyframes[midpoint_index]),
178                    Some(&self.keyframes[midpoint_index]),
179                );
180            }
181        }
182        unreachable!();
183    }
184
185    pub fn property_at(&self, timestamp: f32) -> AnimatedProperty {
186        let (early_bound_frame, late_bound_frame) = self.find_boundary_keyframes(timestamp);
187        if early_bound_frame.is_none() {
188            return late_bound_frame
189                .expect("Tried to get first keyframe but it doesn't exist")
190                .property();
191        }
192        if late_bound_frame.is_none() {
193            return early_bound_frame
194                .expect("Tried to get last keyframe but it doesn't exist")
195                .property();
196        }
197        let early_bound_frame =
198            early_bound_frame.expect("Early bound keyframe was None despite asserting it wasn't.");
199        let late_bound_frame =
200            late_bound_frame.expect("Late bound keyframe was None despite asserting it wasn't.");
201        // Interpolate between the two
202        match self.interpolation_method {
203            Interpolation::Step => {
204                if (timestamp - late_bound_frame.timestamp()).abs() <= f32::EPSILON {
205                    return late_bound_frame.property();
206                }
207                early_bound_frame.property()
208            }
209            Interpolation::Linear => early_bound_frame.lerp_property(&late_bound_frame, timestamp),
210            Interpolation::CubicSpline => {
211                early_bound_frame.lerp_property(&late_bound_frame, timestamp)
212            }
213        }
214    }
215}
216
/// A playable animation capable of having translation, rotation, and scale channels.
pub struct Animation {
    // Tracks the current position (seconds) along the timeline.
    passive_timer: PassiveClock,
    // Frames per second, derived from the densest channel's frame count
    // over the animation's duration.
    fps: u32,
    // Total duration in seconds (the longest channel's duration).
    duration: f32,
    // Channels grouped by target node name, then by channel type.
    target_channels: HashMap<String, HashMap<ChannelType, AnimationChannel>>,
    // The animation's name as it appears in the glTF document.
    name: String,
    // Callbacks fired when playback crosses their keyed frame number.
    frame_listeners: HashMap<u32, Box<dyn FnMut()>>,
}
226
227impl Animation {
228    pub fn from_gltf(gltf: &Gltf, anim_name: &str) -> Result<Self, AnimationCreationError> {
229        let gltf_bytes = match &gltf.blob {
230            Some(bytes) => bytes,
231            None => return Err(AnimationCreationError::NoBinaryBlob),
232        };
233        for anim in gltf.animations() {
234            if let Some(anim_name_candidate) = anim.name() {
235                if anim_name_candidate == anim_name {
236                    let mut max_channel_duration = 0.0;
237                    let mut max_channel_frames = 0;
238
239                    let mut target_channels: HashMap<
240                        String,
241                        HashMap<ChannelType, AnimationChannel>,
242                    > = HashMap::new();
243                    for channel in anim.channels() {
244                        if matches!(
245                            channel.target().property(),
246                            GltfProperty::MorphTargetWeights
247                        ) {
248                            continue;
249                        }
250                        let channel_sampler = channel.sampler();
251
252                        let sampler_input = channel_sampler.input();
253                        let sampler_input_bytes = if let Ok(sampler_input_bytes) =
254                            access_gltf_bytes(&gltf_bytes, &sampler_input)
255                        {
256                            sampler_input_bytes
257                        } else {
258                            return Err(AnimationCreationError::CouldntAccessBytes);
259                        };
260
261                        // If we have 0 timestamps then we can skip this iteration.
262                        // One timestamp f32 would be 4 bytes, so if we have less than 4 bytes
263                        // then we don't have a whole recoverable timestamp.
264                        if sampler_input_bytes.len() < 4 {
265                            continue;
266                        }
267
268                        if sampler_input.count() > max_channel_frames {
269                            max_channel_frames = sampler_input.count();
270                        }
271
272                        let keyframe_timestamps: Vec<f32> = match channel_sampler.input().data_type() {
273                            GltfDataType::F32 => sampler_input_bytes
274                                .chunks_exact(4)
275                                .map(|f32_bytes| {
276                                    let f32_byte_array: [u8; 4] = f32_bytes[0..4]
277                                        .try_into()
278                                        .expect("Could not convert f32 byte slice into f32 byte array");
279                                    f32::from_le_bytes(f32_byte_array)
280                                })
281                                .collect(),
282                            _ => panic!(
283                                "Unexpected data type received while converting sampler input bytes. Expected F32"
284                            ),
285                        };
286                        if let Some(channel_duration) = keyframe_timestamps.last() {
287                            if channel_duration > &max_channel_duration {
288                                max_channel_duration = *channel_duration;
289                            }
290                        }
291
292                        let sampler_output = channel_sampler.output();
293                        let sampler_output_bytes = if let Ok(sampler_output_bytes) =
294                            access_gltf_bytes(&gltf_bytes, &sampler_output)
295                        {
296                            sampler_output_bytes
297                        } else {
298                            return Err(AnimationCreationError::CouldntAccessBytes);
299                        };
300
301                        let mut keyframe_properties: Vec<AnimatedProperty> =
302                            Vec::with_capacity(keyframe_timestamps.len());
303
304                        let channel_type: ChannelType;
305                        match channel.target().property() {
306                            GltfProperty::Translation => {
307                                channel_type = ChannelType::Translation;
308                                let flattened_vec3s: Vec<f32> = sampler_output_bytes
309                                    .chunks_exact(4)
310                                    .map(|f32_bytes| {
311                                        let f32_byte_array: [u8; 4] = f32_bytes[0..4]
312                                            .try_into()
313                                            .expect(
314                                            "Could not convert f32 byte slice into f32 byte array",
315                                        );
316                                        f32::from_le_bytes(f32_byte_array)
317                                    })
318                                    .collect();
319                                flattened_vec3s.chunks_exact(3).for_each(|vec3| {
320                                    keyframe_properties.push(AnimatedProperty::Translation(
321                                        Vector3::new(vec3[0], vec3[1], vec3[2]),
322                                    ))
323                                });
324                            }
325                            GltfProperty::Scale => {
326                                channel_type = ChannelType::Scale;
327                                let flattened_vec3s: Vec<f32> = sampler_output_bytes
328                                    .chunks_exact(4)
329                                    .map(|f32_bytes| {
330                                        let f32_byte_array: [u8; 4] = f32_bytes[0..4]
331                                            .try_into()
332                                            .expect(
333                                            "Could not convert f32 byte slice into f32 byte array",
334                                        );
335                                        f32::from_le_bytes(f32_byte_array)
336                                    })
337                                    .collect();
338                                flattened_vec3s.chunks_exact(3).for_each(|vec3| {
339                                    keyframe_properties.push(AnimatedProperty::Scale(Vector3::new(
340                                        vec3[0], vec3[1], vec3[2],
341                                    )))
342                                });
343                            }
344                            GltfProperty::Rotation => {
345                                channel_type = ChannelType::Rotation;
346                                let flattened_vec4s: Vec<f32> = sampler_output_bytes
347                                    .chunks_exact(4)
348                                    .map(|f32_bytes| {
349                                        let f32_byte_array: [u8; 4] = f32_bytes[0..4]
350                                            .try_into()
351                                            .expect(
352                                            "Could not convert f32 byte slice into f32 byte array",
353                                        );
354                                        f32::from_le_bytes(f32_byte_array)
355                                    })
356                                    .collect();
357                                flattened_vec4s.chunks_exact(4).for_each(|vec4| {
358                                    keyframe_properties.push(AnimatedProperty::Rotation(
359                                        UnitQuaternion::from_quaternion(Quaternion::new(
360                                            vec4[3], vec4[0], vec4[1], vec4[2],
361                                        )),
362                                    ))
363                                });
364                            }
365                            _ => {
366                                unreachable!();
367                            }
368                        };
369
370                        if keyframe_timestamps.len() % keyframe_properties.len() != 0 {
371                            return Err(AnimationCreationError::MismatchedKeyframes);
372                        }
373
374                        let frames_per_property =
375                            keyframe_timestamps.len() / keyframe_properties.len();
376                        let mut keyframes: Vec<Keyframe> =
377                            Vec::with_capacity(keyframe_properties.len());
378                        for i in (0..keyframe_timestamps.len()).step_by(frames_per_property) {
379                            keyframes.push(Keyframe::new(
380                                keyframe_timestamps[i],
381                                keyframe_properties[i / frames_per_property],
382                            ));
383                        }
384
385                        let new_channel =
386                            AnimationChannel::new(keyframes, channel_sampler.interpolation());
387
388                        let target_name =
389                            String::from(channel.target().node().name().unwrap_or("Unknown"));
390                        if let Some(channels_for_target) = target_channels.get_mut(&target_name) {
391                            channels_for_target.insert(channel_type, new_channel);
392                        } else {
393                            target_channels
394                                .insert(target_name, HashMap::from([(channel_type, new_channel)]));
395                        }
396                    }
397
398                    return Ok(Self {
399                        passive_timer: PassiveClock::default(),
400                        target_channels,
401                        fps: (max_channel_frames as f32 / max_channel_duration).round() as u32,
402                        duration: max_channel_duration,
403                        name: String::from(anim_name),
404                        frame_listeners: HashMap::new(),
405                    });
406                }
407            }
408        }
409        return Err(AnimationCreationError::NameNotFound);
410    }
411
412    pub fn fps(&self) -> u32 {
413        self.fps
414    }
415
416    pub fn frame_at_time(&self, timestamp: f32) -> u32 {
417        (timestamp * self.fps() as f32).round() as u32
418    }
419
420    pub fn on_frame(&mut self, frame: u32, listener: impl FnMut() + 'static) {
421        self.frame_listeners.insert(frame, Box::new(listener));
422    }
423
424    pub fn name(&self) -> &String {
425        &self.name
426    }
427
428    pub fn duration(&self) -> f32 {
429        self.duration
430    }
431
432    pub fn set_timeline_position(&mut self, secs: f32) {
433        self.passive_timer.set_seconds(secs);
434    }
435
436    pub fn timeline_position(&self) -> f32 {
437        self.passive_timer.elapsed().as_secs_f32()
438    }
439
440    pub fn update(&mut self, delta_seconds: f32) {
441        if delta_seconds >= 0.0 {
442            self.update_forward(delta_seconds);
443        } else {
444            self.update_reverse(delta_seconds);
445        }
446    }
447
448    fn update_forward(&mut self, delta_seconds: f32) {
449        let old_frame = self.frame_at_time(self.timeline_position());
450        self.passive_timer.tick(delta_seconds);
451        let new_frame = self.frame_at_time(self.timeline_position());
452
453        for (listener_frame, listener) in self.frame_listeners.iter_mut() {
454            if listener_frame <= &old_frame {
455                continue;
456            }
457            if listener_frame > &new_frame {
458                continue;
459            }
460
461            listener();
462        }
463    }
464
465    fn update_reverse(&mut self, delta_seconds: f32) {
466        let old_frame = self.frame_at_time(self.timeline_position());
467        self.passive_timer.tick_reverse(delta_seconds);
468        let new_frame = self.frame_at_time(self.timeline_position());
469
470        for (listener_frame, listener) in self.frame_listeners.iter_mut() {
471            if listener_frame >= &old_frame {
472                continue;
473            }
474            if listener_frame < &new_frame {
475                continue;
476            }
477
478            listener();
479        }
480    }
481
482    pub fn current_translation(&self, target_name: &str) -> Option<Vector3<f32>> {
483        match self.target_channels.get(target_name) {
484            Some(channels) => match channels.get(&ChannelType::Translation) {
485                Some(channel) => Some(
486                    channel
487                        .property_at(self.passive_timer.elapsed().as_secs_f32())
488                        .inner_vector()
489                        .expect("Tried to get inner vector of Translation property but it doesn't exist"),
490                ),
491                None => None,
492            },
493            None => None,
494        }
495    }
496
497    pub fn current_scale(&self, target_name: &str) -> Option<Vector3<f32>> {
498        match self.target_channels.get(target_name) {
499            Some(channels) => match channels.get(&ChannelType::Scale) {
500                Some(channel) => Some(
501                    channel
502                        .property_at(self.passive_timer.elapsed().as_secs_f32())
503                        .inner_vector()
504                        .expect("Tried to get inner vector of Scale property but it doesn't exist"),
505                ),
506                None => None,
507            },
508            None => None,
509        }
510    }
511
512    pub fn current_rotation(&self, target_name: &str) -> Option<UnitQuaternion<f32>> {
513        match self.target_channels.get(target_name) {
514            Some(channels) => match channels.get(&ChannelType::Rotation) {
515                Some(channel) => Some(
516                    channel
517                        .property_at(self.passive_timer.elapsed().as_secs_f32())
518                        .inner_quaternion()
519                        .expect("Tried to get inner quaternion of Rotation property but it doesn't exist"),
520                ),
521                None => None,
522            },
523            None => None,
524        }
525    }
526}