// oxihuman_morph/body_language.rs

1// Copyright (C) 2026 COOLJAPAN OU (Team KitaSan)
2// SPDX-License-Identifier: Apache-2.0
3
4#![allow(dead_code)]
5
6use std::collections::HashMap;
7
/// Pose feature vector extracted from a body pose.
///
/// Angles are in degrees; every other field is a normalised scalar.
/// `Copy`/`PartialEq` are derived because the struct is plain `f32` data:
/// copying is 32 bytes and equality comparison is useful in tests and callers.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct PoseFeatures {
    /// Forward lean angle in degrees (+ve = leaning forward).
    pub spine_lean: f32,
    /// Shoulder elevation: 0 = relaxed, 1 = maximally raised.
    pub shoulder_elevation: f32,
    /// Arm openness: 0 = arms crossed, 1 = wide open.
    pub arm_openness: f32,
    /// Head tilt in degrees (+ve = right tilt).
    pub head_tilt: f32,
    /// Head nod: -1 = looking down, +1 = looking up.
    pub head_nod: f32,
    /// Lateral hip sway (normalised).
    pub hip_sway: f32,
    /// Leg spread: 0 = together, 1 = wide.
    pub leg_spread: f32,
    /// Gesture height: 0 = low, 1 = high.
    pub gesture_height: f32,
}
28
/// High-level body emotion category.
///
/// Fieldless enum, so `Copy`, `Eq`, and `Hash` are derived for free: callers
/// can pass variants by value (no `.clone()` needed) and use them as map/set
/// keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BodyEmotion {
    Neutral,
    Confident,
    Submissive,
    Aggressive,
    Joyful,
    Sad,
    Fearful,
    Curious,
    Relaxed,
    Tense,
}
43
/// Classification result.
#[derive(Debug, Clone)]
pub struct BodyLanguageProfile {
    /// Best-matching body emotion category.
    pub emotion: BodyEmotion,
    /// How strongly the features match [0, 1].
    pub confidence: f32,
    /// The input features this classification was derived from.
    pub features: PoseFeatures,
}
52
53// ─── Reference poses for each emotion ────────────────────────────────────────
54
55fn reference_pose(emotion: &BodyEmotion) -> PoseFeatures {
56    match emotion {
57        BodyEmotion::Neutral => PoseFeatures {
58            spine_lean: 0.0,
59            shoulder_elevation: 0.2,
60            arm_openness: 0.5,
61            head_tilt: 0.0,
62            head_nod: 0.0,
63            hip_sway: 0.0,
64            leg_spread: 0.3,
65            gesture_height: 0.3,
66        },
67        BodyEmotion::Confident => PoseFeatures {
68            spine_lean: -5.0,
69            shoulder_elevation: 0.3,
70            arm_openness: 0.7,
71            head_tilt: 0.0,
72            head_nod: 0.2,
73            hip_sway: 0.1,
74            leg_spread: 0.6,
75            gesture_height: 0.5,
76        },
77        BodyEmotion::Submissive => PoseFeatures {
78            spine_lean: 10.0,
79            shoulder_elevation: 0.0,
80            arm_openness: 0.2,
81            head_tilt: 5.0,
82            head_nod: -0.3,
83            hip_sway: 0.0,
84            leg_spread: 0.1,
85            gesture_height: 0.1,
86        },
87        BodyEmotion::Aggressive => PoseFeatures {
88            spine_lean: -8.0,
89            shoulder_elevation: 0.8,
90            arm_openness: 0.3,
91            head_tilt: 0.0,
92            head_nod: 0.1,
93            hip_sway: 0.0,
94            leg_spread: 0.7,
95            gesture_height: 0.6,
96        },
97        BodyEmotion::Joyful => PoseFeatures {
98            spine_lean: 0.0,
99            shoulder_elevation: 0.4,
100            arm_openness: 0.9,
101            head_tilt: 8.0,
102            head_nod: 0.2,
103            hip_sway: 0.3,
104            leg_spread: 0.5,
105            gesture_height: 0.8,
106        },
107        BodyEmotion::Sad => PoseFeatures {
108            spine_lean: 15.0,
109            shoulder_elevation: 0.0,
110            arm_openness: 0.1,
111            head_tilt: -3.0,
112            head_nod: -0.5,
113            hip_sway: 0.0,
114            leg_spread: 0.2,
115            gesture_height: 0.1,
116        },
117        BodyEmotion::Fearful => PoseFeatures {
118            spine_lean: 5.0,
119            shoulder_elevation: 0.9,
120            arm_openness: 0.2,
121            head_tilt: -5.0,
122            head_nod: -0.2,
123            hip_sway: 0.0,
124            leg_spread: 0.2,
125            gesture_height: 0.4,
126        },
127        BodyEmotion::Curious => PoseFeatures {
128            spine_lean: -3.0,
129            shoulder_elevation: 0.3,
130            arm_openness: 0.6,
131            head_tilt: 12.0,
132            head_nod: 0.1,
133            hip_sway: 0.1,
134            leg_spread: 0.4,
135            gesture_height: 0.5,
136        },
137        BodyEmotion::Relaxed => PoseFeatures {
138            spine_lean: 8.0,
139            shoulder_elevation: 0.1,
140            arm_openness: 0.6,
141            head_tilt: 0.0,
142            head_nod: 0.0,
143            hip_sway: 0.2,
144            leg_spread: 0.5,
145            gesture_height: 0.2,
146        },
147        BodyEmotion::Tense => PoseFeatures {
148            spine_lean: 0.0,
149            shoulder_elevation: 0.7,
150            arm_openness: 0.1,
151            head_tilt: 0.0,
152            head_nod: 0.0,
153            hip_sway: 0.0,
154            leg_spread: 0.3,
155            gesture_height: 0.3,
156        },
157    }
158}
159
160// ─── Feature vector helpers ───────────────────────────────────────────────────
161
162fn pose_to_vec(f: &PoseFeatures) -> [f32; 8] {
163    [
164        f.spine_lean / 90.0,
165        f.shoulder_elevation,
166        f.arm_openness,
167        f.head_tilt / 45.0,
168        f.head_nod,
169        f.hip_sway,
170        f.leg_spread,
171        f.gesture_height,
172    ]
173}
174
/// Dot product of two 8-component vectors.
fn dot8(a: &[f32; 8], b: &[f32; 8]) -> f32 {
    let mut acc = 0.0_f32;
    for i in 0..8 {
        acc += a[i] * b[i];
    }
    acc
}
178
/// Euclidean (L2) norm of an 8-component vector.
fn norm8(a: &[f32; 8]) -> f32 {
    a.iter().map(|x| x * x).sum::<f32>().sqrt()
}
182
183// ─── Public API ───────────────────────────────────────────────────────────────
184
185/// Rule-based classifier: finds the reference emotion with highest cosine similarity.
186pub fn classify_body_language(features: &PoseFeatures) -> BodyLanguageProfile {
187    let all_emotions = [
188        BodyEmotion::Neutral,
189        BodyEmotion::Confident,
190        BodyEmotion::Submissive,
191        BodyEmotion::Aggressive,
192        BodyEmotion::Joyful,
193        BodyEmotion::Sad,
194        BodyEmotion::Fearful,
195        BodyEmotion::Curious,
196        BodyEmotion::Relaxed,
197        BodyEmotion::Tense,
198    ];
199
200    let query = pose_to_vec(features);
201    let qn = norm8(&query);
202
203    let mut best_emotion = BodyEmotion::Neutral;
204    let mut best_sim: f32 = -2.0;
205
206    for emotion in &all_emotions {
207        let ref_vec = pose_to_vec(&reference_pose(emotion));
208        let rn = norm8(&ref_vec);
209        let sim = if qn > 1e-6 && rn > 1e-6 {
210            dot8(&query, &ref_vec) / (qn * rn)
211        } else {
212            0.0
213        };
214        if sim > best_sim {
215            best_sim = sim;
216            best_emotion = emotion.clone();
217        }
218    }
219
220    BodyLanguageProfile {
221        emotion: best_emotion,
222        confidence: ((best_sim + 1.0) / 2.0).clamp(0.0, 1.0),
223        features: features.clone(),
224    }
225}
226
227/// Inverse mapping: generate pose features for a given emotion at given intensity.
228pub fn generate_pose_for_emotion(emotion: &BodyEmotion, intensity: f32) -> PoseFeatures {
229    let neutral = reference_pose(&BodyEmotion::Neutral);
230    let target = reference_pose(emotion);
231    let t = intensity.clamp(0.0, 1.0);
232    interpolate_pose_features(&neutral, &target, t)
233}
234
235/// Linear interpolation between two pose feature sets.
236pub fn interpolate_pose_features(a: &PoseFeatures, b: &PoseFeatures, t: f32) -> PoseFeatures {
237    let t = t.clamp(0.0, 1.0);
238    let lerp = |x: f32, y: f32| x + (y - x) * t;
239    PoseFeatures {
240        spine_lean: lerp(a.spine_lean, b.spine_lean),
241        shoulder_elevation: lerp(a.shoulder_elevation, b.shoulder_elevation),
242        arm_openness: lerp(a.arm_openness, b.arm_openness),
243        head_tilt: lerp(a.head_tilt, b.head_tilt),
244        head_nod: lerp(a.head_nod, b.head_nod),
245        hip_sway: lerp(a.hip_sway, b.hip_sway),
246        leg_spread: lerp(a.leg_spread, b.leg_spread),
247        gesture_height: lerp(a.gesture_height, b.gesture_height),
248    }
249}
250
251/// Cosine-like similarity in [0, 1] between two pose feature vectors.
252pub fn pose_similarity(a: &PoseFeatures, b: &PoseFeatures) -> f32 {
253    let va = pose_to_vec(a);
254    let vb = pose_to_vec(b);
255    let na = norm8(&va);
256    let nb = norm8(&vb);
257    if na < 1e-6 || nb < 1e-6 {
258        return 0.0;
259    }
260    ((dot8(&va, &vb) / (na * nb) + 1.0) / 2.0).clamp(0.0, 1.0)
261}
262
263/// Mirror pose left-right (negates head_tilt and hip_sway).
264pub fn mirror_pose(features: &PoseFeatures) -> PoseFeatures {
265    PoseFeatures {
266        spine_lean: features.spine_lean,
267        shoulder_elevation: features.shoulder_elevation,
268        arm_openness: features.arm_openness,
269        head_tilt: -features.head_tilt,
270        head_nod: features.head_nod,
271        hip_sway: -features.hip_sway,
272        leg_spread: features.leg_spread,
273        gesture_height: features.gesture_height,
274    }
275}
276
277/// Weighted blend of multiple emotion poses.
278pub fn blend_body_emotions(emotions: &[(BodyEmotion, f32)]) -> PoseFeatures {
279    let mut total_weight = 0.0_f32;
280    let mut acc = PoseFeatures {
281        spine_lean: 0.0,
282        shoulder_elevation: 0.0,
283        arm_openness: 0.0,
284        head_tilt: 0.0,
285        head_nod: 0.0,
286        hip_sway: 0.0,
287        leg_spread: 0.0,
288        gesture_height: 0.0,
289    };
290
291    for (emotion, w) in emotions {
292        let pose = reference_pose(emotion);
293        let w = w.max(0.0);
294        acc.spine_lean += pose.spine_lean * w;
295        acc.shoulder_elevation += pose.shoulder_elevation * w;
296        acc.arm_openness += pose.arm_openness * w;
297        acc.head_tilt += pose.head_tilt * w;
298        acc.head_nod += pose.head_nod * w;
299        acc.hip_sway += pose.hip_sway * w;
300        acc.leg_spread += pose.leg_spread * w;
301        acc.gesture_height += pose.gesture_height * w;
302        total_weight += w;
303    }
304
305    if total_weight > 1e-6 {
306        let inv = 1.0 / total_weight;
307        acc.spine_lean *= inv;
308        acc.shoulder_elevation *= inv;
309        acc.arm_openness *= inv;
310        acc.head_tilt *= inv;
311        acc.head_nod *= inv;
312        acc.hip_sway *= inv;
313        acc.leg_spread *= inv;
314        acc.gesture_height *= inv;
315    }
316
317    acc
318}
319
320pub fn pose_to_json(features: &PoseFeatures) -> String {
321    format!(
322        "{{\"spine_lean\":{:.4},\"shoulder_elevation\":{:.4},\"arm_openness\":{:.4},\
323         \"head_tilt\":{:.4},\"head_nod\":{:.4},\"hip_sway\":{:.4},\
324         \"leg_spread\":{:.4},\"gesture_height\":{:.4}}}",
325        features.spine_lean,
326        features.shoulder_elevation,
327        features.arm_openness,
328        features.head_tilt,
329        features.head_nod,
330        features.hip_sway,
331        features.leg_spread,
332        features.gesture_height,
333    )
334}
335
336/// Return the profile with the highest confidence, if any.
337pub fn dominant_emotion(profiles: &[BodyLanguageProfile]) -> Option<&BodyLanguageProfile> {
338    profiles.iter().max_by(|a, b| {
339        a.confidence
340            .partial_cmp(&b.confidence)
341            .unwrap_or(std::cmp::Ordering::Equal)
342    })
343}
344
345/// Map an emotion at a given intensity to morph parameter values.
346pub fn apply_emotion_to_params(emotion: &BodyEmotion, intensity: f32) -> HashMap<String, f32> {
347    let pose = generate_pose_for_emotion(emotion, intensity);
348    let mut map = HashMap::new();
349    map.insert(
350        "spine_lean".to_string(),
351        (pose.spine_lean / 90.0).clamp(-1.0, 1.0),
352    );
353    map.insert(
354        "shoulder_elevation".to_string(),
355        pose.shoulder_elevation.clamp(0.0, 1.0),
356    );
357    map.insert(
358        "arm_openness".to_string(),
359        pose.arm_openness.clamp(0.0, 1.0),
360    );
361    map.insert(
362        "head_tilt".to_string(),
363        (pose.head_tilt / 45.0).clamp(-1.0, 1.0),
364    );
365    map.insert("head_nod".to_string(), pose.head_nod.clamp(-1.0, 1.0));
366    map.insert("hip_sway".to_string(), pose.hip_sway.clamp(-1.0, 1.0));
367    map.insert("leg_spread".to_string(), pose.leg_spread.clamp(0.0, 1.0));
368    map.insert(
369        "gesture_height".to_string(),
370        pose.gesture_height.clamp(0.0, 1.0),
371    );
372    map
373}
374
375/// Clamp all PoseFeatures fields to their valid ranges.
376pub fn normalize_pose_features(features: &mut PoseFeatures) {
377    features.spine_lean = features.spine_lean.clamp(-90.0, 90.0);
378    features.shoulder_elevation = features.shoulder_elevation.clamp(0.0, 1.0);
379    features.arm_openness = features.arm_openness.clamp(0.0, 1.0);
380    features.head_tilt = features.head_tilt.clamp(-45.0, 45.0);
381    features.head_nod = features.head_nod.clamp(-1.0, 1.0);
382    features.hip_sway = features.hip_sway.clamp(-1.0, 1.0);
383    features.leg_spread = features.leg_spread.clamp(0.0, 1.0);
384    features.gesture_height = features.gesture_height.clamp(0.0, 1.0);
385}
386
387// ─── Tests ────────────────────────────────────────────────────────────────────
388
#[cfg(test)]
mod tests {
    use super::*;

    // Shared fixture: the neutral reference pose used as a baseline below.
    fn neutral_pose() -> PoseFeatures {
        reference_pose(&BodyEmotion::Neutral)
    }

    #[test]
    fn test_classify_neutral() {
        // A reference pose should classify as its own emotion with decent confidence.
        let p = reference_pose(&BodyEmotion::Neutral);
        let profile = classify_body_language(&p);
        assert_eq!(profile.emotion, BodyEmotion::Neutral);
        assert!(profile.confidence > 0.5);
    }

    #[test]
    fn test_classify_confident() {
        let p = reference_pose(&BodyEmotion::Confident);
        let profile = classify_body_language(&p);
        assert_eq!(profile.emotion, BodyEmotion::Confident);
    }

    #[test]
    fn test_generate_pose_zero_intensity() {
        // Intensity 0 should collapse to the neutral pose.
        let p = generate_pose_for_emotion(&BodyEmotion::Sad, 0.0);
        let neutral = neutral_pose();
        assert!((p.spine_lean - neutral.spine_lean).abs() < 1e-4);
    }

    #[test]
    fn test_generate_pose_full_intensity() {
        // Intensity 1 should reach the emotion's full reference pose.
        let p = generate_pose_for_emotion(&BodyEmotion::Sad, 1.0);
        let sad_ref = reference_pose(&BodyEmotion::Sad);
        assert!((p.spine_lean - sad_ref.spine_lean).abs() < 1e-4);
    }

    #[test]
    fn test_interpolate_pose_midpoint() {
        // t = 0.5 lands exactly halfway between the endpoints.
        let a = PoseFeatures {
            spine_lean: 0.0,
            shoulder_elevation: 0.0,
            arm_openness: 0.0,
            head_tilt: 0.0,
            head_nod: 0.0,
            hip_sway: 0.0,
            leg_spread: 0.0,
            gesture_height: 0.0,
        };
        let b = PoseFeatures {
            spine_lean: 10.0,
            shoulder_elevation: 1.0,
            arm_openness: 1.0,
            head_tilt: 0.0,
            head_nod: 0.0,
            hip_sway: 0.0,
            leg_spread: 0.0,
            gesture_height: 0.0,
        };
        let mid = interpolate_pose_features(&a, &b, 0.5);
        assert!((mid.spine_lean - 5.0).abs() < 1e-4);
        assert!((mid.arm_openness - 0.5).abs() < 1e-4);
    }

    #[test]
    fn test_pose_similarity_identical() {
        // A pose compared against itself must score ~1.
        let p = neutral_pose();
        let sim = pose_similarity(&p, &p);
        assert!(sim > 0.99);
    }

    #[test]
    fn test_mirror_pose_negates_tilt() {
        // Mirroring negates only head_tilt and hip_sway; other fields pass through.
        let p = PoseFeatures {
            head_tilt: 10.0,
            hip_sway: 0.3,
            spine_lean: 0.0,
            shoulder_elevation: 0.0,
            arm_openness: 0.0,
            head_nod: 0.0,
            leg_spread: 0.0,
            gesture_height: 0.0,
        };
        let m = mirror_pose(&p);
        assert!((m.head_tilt + 10.0).abs() < 1e-5);
        assert!((m.hip_sway + 0.3).abs() < 1e-5);
        assert!((m.spine_lean - 0.0).abs() < 1e-5);
    }

    #[test]
    fn test_blend_body_emotions_single() {
        // A single weight-1 emotion should reproduce its reference pose.
        let result = blend_body_emotions(&[(BodyEmotion::Joyful, 1.0)]);
        let joy = reference_pose(&BodyEmotion::Joyful);
        assert!((result.arm_openness - joy.arm_openness).abs() < 1e-4);
    }

    #[test]
    fn test_blend_body_emotions_equal_weights() {
        // Two equal-weight emotions: result should be the midpoint of their reference poses.
        let poses = [(BodyEmotion::Neutral, 1.0), (BodyEmotion::Confident, 1.0)];
        let result = blend_body_emotions(&poses);
        let n = reference_pose(&BodyEmotion::Neutral);
        let c = reference_pose(&BodyEmotion::Confident);
        let expected_arm = (n.arm_openness + c.arm_openness) / 2.0;
        assert!((result.arm_openness - expected_arm).abs() < 1e-4);
    }

    #[test]
    fn test_pose_to_json() {
        // Output must contain the field names as JSON keys.
        let p = neutral_pose();
        let j = pose_to_json(&p);
        assert!(j.contains("spine_lean"));
        assert!(j.contains("arm_openness"));
    }

    #[test]
    fn test_dominant_emotion_empty() {
        // An empty slice has no dominant profile.
        let profiles: Vec<BodyLanguageProfile> = vec![];
        assert!(dominant_emotion(&profiles).is_none());
    }

    #[test]
    fn test_dominant_emotion_picks_highest_confidence() {
        let profiles = vec![
            BodyLanguageProfile {
                emotion: BodyEmotion::Sad,
                confidence: 0.3,
                features: neutral_pose(),
            },
            BodyLanguageProfile {
                emotion: BodyEmotion::Joyful,
                confidence: 0.9,
                features: neutral_pose(),
            },
        ];
        let dom = dominant_emotion(&profiles).expect("should succeed");
        assert_eq!(dom.emotion, BodyEmotion::Joyful);
    }

    #[test]
    fn test_apply_emotion_to_params_keys() {
        let params = apply_emotion_to_params(&BodyEmotion::Confident, 1.0);
        assert!(params.contains_key("spine_lean"));
        assert!(params.contains_key("arm_openness"));
    }

    #[test]
    fn test_normalize_pose_features_clamps() {
        // Deliberately out-of-range values in every field.
        let mut p = PoseFeatures {
            spine_lean: 200.0,
            shoulder_elevation: 2.0,
            arm_openness: -1.0,
            head_tilt: 100.0,
            head_nod: 5.0,
            hip_sway: -5.0,
            leg_spread: 3.0,
            gesture_height: -1.0,
        };
        normalize_pose_features(&mut p);
        assert!(p.spine_lean <= 90.0);
        assert!(p.shoulder_elevation <= 1.0);
        assert!(p.arm_openness >= 0.0);
        assert!(p.head_tilt <= 45.0);
        assert!(p.head_nod >= -1.0 && p.head_nod <= 1.0);
    }
}