#![allow(dead_code)]
/// A named set of per-feature weights that can contribute to an
/// [`EthnicBlend`] with an associated influence factor.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct EthnicFeatureSet {
/// Identifier for this set (uniqueness is not enforced here).
pub set_id: u32,
/// Human-readable name of the set.
pub name: String,
/// Per-feature weight values; different sets in one blend may have
/// different lengths (shorter sets contribute 0 for missing entries).
pub weights: Vec<f32>,
}
/// A weighted combination of [`EthnicFeatureSet`]s. Each entry pairs a
/// feature set with its influence; influences added via `add_ethnic_set`
/// are clamped to be non-negative.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct EthnicBlend {
/// `(feature set, influence)` pairs, kept in insertion order.
pub sets: Vec<(EthnicFeatureSet, f32)>,
}
#[allow(dead_code)]
pub fn new_ethnic_blend() -> EthnicBlend {
EthnicBlend::default()
}
/// Append a feature set to `blend` under the given `id` and `name`.
///
/// The `influence` is clamped to be non-negative before it is stored
/// (`f32::max` also maps a NaN influence to 0.0).
#[allow(dead_code)]
pub fn add_ethnic_set(
    blend: &mut EthnicBlend,
    id: u32,
    name: &str,
    weights: Vec<f32>,
    influence: f32,
) {
    let clamped = influence.max(0.0);
    let entry = EthnicFeatureSet {
        set_id: id,
        name: name.to_owned(),
        weights,
    };
    blend.sets.push((entry, clamped));
}
/// Blend all weight vectors into one vector whose length equals that of
/// the longest contributing set.
///
/// Each set contributes `weight * influence / total_influence`; shorter
/// sets implicitly contribute 0 for their missing trailing entries.
/// Returns an empty vector when the blend is empty or every set has zero
/// weights. When the total influence is effectively zero the divisor
/// falls back to 1.0, so the result stays all-zero instead of dividing
/// by (near) zero.
#[allow(dead_code)]
pub fn compute_ethnic_blend(blend: &EthnicBlend) -> Vec<f32> {
    let max_len = blend
        .sets
        .iter()
        .map(|(set, _)| set.weights.len())
        .max()
        .unwrap_or(0);
    if max_len == 0 {
        // Covers both "no sets" and "all sets have empty weights".
        return Vec::new();
    }
    let total: f32 = blend.sets.iter().map(|(_, inf)| *inf).sum();
    let denom = if total > 1e-9 { total } else { 1.0 };
    let mut out = vec![0.0_f32; max_len];
    for (set, inf) in &blend.sets {
        let factor = inf / denom;
        // zip stops at the shorter of the two, so short sets simply
        // leave the trailing slots untouched (i.e. contribute zero).
        for (slot, &w) in out.iter_mut().zip(set.weights.iter()) {
            *slot += w * factor;
        }
    }
    out
}
/// Rescale all influences in place so that they sum to 1.0.
///
/// Leaves the blend untouched when the current total is effectively zero
/// (including the empty blend), avoiding a division by (near) zero.
#[allow(dead_code)]
pub fn normalize_influences(blend: &mut EthnicBlend) {
    let total = blend.sets.iter().fold(0.0_f32, |acc, (_, inf)| acc + inf);
    if total < 1e-9 {
        // Nothing sensible to normalize against.
        return;
    }
    blend.sets.iter_mut().for_each(|entry| entry.1 /= total);
}
#[allow(dead_code)]
pub fn set_count(blend: &EthnicBlend) -> usize {
blend.sets.len()
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn new_blend_is_empty() {
        assert_eq!(set_count(&new_ethnic_blend()), 0);
    }

    #[test]
    fn add_ethnic_set_increments_count() {
        let mut blend = new_ethnic_blend();
        add_ethnic_set(&mut blend, 1, "SetA", vec![0.5, 0.5], 1.0);
        assert_eq!(set_count(&blend), 1);
    }

    #[test]
    fn compute_blend_empty_returns_empty() {
        assert!(compute_ethnic_blend(&new_ethnic_blend()).is_empty());
    }

    #[test]
    fn compute_blend_single_set_identity() {
        // A lone set with full influence must pass through unchanged.
        let mut blend = new_ethnic_blend();
        add_ethnic_set(&mut blend, 1, "A", vec![0.4, 0.6], 1.0);
        let out = compute_ethnic_blend(&blend);
        for (got, want) in out.iter().zip([0.4_f32, 0.6]) {
            assert!((got - want).abs() < 1e-5);
        }
    }

    #[test]
    fn compute_blend_two_equal_sets() {
        // Two orthogonal sets with equal influence average to 0.5 each.
        let mut blend = new_ethnic_blend();
        add_ethnic_set(&mut blend, 1, "A", vec![1.0, 0.0], 1.0);
        add_ethnic_set(&mut blend, 2, "B", vec![0.0, 1.0], 1.0);
        let out = compute_ethnic_blend(&blend);
        assert!(out.iter().all(|v| (v - 0.5).abs() < 1e-5));
    }

    #[test]
    fn normalize_influences_sums_to_one() {
        let mut blend = new_ethnic_blend();
        add_ethnic_set(&mut blend, 1, "A", vec![1.0], 2.0);
        add_ethnic_set(&mut blend, 2, "B", vec![0.0], 3.0);
        normalize_influences(&mut blend);
        let total = blend.sets.iter().map(|(_, inf)| inf).sum::<f32>();
        assert!((total - 1.0).abs() < 1e-5);
    }

    #[test]
    fn normalize_influences_noop_when_empty() {
        // Must not panic or divide by zero on an empty blend.
        normalize_influences(&mut new_ethnic_blend());
    }

    #[test]
    fn add_ethnic_set_negative_influence_clamped() {
        let mut blend = new_ethnic_blend();
        add_ethnic_set(&mut blend, 1, "A", vec![0.5], -5.0);
        let (_, influence) = &blend.sets[0];
        assert!((influence - 0.0).abs() < 1e-6);
    }

    #[test]
    fn set_count_multiple() {
        let mut blend = new_ethnic_blend();
        (0..5u32).for_each(|id| add_ethnic_set(&mut blend, id, "x", vec![], 1.0));
        assert_eq!(set_count(&blend), 5);
    }

    #[test]
    fn compute_blend_unequal_weights() {
        // Influences 3:1 -> first set contributes 3/4 of the result.
        let mut blend = new_ethnic_blend();
        add_ethnic_set(&mut blend, 1, "A", vec![1.0], 3.0);
        add_ethnic_set(&mut blend, 2, "B", vec![0.0], 1.0);
        let out = compute_ethnic_blend(&blend);
        assert!((out[0] - 0.75).abs() < 1e-5);
    }
}