karma 1.0.0

A sophisticated Hidden Markov Model (HMM) implementation using the Baum-Welch algorithm.

Documentation: see the crate docs; example usage below.
use karma::HiddenMarkovModel;

/// Example driver for the `karma` crate's Hidden Markov Model:
/// builds a 20-state / 10-observation HMM, randomizes its initial
/// probabilities, trains it on a handful of sequential patterns, and then
/// scores several test sequences under the learned model.
///
/// Any error from model construction, training, or evaluation is
/// propagated to the caller via `?`.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("Karma - Hidden Markov Model Example\n");
    println!("====================================\n");

    // Construct the model: 20 hidden states, 10 distinct observation symbols.
    println!("Creating HMM with 20 states and 10 observations...");
    let mut model = HiddenMarkovModel::new(20, 10)?;

    println!(
        "Initial state: {} states, {} observations\n",
        model.n_states(),
        model.n_observations()
    );

    // Perturb the starting probabilities so training can break symmetry.
    println!("Randomizing initial probabilities...");
    model.randomize_initial_probabilities();

    // Sequential patterns used as training data.
    let training_data = [
        vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        vec![0, 1, 4, 5, 7, 9, 0, 4, 7, 8, 2],
        vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
        vec![5, 6, 7, 8, 9, 0, 1, 2, 3, 4],
    ];

    println!("\nTraining on {} sequences...", training_data.len());
    for (idx, seq) in training_data.iter().enumerate() {
        // Some(0.05): tuning argument passed through to train() —
        // NOTE(review): its exact semantics (learning rate?) are defined
        // by the karma crate; confirm against its docs.
        model.train(seq, Some(0.05))?;
        println!("  Trained on sequence {}: {:?}", idx + 1, seq);
    }

    // Score held-out sequences under the trained model.
    println!("\nEvaluating test sequences:");
    println!("{:-<50}", "");

    let probes = [
        vec![5, 6, 7, 8],
        vec![0, 1, 2, 3],
        vec![9, 8, 7, 6],
        vec![0, 0, 0, 0],
    ];

    for (idx, seq) in probes.iter().enumerate() {
        let likelihood = model.evaluate(seq)?;
        println!(
            "Sequence {:2}: {:?} => P = {:.6e}",
            idx + 1,
            seq,
            likelihood
        );
    }

    println!("\n{:-<50}", "");
    println!("Sequences similar to training data have higher probability.");
    println!("Unusual sequences have lower probability.");

    Ok(())
}