entrenar 0.7.13

Training & Optimization library with autograd, LoRA, quantization, and model merging
Documentation
1
2
3
4
5
6
7
8
9
10
11
//! Utility functions for transformer training

/// Convert a cross-entropy loss value into perplexity.
///
/// Perplexity is `e^loss`, so a loss of `0.0` corresponds to a
/// perplexity of `1.0` (a perfect model).
pub fn perplexity(loss: f32) -> f32 {
    // exp(loss) — written as an associated-function call.
    f32::exp(loss)
}

/// Throughput in tokens per second for a run that processed
/// `num_tokens` tokens over `elapsed_secs` seconds.
///
/// Note: per IEEE-754 float division, `elapsed_secs == 0.0` yields
/// `f64::INFINITY` (or NaN when `num_tokens` is also 0) rather than
/// panicking.
pub fn tokens_per_second(num_tokens: usize, elapsed_secs: f64) -> f64 {
    let tokens = num_tokens as f64;
    tokens / elapsed_secs
}