yscv-optim 0.1.8

Optimizers (SGD, Adam, LAMB), LR schedulers, and gradient clipping
Documentation
use super::OptimError;

/// Shared learning-rate control surface for optimizers.
/// Shared learning-rate control surface for optimizers.
///
/// Implemented by each optimizer (e.g. SGD, Adam, LAMB) so that LR
/// schedulers and other callers can read and adjust the learning rate
/// through a single trait, without knowing the concrete optimizer type.
pub trait LearningRate {
    /// Returns the optimizer's current learning rate.
    fn learning_rate(&self) -> f32;

    /// Sets the optimizer's learning rate after validating `lr`.
    ///
    /// # Errors
    ///
    /// Returns an [`OptimError`] when `lr` fails validation.
    /// NOTE(review): the exact criteria are implementor-defined and not
    /// visible here — presumably `lr` must be finite and positive; confirm
    /// against the concrete optimizer implementations.
    fn set_learning_rate(&mut self, lr: f32) -> Result<(), OptimError>;
}