pub trait LossOptimizer<M: Model>: Sized {
type Config: Sized;
// Required methods
fn new(vs: Vec<Var>, params: Self::Config, model: M) -> CResult<Self>;
fn backward_step(&mut self, loss: &Tensor) -> CResult<ModelOutcome>;
fn learning_rate(&self) -> f64;
fn set_learning_rate(&mut self, lr: f64);
fn into_inner(self) -> Vec<Var>;
// Provided method
fn from_slice(
vars: &[&Var],
config: Self::Config,
model: M,
) -> CResult<Self> { ... }
}

Expand description
Trait for optimisers, like LBFGS, that need the ability to calculate the loss and its gradient.
Required Associated Types§
Required Methods§
fn new(vs: Vec<Var>, params: Self::Config, model: M) -> CResult<Self>
create a new optimiser from a Vec of variables, setup parameters and a model
fn backward_step(&mut self, loss: &Tensor) -> CResult<ModelOutcome>
take a step of the optimiser
fn learning_rate(&self) -> f64
get the current learning rate
fn set_learning_rate(&mut self, lr: f64)
set the learning rate
fn into_inner(self) -> Vec<Var>
get a Vec of the variables being optimised
Provided Methods§
fn from_slice(vars: &[&Var], config: Self::Config, model: M) -> CResult<Self>
create a new optimiser from a slice of variables, setup parameters and a model
Dyn Compatibility§
This trait is not dyn compatible.
In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.