Trait UnifiedOptimizer

Source
pub trait UnifiedOptimizer<A: Float> {
    // Required methods
    fn config(&self) -> &OptimizerConfig<A>;
    fn step_param<D: Dimension>(
        &mut self,
        param: &mut Parameter<A, D>,
    ) -> Result<()>
       where A: ScalarOperand + Debug;
    fn set_lr(&mut self, lr: A);
    fn get_lr(&self) -> A;
    fn state_dict(&self) -> HashMap<String, Vec<u8>>;
    fn load_state_dict(
        &mut self,
        state_dict: HashMap<String, Vec<u8>>,
    ) -> Result<()>;

    // Provided methods
    fn step_params<D: Dimension>(
        &mut self,
        params: &mut [Parameter<A, D>],
    ) -> Result<()>
       where A: ScalarOperand + Debug { ... }
    fn zero_grad<D: Dimension>(&self, params: &mut [Parameter<A, D>]) { ... }
}
Expand description

Unified optimizer interface covering per-parameter update steps, learning-rate control, and state serialization.

Required Methods§

Source

fn config(&self) -> &OptimizerConfig<A>

Get optimizer configuration

Source

fn step_param<D: Dimension>( &mut self, param: &mut Parameter<A, D>, ) -> Result<()>
where A: ScalarOperand + Debug,

Update a single parameter

Source

fn set_lr(&mut self, lr: A)

Set the learning rate

Source

fn get_lr(&self) -> A

Get current learning rate

Source

fn state_dict(&self) -> HashMap<String, Vec<u8>>

Return the optimizer state as a serializable dictionary of raw byte buffers keyed by name

Source

fn load_state_dict( &mut self, state_dict: HashMap<String, Vec<u8>>, ) -> Result<()>

Load state from dictionary

Provided Methods§

Source

fn step_params<D: Dimension>( &mut self, params: &mut [Parameter<A, D>], ) -> Result<()>
where A: ScalarOperand + Debug,

Update multiple parameters

Source

fn zero_grad<D: Dimension>(&self, params: &mut [Parameter<A, D>])

Zero gradients for all parameters

Dyn Compatibility§

This trait is not dyn compatible.

In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.

Implementors§