burn_optim/lr_scheduler/
constant.rs

1use burn_core as burn;
2
3use burn::tensor::backend::Backend;
4
5use super::LrScheduler;
6use crate::LearningRate;
7
/// Constant learning rate implementing [learning rate scheduler](LrScheduler).
///
/// # Notes
///
/// You can also use [learning rate](LearningRate) directly, which has the same effect.
#[derive(new, Clone, Debug)]
pub struct ConstantLr {
    /// The fixed learning rate returned unchanged on every step.
    lr: LearningRate,
}
17
18impl From<LearningRate> for ConstantLr {
19    fn from(lr: LearningRate) -> Self {
20        Self { lr }
21    }
22}
23
impl LrScheduler for ConstantLr {
    // The scheduler holds no evolving state (no step counter), so there is
    // nothing to checkpoint: the record type is the unit type.
    type Record<B: Backend> = ();

    fn step(&mut self) -> LearningRate {
        // Always return the same learning rate, regardless of how many times
        // `step` has been called.
        self.lr
    }

    fn to_record<B: Backend>(&self) -> Self::Record<B> {}

    fn load_record<B: Backend>(self, _record: Self::Record<B>) -> Self {
        // Nothing to restore — the scheduler is fully described by its `lr`
        // field, which is set at construction time.
        self
    }
}
37
// A bare `LearningRate` value acts as its own constant scheduler, so users
// can pass a plain number wherever an `LrScheduler` is expected.
impl LrScheduler for LearningRate {
    // Stateless, like `ConstantLr`: nothing to persist across checkpoints.
    type Record<B: Backend> = ();

    fn step(&mut self) -> LearningRate {
        // Return the value itself; it never changes between steps.
        *self
    }

    fn to_record<B: Backend>(&self) -> Self::Record<B> {}

    fn load_record<B: Backend>(self, _record: Self::Record<B>) -> Self {
        self
    }
}