// gomez/core/optimizer.rs
use nalgebra::{storage::StorageMut, Dyn, IsContiguous, Vector};

use super::{domain::Domain, function::Function};
4
5/// Interface of an optimizer.
6///
7/// An optimizer is an iterative algorithm which takes a point _x_ and computes
8/// the next step in the optimization process. Repeated calls to the next step
9/// should eventually converge into a minimum _x'_.
10///
11/// If you implement an optimizer, please reach out to discuss if we could
12/// include it in gomez.
13///
14/// ## Implementing an optimizer
15///
16/// Here is an implementation of a random "optimizer" which randomly generates
17/// values in a hope that a minimum can be found with enough luck.
18///
19/// ```rust
20/// use gomez::nalgebra as na;
21/// use gomez::{Domain, Function, Optimizer, Sample};
22/// use na::{storage::StorageMut, Dyn, IsContiguous, Vector};
23/// use fastrand::Rng;
24///
25/// struct Random {
26/// rng: Rng,
27/// }
28///
29/// impl Random {
30/// fn new(rng: Rng) -> Self {
31/// Self { rng }
32/// }
33/// }
34///
35/// impl<F: Function> Optimizer<F> for Random
36/// where
37/// F::Field: Sample,
38/// {
39/// const NAME: &'static str = "Random";
40/// type Error = std::convert::Infallible;
41///
42/// fn opt_next<Sx>(
43/// &mut self,
44/// f: &F,
45/// dom: &Domain<F::Field>,
46/// x: &mut Vector<F::Field, Dyn, Sx>,
47/// ) -> Result<F::Field, Self::Error>
48/// where
49/// Sx: StorageMut<F::Field, Dyn> + IsContiguous,
50/// {
51/// // Randomly sample in the domain.
52/// dom.sample(x, &mut self.rng);
53///
54/// // We must compute the value.
55/// Ok(f.apply(x))
56/// }
57/// }
58/// ```
59pub trait Optimizer<F: Function> {
60 /// Name of the optimizer.
61 const NAME: &'static str;
62
63 /// Error while computing the next step.
64 type Error;
65
66 /// Computes the next step in the optimization process.
67 ///
68 /// The value of `x` is the current point. After the method returns, `x`
69 /// should hold the variable values of the performed step and the return
70 /// value _must_ be the function value of that step as computed by
71 /// [`Function::apply`].
72 ///
73 /// The implementations _can_ assume that subsequent calls to `opt_next`
74 /// pass the value of `x` as was returned in the previous iteration
75 fn opt_next<Sx>(
76 &mut self,
77 f: &F,
78 dom: &Domain<F::Field>,
79 x: &mut Vector<F::Field, Dyn, Sx>,
80 ) -> Result<F::Field, Self::Error>
81 where
82 Sx: StorageMut<F::Field, Dyn> + IsContiguous;
83
84 /// Indicates that the optimizer cannot make further progress.
85 ///
86 /// Generally, an optimizer can get stuck in a local optimum and in that
87 /// case cannot continue. This indication can be used in the stopping
88 /// criterion if a local optimum is satisfactory result.
89 ///
90 /// When an algorithm detects in an optimization iteration that it cannot
91 /// make progress, it should defer returning an error and instead set an
92 /// inner flag. This way, the user can react on the no-progress indication.
93 /// If another iteration is requested, then the algorithm should return an
94 /// error.
95 fn no_progress(&self) -> bool {
96 false
97 }
98}