use std::f64::EPSILON;

use problems::Problem;
use types::{Function, Function1};


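/// Wraps a function so that it can be used where a gradient is required:
/// values are delegated to the wrapped function and partial derivatives are
/// approximated numerically using forward differences.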
pub struct NumericalDifferentiation<F: Function> {
    function: F
}

impl<F: Function> NumericalDifferentiation<F> {
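    /// Creates a new wrapper around the given `function` whose gradient is
    /// computed numerically instead of analytically.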
    pub fn new(function: F) -> Self {
        NumericalDifferentiation {
            function
        }
    }
}

impl<F: Function> Function for NumericalDifferentiation<F> {
    fn value(&self, position: &[f64]) -> f64 {
        self.function.value(position)
    }
}

impl<F: Function> Function1 for NumericalDifferentiation<F> {
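    /// Approximates the gradient by forward differences: each partial
    /// derivative is estimated as `(f(x + h * e_i) - f(x)) / h` with a small,
    /// coordinate-dependent step `h`.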
    fn gradient(&self, position: &[f64]) -> Vec<f64> {
        let mut x: Vec<_> = position.to_vec();

        // Evaluate f(x) once and reuse it for every partial derivative.
        let current = self.value(&x);

        position.iter().cloned().enumerate().map(|(i, x_i)| {
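            // Pick a step relative to the coordinate's magnitude to balance
            // truncation and rounding error; fall back to a fixed small step
            // when the coordinate is exactly zero.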
            let h = if x_i == 0.0 {
                EPSILON * 1.0e10
            } else {
                (EPSILON * x_i.abs()).sqrt()
            };

            assert!(h.is_finite());

            // Evaluate f at the perturbed point, then restore the coordinate.
            x[i] = x_i + h;

            let forward = self.function.value(&x);

            x[i] = x_i;

            let d_i = (forward - current) / h;

            assert!(d_i.is_finite());

            d_i
        }).collect()
    }
}

impl<F: Function + Default> Default for NumericalDifferentiation<F> {
    fn default() -> Self {
        NumericalDifferentiation::new(F::default())
    }
}

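/// Delegates all `Problem` queries to the wrapped function.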
impl<F: Problem> Problem for NumericalDifferentiation<F> {
    fn dimensions(&self) -> usize {
        self.function.dimensions()
    }

    fn domain(&self) -> Vec<(f64, f64)> {
        self.function.domain()
    }

    fn minimum(&self) -> (Vec<f64>, f64) {
        self.function.minimum()
    }

    fn random_start(&self) -> Vec<f64> {
        self.function.random_start()
    }
}


#[cfg(test)]
mod tests {
    use types::Function1;
    use problems::{Problem, Sphere, Rosenbrock};
    use utils::are_close;
    use gd::GradientDescent;

    use super::NumericalDifferentiation;

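    // Compares the numeric gradient with the analytical one at random
    // points: both must be finite and agree within the given tolerance.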
    #[test]
    fn test_accuracy() {
        let problem = Rosenbrock::default();

        let problems = vec![problem];

        for analytical_problem in problems {
            let numerical_problem = NumericalDifferentiation::new(analytical_problem);

            for _ in 0..1000 {
                let position = analytical_problem.random_start();

                let analytical_gradient = analytical_problem.gradient(&position);
                let numerical_gradient = numerical_problem.gradient(&position);

                assert_eq!(analytical_gradient.len(), numerical_gradient.len());

                assert!(analytical_gradient.into_iter().zip(numerical_gradient).all(|(a, n)|
                    a.is_finite() && n.is_finite() && are_close(a, n, 1.0e-3)
                ));
            }
        }
    }

    test_minimizer!{GradientDescent::new(),
                    test_gd_sphere => NumericalDifferentiation::new(Sphere::default()),
                    test_gd_rosenbrock => NumericalDifferentiation::new(Rosenbrock::default())}
}