rv/process/gaussian/kernel/
rational_quadratic.rs

1use super::{e2_norm, CovGrad, CovGradError, Kernel, KernelError, E2METRIC};
2use nalgebra::base::storage::Storage;
3use nalgebra::{
4    base::constraint::{SameNumberOfColumns, ShapeConstraint},
5    Norm,
6};
7use nalgebra::{dvector, DMatrix, DVector, Dim, Matrix};
8use std::f64;
9
10#[cfg(feature = "serde1")]
11use serde::{Deserialize, Serialize};
12
/// Rational Quadratic Kernel
///
/// A stationary kernel equivalent to a scale mixture of RBF kernels with
/// different length scales:
/// `k(a, b) = (1 + d² / (2 · mixture · scale²))^(-mixture)`,
/// where `d²` is the squared distance between `a` and `b`
/// (per `E2METRIC` / `e2_norm` — see `covariance_with_gradient`).
///
/// # Parameters
/// `scale` -- Length scale (strictly positive; enforced by `new`)
/// `mixture` -- Mixture Scale (strictly positive; enforced by `new`)
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde1", serde(rename_all = "snake_case"))]
pub struct RationalQuadratic {
    // Length scale `l`; `new` rejects values <= 0.
    scale: f64,
    // Mixture parameter `alpha`; `new` rejects values <= 0.
    mixture: f64,
}
25
26impl RationalQuadratic {
27    /// Create a new RationalQuadratic kernel
28    pub fn new(scale: f64, mixture: f64) -> Result<Self, KernelError> {
29        if scale <= 0.0 {
30            Err(KernelError::ParameterOutOfBounds {
31                name: "scale".to_string(),
32                given: scale,
33                bounds: (0.0, f64::INFINITY),
34            })
35        } else if mixture <= 0.0 {
36            Err(KernelError::ParameterOutOfBounds {
37                name: "mixture".to_string(),
38                given: mixture,
39                bounds: (0.0, f64::INFINITY),
40            })
41        } else {
42            Ok(Self { scale, mixture })
43        }
44    }
45
46    /// Create a new RationalQuadratic without checking values
47    pub fn new_unchecked(scale: f64, mixture: f64) -> Self {
48        Self { scale, mixture }
49    }
50}
51
52impl Kernel for RationalQuadratic {
53    fn n_parameters(&self) -> usize {
54        2
55    }
56    fn covariance<R1, R2, C1, C2, S1, S2>(
57        &self,
58        x1: &Matrix<f64, R1, C1, S1>,
59        x2: &Matrix<f64, R2, C2, S2>,
60    ) -> DMatrix<f64>
61    where
62        R1: Dim,
63        R2: Dim,
64        C1: Dim,
65        C2: Dim,
66        S1: Storage<f64, R1, C1>,
67        S2: Storage<f64, R2, C2>,
68        ShapeConstraint: SameNumberOfColumns<C1, C2>,
69    {
70        let d = (2.0 * self.scale * self.scale * self.mixture).sqrt();
71        DMatrix::from_fn(x1.nrows(), x2.nrows(), |i, j| {
72            let t = e2_norm(&x1.row(i), &x2.row(j), d);
73            (1.0 + t).powf(-self.mixture)
74        })
75    }
76
77    fn is_stationary(&self) -> bool {
78        true
79    }
80
81    fn diag<R, C, S>(&self, x: &Matrix<f64, R, C, S>) -> DVector<f64>
82    where
83        R: Dim,
84        C: Dim,
85        S: Storage<f64, R, C>,
86    {
87        DVector::repeat(x.len(), 1.0)
88    }
89
90    fn parameters(&self) -> DVector<f64> {
91        dvector![self.scale.ln(), self.mixture.ln()]
92    }
93
94    fn reparameterize(&self, params: &[f64]) -> Result<Self, KernelError> {
95        match params {
96            [] => Err(KernelError::MissingParameters(2)),
97            [_] => Err(KernelError::MissingParameters(1)),
98            [length_scale, mixture] => {
99                Self::new(length_scale.exp(), mixture.exp())
100            }
101            _ => Err(KernelError::ExtraneousParameters(params.len() - 1)),
102        }
103    }
104
105    fn covariance_with_gradient<R, C, S>(
106        &self,
107        x: &Matrix<f64, R, C, S>,
108    ) -> Result<(DMatrix<f64>, CovGrad), CovGradError>
109    where
110        R: Dim,
111        C: Dim,
112        S: Storage<f64, R, C>,
113    {
114        let n = x.nrows();
115        let mut cov = DMatrix::zeros(n, n);
116        let mut grad = CovGrad::zeros(n, 2);
117        let d = 2.0 * self.mixture * self.scale.powi(2);
118        for i in 0..n {
119            // off-diagnols
120            for j in 0..i {
121                let d2 = E2METRIC.metric_distance(&x.row(i), &x.row(j));
122                let temp = d2 / d;
123                let base = 1.0 + temp;
124                let k = base.powf(-self.mixture);
125                cov[(i, j)] = k;
126                cov[(j, i)] = k;
127
128                let dk_dl = d2 * k / (self.scale.powi(2) * base);
129                let dk_da = k * base.ln().mul_add(
130                    -self.mixture,
131                    d2 / (2.0 * self.scale.powi(2) * base),
132                );
133
134                grad[(i, j, 0)] = dk_dl;
135                grad[(j, i, 0)] = dk_dl;
136                grad[(j, i, 1)] = dk_da;
137                grad[(i, j, 1)] = dk_da;
138            }
139            // diag
140            cov[(i, i)] = 1.0;
141        }
142        Ok((cov, grad))
143    }
144}
145
#[cfg(test)]
mod tests {
    use super::*;

    // Regression test: construction, log-space parameters, cross-covariance
    // against exact rational fixtures, and the covariance/gradient pair.
    #[test]
    fn rational_quadratic() -> Result<(), KernelError> {
        let kernel = RationalQuadratic::new(3.0, 5.0)?;
        // Parameters are reported in log-space: [ln(scale), ln(mixture)].
        assert::close(kernel.parameters()[0], 3.0_f64.ln(), 1E-10);
        assert::close(kernel.parameters()[1], 5.0_f64.ln(), 1E-10);
        assert!(kernel.parameters().relative_eq(
            &dvector![3.0_f64.ln(), 5.0_f64.ln()],
            1E-10,
            1E-10
        ));

        // Two 2-D samples per matrix (one sample per row).
        let x = DMatrix::from_row_slice(2, 2, &[1.0, 2.0, 3.0, 4.0]);
        let y = DMatrix::from_row_slice(2, 2, &[5.0, 7.0, 6.0, 8.0]);

        // Cross-covariance k(x_i, y_j); fixtures are exact rationals of
        // (1 + d²/90)^(-5) cleared of denominators.
        let cov = kernel.covariance(&x, &y);
        let expected_cov = DMatrix::from_row_slice(
            2,
            2,
            &[
                5_904_900_000.0 / 38_579_489_651.0,
                5_904_900_000.0 / 78_502_725_751.0,
                5_904_900_000.0 / 11_592_740_742.0,
                1_889_568.0 / 6_436_343.0,
            ],
        );
        assert!(cov.relative_eq(&expected_cov, 1E-8, 1E-8));

        let (cov, grad) = kernel.covariance_with_gradient(&x)?;

        // Self-covariance: ones on the diagonal, symmetric off-diagonal.
        let expected_cov = DMatrix::from_row_slice(
            2,
            2,
            &[
                1.0,
                184_528_125.0 / 282_475_249.0,
                184_528_125.0 / 282_475_249.0,
                1.0,
            ],
        );

        // Expected gradients w.r.t. ln(scale) (slice 0) and ln(mixture)
        // (slice 1); diagonal gradient entries are zero.
        let eg_l = 0.533_268_68;
        let eg_a = -0.011_514_11;
        let expected_grad = CovGrad::from_row_slices(
            2,
            2,
            &[0.0, eg_l, eg_l, 0.0, 0.0, eg_a, eg_a, 0.0],
        )
        .unwrap();
        assert!(cov.relative_eq(&expected_cov, 1E-8, 1E-8));
        assert!(grad.relative_eq(&expected_grad, 1E-8, 1E-8));
        Ok(())
    }
}