// libsvm_rs — kernel.rs

//! Kernel functions matching the original LIBSVM.
//!
//! Provides both:
//! - Standalone `k_function` for prediction (operates on sparse node slices)
//! - `Kernel` struct for training (precomputes x_square for RBF, stores refs)

use crate::types::{KernelType, SvmNode, SvmParameter};

// ─── Integer power (matches LIBSVM's powi) ─────────────────────────

/// Exponentiation by squaring for integer exponents, mirroring
/// LIBSVM's `powi(base, times)`.
///
/// A negative (or zero) `times` yields 1.0, exactly as in the C
/// source, whose loop body never executes when `t <= 0`.
#[inline]
pub fn powi(base: f64, times: i32) -> f64 {
    let mut result = 1.0;
    let mut square = base;
    let mut exp = times;
    while exp > 0 {
        if exp & 1 == 1 {
            result *= square;
        }
        square *= square;
        exp >>= 1;
    }
    result
}

30// ─── Sparse dot product ─────────────────────────────────────────────
31
32/// Sparse dot product of two sorted-by-index node slices.
33///
34/// This is the merge-based O(n+m) algorithm from LIBSVM.
35#[inline]
36pub fn dot(x: &[SvmNode], y: &[SvmNode]) -> f64 {
37    let mut sum = 0.0;
38    let mut ix = 0;
39    let mut iy = 0;
40    while ix < x.len() && iy < y.len() {
41        if x[ix].index == y[iy].index {
42            sum += x[ix].value * y[iy].value;
43            ix += 1;
44            iy += 1;
45        } else if x[ix].index > y[iy].index {
46            iy += 1;
47        } else {
48            ix += 1;
49        }
50    }
51    sum
52}
53
54/// Squared Euclidean distance for sparse vectors (used by RBF k_function).
55///
56/// Computes ‖x - y‖² without computing the difference vector.
57#[inline]
58fn sparse_sq_dist(x: &[SvmNode], y: &[SvmNode]) -> f64 {
59    let mut sum = 0.0;
60    let mut ix = 0;
61    let mut iy = 0;
62    while ix < x.len() && iy < y.len() {
63        if x[ix].index == y[iy].index {
64            let d = x[ix].value - y[iy].value;
65            sum += d * d;
66            ix += 1;
67            iy += 1;
68        } else if x[ix].index > y[iy].index {
69            sum += y[iy].value * y[iy].value;
70            iy += 1;
71        } else {
72            sum += x[ix].value * x[ix].value;
73            ix += 1;
74        }
75    }
76    // Drain remaining elements
77    while ix < x.len() {
78        sum += x[ix].value * x[ix].value;
79        ix += 1;
80    }
81    while iy < y.len() {
82        sum += y[iy].value * y[iy].value;
83        iy += 1;
84    }
85    sum
86}
87
88// ─── Standalone kernel evaluation ───────────────────────────────────
89
90/// Evaluate the kernel function K(x, y) for the given parameters.
91///
92/// This is the standalone version used during prediction. Matches
93/// LIBSVM's `Kernel::k_function`.
94pub fn k_function(x: &[SvmNode], y: &[SvmNode], param: &SvmParameter) -> f64 {
95    match param.kernel_type {
96        KernelType::Linear => dot(x, y),
97        KernelType::Polynomial => {
98            powi(param.gamma * dot(x, y) + param.coef0, param.degree)
99        }
100        KernelType::Rbf => {
101            (-param.gamma * sparse_sq_dist(x, y)).exp()
102        }
103        KernelType::Sigmoid => {
104            (param.gamma * dot(x, y) + param.coef0).tanh()
105        }
106        KernelType::Precomputed => {
107            // For precomputed kernels, x[y[0].value as index] gives the value.
108            // y[0].value is the column index (1-based SV index).
109            let col = y[0].value as usize;
110            x.get(col).map_or(0.0, |n| n.value)
111        }
112    }
113}
114
// ─── Kernel struct for training ─────────────────────────────────────

/// Kernel evaluator for training. Holds references to the dataset and
/// precomputes `x_square[i] = dot(x[i], x[i])` for RBF kernels.
///
/// Stores `Vec<&'a [SvmNode]>` so that the solver can swap entries
/// during shrinking (mirroring the C++ pointer-array swap trick).
///
/// The `kernel_function` method pointer pattern from C++ is replaced
/// by a match on `kernel_type` — the branch predictor handles this
/// efficiently since the type doesn't change during training.
pub struct Kernel<'a> {
    // Borrowed rows of the training set; swapped in place by `swap_index`.
    x: Vec<&'a [SvmNode]>,
    // `Some` only when kernel_type is RBF: x_square[i] = dot(x[i], x[i]).
    x_square: Option<Vec<f64>>,
    // Kernel settings copied out of `SvmParameter` at construction time.
    kernel_type: KernelType,
    degree: i32,
    gamma: f64,
    coef0: f64,
}

135impl<'a> Kernel<'a> {
136    /// Create a new kernel evaluator for the given dataset and parameters.
137    pub fn new(x: &'a [Vec<SvmNode>], param: &SvmParameter) -> Self {
138        let x_refs: Vec<&'a [SvmNode]> = x.iter().map(|xi| xi.as_slice()).collect();
139        let x_square = if param.kernel_type == KernelType::Rbf {
140            Some(x_refs.iter().map(|xi| dot(xi, xi)).collect())
141        } else {
142            None
143        };
144
145        Self {
146            x: x_refs,
147            x_square,
148            kernel_type: param.kernel_type,
149            degree: param.degree,
150            gamma: param.gamma,
151            coef0: param.coef0,
152        }
153    }
154
155    /// Evaluate K(x\[i\], x\[j\]) using precomputed data where possible.
156    #[inline]
157    pub fn evaluate(&self, i: usize, j: usize) -> f64 {
158        match self.kernel_type {
159            KernelType::Linear => dot(self.x[i], self.x[j]),
160            KernelType::Polynomial => {
161                powi(self.gamma * dot(self.x[i], self.x[j]) + self.coef0, self.degree)
162            }
163            KernelType::Rbf => {
164                // Use precomputed x_square: ‖x_i - x_j‖² = x_sq[i] + x_sq[j] - 2*dot(x_i, x_j)
165                let sq = self.x_square.as_ref().unwrap();
166                let val = sq[i] + sq[j] - 2.0 * dot(self.x[i], self.x[j]);
167                (-self.gamma * val).exp()
168            }
169            KernelType::Sigmoid => {
170                (self.gamma * dot(self.x[i], self.x[j]) + self.coef0).tanh()
171            }
172            KernelType::Precomputed => {
173                let col = self.x[j][0].value as usize;
174                self.x[i].get(col).map_or(0.0, |n| n.value)
175            }
176        }
177    }
178
179    /// Swap data-point references and precomputed squares at positions i and j.
180    ///
181    /// Used by QMatrix implementations during solver shrinking.
182    pub fn swap_index(&mut self, i: usize, j: usize) {
183        self.x.swap(i, j);
184        if let Some(ref mut sq) = self.x_square {
185            sq.swap(i, j);
186        }
187    }
188}
189
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::SvmParameter;

    /// Build a sparse vector from `(index, value)` pairs.
    fn make_nodes(pairs: &[(i32, f64)]) -> Vec<SvmNode> {
        pairs
            .iter()
            .copied()
            .map(|(index, value)| SvmNode { index, value })
            .collect()
    }

    #[test]
    fn powi_basic() {
        assert_eq!(powi(2.0, 10), 1024.0);
        assert_eq!(powi(3.0, 0), 1.0);
        assert_eq!(powi(5.0, 1), 5.0);
        assert!((powi(2.0, 3) - 8.0).abs() < 1e-15);
        // LIBSVM's loop never runs for a negative exponent, so the result is 1.0.
        assert_eq!(powi(2.0, -1), 1.0);
    }

    #[test]
    fn dot_product() {
        let x = make_nodes(&[(1, 1.0), (3, 2.0), (5, 3.0)]);
        let y = make_nodes(&[(1, 4.0), (2, 5.0), (5, 6.0)]);
        // Shared indices 1 and 5: 1*4 + 3*6 = 22.
        assert!((dot(&x, &y) - 22.0).abs() < 1e-15);
    }

    #[test]
    fn dot_disjoint() {
        // No common index at all — the product must be exactly zero.
        let x = make_nodes(&[(1, 1.0), (3, 2.0)]);
        let y = make_nodes(&[(2, 5.0), (4, 6.0)]);
        assert_eq!(dot(&x, &y), 0.0);
    }

    #[test]
    fn dot_empty() {
        assert_eq!(dot(&make_nodes(&[]), &make_nodes(&[(1, 1.0)])), 0.0);
    }

    #[test]
    fn kernel_linear() {
        let x = make_nodes(&[(1, 1.0), (2, 2.0)]);
        let y = make_nodes(&[(1, 3.0), (2, 4.0)]);
        let param = SvmParameter {
            kernel_type: KernelType::Linear,
            ..Default::default()
        };
        // 1*3 + 2*4 = 11.
        assert!((k_function(&x, &y, &param) - 11.0).abs() < 1e-15);
    }

    #[test]
    fn kernel_rbf() {
        let x = make_nodes(&[(1, 1.0), (2, 0.0)]);
        let y = make_nodes(&[(1, 0.0), (2, 1.0)]);
        let param = SvmParameter {
            kernel_type: KernelType::Rbf,
            gamma: 0.5,
            ..Default::default()
        };
        // ‖x−y‖² = 1+1 = 2, so K = exp(−0.5·2) = e⁻¹.
        assert!((k_function(&x, &y, &param) - (-1.0_f64).exp()).abs() < 1e-15);
    }

    #[test]
    fn kernel_poly() {
        let x = make_nodes(&[(1, 1.0), (2, 2.0)]);
        let y = make_nodes(&[(1, 3.0), (2, 4.0)]);
        let param = SvmParameter {
            kernel_type: KernelType::Polynomial,
            gamma: 1.0,
            coef0: 1.0,
            degree: 2,
            ..Default::default()
        };
        // (γ·⟨x,y⟩ + c)^d = (11 + 1)² = 144.
        assert!((k_function(&x, &y, &param) - 144.0).abs() < 1e-15);
    }

    #[test]
    fn kernel_sigmoid() {
        let x = make_nodes(&[(1, 1.0)]);
        let y = make_nodes(&[(1, 1.0)]);
        let param = SvmParameter {
            kernel_type: KernelType::Sigmoid,
            gamma: 1.0,
            coef0: 0.0,
            ..Default::default()
        };
        // tanh(1·1 + 0) = tanh(1).
        assert!((k_function(&x, &y, &param) - 1.0_f64.tanh()).abs() < 1e-15);
    }

    #[test]
    fn kernel_struct_matches_standalone() {
        let data = vec![
            make_nodes(&[(1, 0.5), (3, -1.0)]),
            make_nodes(&[(1, -0.25), (2, 0.75)]),
            make_nodes(&[(2, 1.0), (3, 0.5)]),
        ];
        let param = SvmParameter {
            kernel_type: KernelType::Rbf,
            gamma: 0.5,
            ..Default::default()
        };

        let kern = Kernel::new(&data, &param);

        // Kernel::evaluate must agree with k_function on every pair.
        for (i, xi) in data.iter().enumerate() {
            for (j, xj) in data.iter().enumerate() {
                let via_struct = kern.evaluate(i, j);
                let via_func = k_function(xi, xj, &param);
                assert!(
                    (via_struct - via_func).abs() < 1e-15,
                    "mismatch at ({},{}): {} vs {}",
                    i, j, via_struct, via_func
                );
            }
        }
    }

    #[test]
    fn rbf_self_kernel_is_one() {
        let x = make_nodes(&[(1, 3.0), (5, -2.0), (10, 0.7)]);
        let param = SvmParameter {
            kernel_type: KernelType::Rbf,
            gamma: 1.0,
            ..Default::default()
        };
        // Zero distance to itself: K(x, x) = exp(−γ·0) = 1.
        assert!((k_function(&x, &x, &param) - 1.0).abs() < 1e-15);
    }
}
329}