use std::convert::{From, TryFrom};
use super::{KernelDense, KernelSparse};
use crate::{
errors::Error,
f32s,
parser::ModelFile,
sparse::{SparseMatrix, SparseVector},
};
use simd_aligned::{MatrixD, Rows, VectorD};
/// Parameters of the polynomial kernel:
/// `k(a, b) = (gamma * <a, b> + coef0) ^ degree`.
///
/// Constructed from a parsed model file via `TryFrom<&ModelFile>`; all three
/// fields must be present in the model header.
#[derive(Copy, Clone, Debug, Default)]
#[doc(hidden)]
pub struct Poly {
    // Polynomial exponent applied to the affine-transformed dot product.
    degree: u32,
    // Scale factor for the dot product.
    gamma: f32,
    // Additive constant inside the power.
    coef0: f32,
}
impl KernelDense for Poly {
    /// Evaluates the polynomial kernel `(gamma * <sv, feature> + coef0) ^ degree`
    /// between `feature` and every support-vector row of `vectors`, writing one
    /// value per row into `output`.
    ///
    /// NOTE(review): assumes `output.len() >= vectors` row count — indexing
    /// panics otherwise, same as before.
    fn compute(&self, vectors: &MatrixD<f32s, Rows>, feature: &VectorD<f32s>, output: &mut [f64]) {
        use simd_aligned::SimdExt;

        // Re-borrow once as a plain SIMD-lane slice instead of per row.
        let feature_lanes: &[f32s] = feature;

        for (row_idx, support_vector) in vectors.row_iter().enumerate() {
            // Lane-wise multiply-accumulate; reduced horizontally below.
            let mut dot = f32s::splat(0.0);
            for (a, b) in support_vector.iter().zip(feature_lanes) {
                dot += *a * *b;
            }

            output[row_idx] =
                crate::util::powi(f64::from(self.gamma * dot.sum() + self.coef0), self.degree);
        }
    }
}
impl KernelSparse for Poly {
    /// Evaluates the polynomial kernel `(gamma * <sv, feature> + coef0) ^ degree`
    /// for every sparse support-vector row, using a sorted-index merge so that
    /// only indices present in both vectors contribute to the dot product.
    fn compute(&self, vectors: &SparseMatrix<f32>, feature: &SparseVector<f32>, output: &mut [f64]) {
        use std::cmp::Ordering;

        for (row_idx, support_vector) in vectors.row_iter().enumerate() {
            let mut dot = 0.0;
            let mut lhs = support_vector.iter();
            let mut rhs = feature.iter();
            let mut cur_a = lhs.next();
            let mut cur_b = rhs.next();

            // Classic two-pointer merge over index-sorted sparse entries;
            // stops as soon as either stream is exhausted.
            while let (Some((i_a, x)), Some((i_b, y))) = (cur_a, cur_b) {
                match i_a.cmp(&i_b) {
                    Ordering::Equal => {
                        dot += x * y;
                        cur_a = lhs.next();
                        cur_b = rhs.next();
                    }
                    Ordering::Less => cur_a = lhs.next(),
                    Ordering::Greater => cur_b = rhs.next(),
                }
            }

            output[row_idx] =
                crate::util::powi(f64::from(self.gamma * dot + self.coef0), self.degree);
        }
    }
}
impl<'a, 'b> TryFrom<&'a ModelFile<'b>> for Poly {
type Error = Error;
fn try_from(raw_model: &'a ModelFile<'b>) -> Result<Self, Error> {
let gamma = raw_model.header.gamma.ok_or(Error::NoGamma)?;
let coef0 = raw_model.header.coef0.ok_or(Error::NoCoef0)?;
let degree = raw_model.header.degree.ok_or(Error::NoDegree)?;
Ok(Self { gamma, coef0, degree })
}
}