use crate::{
errors::Error,
parser::ModelFile,
svm::{
class::Class,
features::{FeatureVector, Label},
kernel::{KernelDense, Linear, Poly, Rbf, Sigmoid},
predict::Predict,
Probabilities, SVMType,
},
util::{find_max_index, set_all, sigmoid_predict},
vectors::Triangular,
};
use simd_aligned::{arch::f32x8, traits::Simd, MatSimd, Rows, VecSimd};
use std::convert::TryFrom;
/// A SVM using SIMD-aligned dense (matrix-backed) support vectors.
///
/// Built from a libsvm model via `TryFrom<&str>` or `TryFrom<&ModelFile>`.
pub struct DenseSVM {
    /// Total number of support vectors across all classes.
    pub(crate) num_total_sv: usize,

    /// Number of attributes (features) each support vector carries.
    pub(crate) num_attributes: usize,

    /// Rho values from the model, one per class pair (triangular layout).
    pub(crate) rho: Triangular<f64>,

    /// Probability parameters (A/B), present only if the model was trained
    /// with probability estimates.
    pub(crate) probabilities: Option<Probabilities>,

    /// Which SVM flavor this model is (classification or regression).
    pub(crate) svm_type: SVMType,

    /// The kernel used to compare feature vectors against support vectors.
    pub(crate) kernel: Box<dyn KernelDense>,

    /// Per-class data: label, support vectors (SIMD matrix) and coefficients.
    pub(crate) classes: Vec<Class<MatSimd<f32x8, Rows>>>,
}
impl DenseSVM {
    /// Returns the internal class index for the given `label`, or `None` if
    /// no class carries that label.
    #[must_use]
    pub fn class_index_for_label(&self, label: i32) -> Option<usize> {
        // Idiomatic replacement for a manual continue/return scan.
        self.classes.iter().position(|class| class.label == label)
    }

    /// Returns the label of the class at `index`, or `None` if `index` is
    /// out of bounds.
    #[must_use]
    pub fn class_label_for_index(&self, index: usize) -> Option<i32> {
        // `get` performs the bounds check; `map` extracts the label.
        self.classes.get(index).map(|class| class.label)
    }

    /// Computes one row of kernel values per class from the problem's
    /// features, writing into `problem.kernel_values`.
    pub(crate) fn compute_kernel_values(&self, problem: &mut FeatureVector<VecSimd<f32x8>>) {
        let features = &problem.features;
        let kernel_values = &mut problem.kernel_values;

        for (i, class) in self.classes.iter().enumerate() {
            let kvalues = kernel_values.row_as_flat_mut(i);
            self.kernel.compute(&class.support_vectors, features, kvalues);
        }
    }

    /// Computes per-class probabilities (multiclass case); shared logic lives
    /// in the macro so dense and sparse SVMs stay in sync.
    pub(crate) fn compute_multiclass_probabilities(&self, problem: &mut FeatureVector<VecSimd<f32x8>>) -> Result<(), Error> {
        compute_multiclass_probabilities_impl!(self, problem)
    }

    /// Computes pairwise decision values and votes for classification.
    pub(crate) fn compute_classification_values(&self, problem: &mut FeatureVector<VecSimd<f32x8>>) {
        compute_classification_values_impl!(self, problem);
    }

    /// Computes the regression value: dot(coefficients, kernel values) - rho.
    pub(crate) fn compute_regression_values(&self, problem: &mut FeatureVector<VecSimd<f32x8>>) {
        let class = &self.classes[0];
        let coef = class.coefficients.row(0);
        let kvalues = problem.kernel_values.row(0);

        let mut sum = coef.iter().zip(kvalues).map(|(a, b)| (*a * *b).sum()).sum::<f64>();
        sum -= self.rho[0];

        problem.result = Label::Value(sum as f32);
    }

    /// Number of attributes (features) this model expects per input vector.
    #[must_use]
    pub const fn attributes(&self) -> usize {
        self.num_attributes
    }

    /// Number of classes in the model (1 for regression models).
    #[must_use]
    pub fn classes(&self) -> usize {
        self.classes.len()
    }
}
impl Predict<VecSimd<f32x8>> for DenseSVM {
    /// Predicts a label (classification) or value (regression) for `fv`,
    /// storing the outcome in `fv.result`.
    fn predict_value(&self, fv: &mut FeatureVector<VecSimd<f32x8>>) -> Result<(), Error> {
        // Every SVM flavor needs the kernel values first; compute them once.
        self.compute_kernel_values(fv);

        match self.svm_type {
            SVMType::CSvc | SVMType::NuSvc => {
                self.compute_classification_values(fv);
                let winner = find_max_index(&fv.vote);
                fv.result = Label::Class(self.classes[winner].label);
            }
            SVMType::ESvr | SVMType::NuSvr => self.compute_regression_values(fv),
        }

        Ok(())
    }

    /// Predicts class probabilities for `problem`; shared logic lives in the
    /// macro so dense and sparse SVMs stay in sync.
    fn predict_probability(&self, problem: &mut FeatureVector<VecSimd<f32x8>>) -> Result<(), Error> {
        predict_probability_impl!(self, problem)
    }
}
impl<'a> TryFrom<&'a str> for DenseSVM {
    type Error = Error;

    /// Parses a libsvm model from its textual representation and builds a
    /// [`DenseSVM`] from it.
    fn try_from(input: &'a str) -> Result<Self, Error> {
        // Parse the raw text first, then convert the parsed model.
        ModelFile::try_from(input).and_then(|parsed| Self::try_from(&parsed))
    }
}
impl<'a> TryFrom<&'a ModelFile<'_>> for DenseSVM {
type Error = Error;
fn try_from(raw_model: &'a ModelFile<'_>) -> Result<Self, Error> {
let (mut svm, nr_sv) = prepare_svm!(raw_model, dyn KernelDense, MatSimd<f32x8, Rows>, Self);
let vectors = &raw_model.vectors();
let mut start_offset = 0;
for (i, num_sv_per_class) in nr_sv.iter().enumerate() {
let stop_offset = start_offset + *num_sv_per_class as usize;
for (i_vector, vector) in vectors[start_offset..stop_offset].iter().enumerate() {
let mut last_attribute = None;
for (i_attribute, attribute) in vector.features.iter().enumerate() {
if let Some(last) = last_attribute {
if attribute.index != last + 1 {
return Result::Err(Error::AttributesUnordered {
index: attribute.index,
value: attribute.value,
last_index: last,
});
}
};
let mut support_vectors = svm.classes[i].support_vectors.flat_mut();
support_vectors[(i_vector, i_attribute)] = attribute.value;
last_attribute = Some(attribute.index);
}
for (i_coefficient, coefficient) in vector.coefs.iter().enumerate() {
let mut coefficients = svm.classes[i].coefficients.flat_mut();
coefficients[(i_coefficient, i_vector)] = f64::from(*coefficient);
}
}
start_offset = stop_offset;
}
Ok(svm)
}
}
#[cfg(test)]
mod tests {
    use crate::*;
    use std::convert::TryFrom;

    /// Verifies label-to-index lookups against the bundled sample model.
    #[test]
    fn class_operations() -> Result<(), Error> {
        let svm = DenseSVM::try_from(SAMPLE_MODEL)?;

        // Label 0 does not exist in the sample model...
        assert!(svm.class_index_for_label(0).is_none());
        // ...while label 42 maps to the second class.
        assert_eq!(svm.class_index_for_label(42), Some(1));

        Ok(())
    }
}