use std::convert::TryFrom;

use simd_aligned::traits::Simd;

use crate::sparse::{SparseMatrix, SparseVector};
use crate::{
errors::Error,
parser::ModelFile,
svm::{
class::Class,
features::{FeatureVector, Label},
kernel::{KernelSparse, Linear, Poly, Rbf, Sigmoid},
predict::Predict,
Probabilities, SVMType,
},
util::{find_max_index, set_all, sigmoid_predict},
vectors::Triangular,
};
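/// A support vector machine for sparse feature vectors, i.e., inputs where
/// most attributes are zero and only the non-zero entries are stored.
///
/// A minimal usage sketch (the `MODEL` string holding a libSVM model in text
/// format is hypothetical; error handling elided):
///
/// ```ignore
/// use std::convert::TryFrom;
///
/// let svm = SparseSVM::try_from(MODEL)?;
/// let index = svm.class_index_for_label(1);
/// ```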
pub struct SparseSVM {
    /// Total number of support vectors across all classes.
    pub(crate) num_total_sv: usize,
    /// Number of attributes (features) per input vector.
    pub(crate) num_attributes: usize,
    /// Pairwise decision-boundary constants (`rho`) from the model file.
    pub(crate) rho: Triangular<f64>,
    /// Pairwise probability parameters, present only if the model was trained
    /// with probability estimates.
    pub(crate) probabilities: Option<Probabilities>,
    /// Classification (C-SVC, nu-SVC) or regression (epsilon-SVR, nu-SVR).
    pub(crate) svm_type: SVMType,
    /// Kernel used to compare feature vectors against support vectors.
    pub(crate) kernel: Box<dyn KernelSparse>,
    /// Per-class support vectors and coefficients.
    pub(crate) classes: Vec<Class<SparseMatrix<f32>>>,
}
impl SparseSVM {
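    /// Returns the internal index of the class carrying the given libSVM
    /// `label`, or `None` if no class has that label.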
#[must_use]
pub fn class_index_for_label(&self, label: i32) -> Option<usize> {
        self.classes.iter().position(|class| class.label == label)
}
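    /// Returns the libSVM label of the class at the given internal `index`,
    /// or `None` if the index is out of bounds.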
#[must_use]
pub fn class_label_for_index(&self, index: usize) -> Option<i32> {
        self.classes.get(index).map(|class| class.label)
}
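    /// Computes the kernel values between the problem's features and each
    /// class's support vectors, writing one row per class into
    /// `problem.kernel_values`.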
pub(crate) fn compute_kernel_values(&self, problem: &mut FeatureVector<SparseVector<f32>>) {
let features = &problem.features;
let kernel_values = &mut problem.kernel_values;
for (i, class) in self.classes.iter().enumerate() {
let kvalues = kernel_values.row_as_flat_mut(i);
self.kernel.compute(&class.support_vectors, features, kvalues);
}
}
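    /// Derives multi-class probability estimates from the pairwise decision
    /// values; shared with the dense implementation via
    /// `compute_multiclass_probabilities_impl!`.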
pub(crate) fn compute_multiclass_probabilities(&self, problem: &mut FeatureVector<SparseVector<f32>>) -> Result<(), Error> {
compute_multiclass_probabilities_impl!(self, problem)
}
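    /// Computes the pairwise decision values and tallies one vote per class
    /// pairing; shared via `compute_classification_values_impl!`.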
pub(crate) fn compute_classification_values(&self, problem: &mut FeatureVector<SparseVector<f32>>) {
compute_classification_values_impl!(self, problem);
}
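    /// Computes the regression estimate: the coefficient-weighted sum of the
    /// kernel values, minus `rho[0]`.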
pub(crate) fn compute_regression_values(&self, problem: &mut FeatureVector<SparseVector<f32>>) {
let class = &self.classes[0];
let coef = class.coefficients.row(0);
let kvalues = problem.kernel_values.row(0);
        // Weighted sum of kernel values; `.sum()` folds each SIMD product
        // into a scalar before accumulating.
        let mut sum = coef.iter().zip(kvalues).map(|(a, b)| (*a * *b).sum()).sum::<f64>();
sum -= self.rho[0];
problem.result = Label::Value(sum as f32);
}
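    /// Returns the number of attributes (features) this model expects per
    /// input vector.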
#[must_use]
pub const fn attributes(&self) -> usize {
self.num_attributes
}
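    /// Returns the number of classes the model distinguishes.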
#[must_use]
pub fn classes(&self) -> usize {
self.classes.len()
}
}
impl Predict<SparseVector<f32>> for SparseSVM {
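    /// Predicts a class label (C-SVC, nu-SVC) or a regression value
    /// (epsilon-SVR, nu-SVR) for the given problem, storing the outcome in
    /// `problem.result`.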
fn predict_value(&self, problem: &mut FeatureVector<SparseVector<f32>>) -> Result<(), Error> {
match self.svm_type {
SVMType::CSvc | SVMType::NuSvc => {
self.compute_kernel_values(problem);
self.compute_classification_values(problem);
let highest_vote = find_max_index(&problem.vote);
problem.result = Label::Class(self.classes[highest_vote].label);
Ok(())
}
SVMType::ESvr | SVMType::NuSvr => {
self.compute_kernel_values(problem);
self.compute_regression_values(problem);
Ok(())
}
}
}
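    /// Predicts class membership probabilities for the given problem; shared
    /// with the dense implementation via `predict_probability_impl!`.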
fn predict_probability(&self, problem: &mut FeatureVector<SparseVector<f32>>) -> Result<(), Error> {
predict_probability_impl!(self, problem)
}
}
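/// Parses a libSVM model given in its textual format.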
impl<'a> TryFrom<&'a str> for SparseSVM {
type Error = Error;
fn try_from(input: &'a str) -> Result<Self, Error> {
let raw_model = ModelFile::try_from(input)?;
Self::try_from(&raw_model)
}
}
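/// Converts an already parsed [`ModelFile`] into a `SparseSVM`, copying
/// support vectors and coefficients into per-class matrices.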
impl<'a> TryFrom<&'a ModelFile<'_>> for SparseSVM {
type Error = Error;
fn try_from(raw_model: &'a ModelFile<'_>) -> Result<Self, Error> {
        // Build the SVM skeleton (kernel, rho, classes, ...) from the model
        // header; `nr_sv` holds the number of support vectors per class.
        let (mut svm, nr_sv) = prepare_svm!(raw_model, dyn KernelSparse, SparseMatrix<f32>, Self);

        let vectors = &raw_model.vectors();

        // Support vectors are stored contiguously in the model file, grouped
        // by class; walk them class by class.
        let mut start_offset = 0;

        for (i, num_sv_per_class) in nr_sv.iter().enumerate() {
            let stop_offset = start_offset + *num_sv_per_class as usize;

            for (i_vector, vector) in vectors[start_offset..stop_offset].iter().enumerate() {
                // Copy the vector's non-zero attributes into the class's
                // sparse support vector matrix.
                let support_vectors = &mut svm.classes[i].support_vectors;
                for attribute in &vector.features {
                    support_vectors[(i_vector, attribute.index as usize)] = attribute.value;
                }

                // Copy this vector's coefficients (one per opposing class).
                let mut coefficients = svm.classes[i].coefficients.flat_mut();
                for (i_coefficient, coefficient) in vector.coefs.iter().enumerate() {
                    coefficients[(i_coefficient, i_vector)] = f64::from(*coefficient);
                }
            }

            start_offset = stop_offset;
        }
Ok(svm)
}
}