use std::fmt;
use num_traits::Unsigned;
use crate::api::{Predictor, SupervisedEstimator};
use crate::error::Failed;
use crate::linalg::basic::arrays::{Array1, Array2, ArrayView1};
use crate::naive_bayes::{BaseNaiveBayes, NBDistribution};
use crate::numbers::basenum::Number;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// Underlying distribution used by the multinomial naive Bayes classifier.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, PartialEq, Clone)]
struct MultinomialNBDistribution<T: Number> {
    /// Class labels known to the classifier
    class_labels: Vec<T>,
    /// Number of training samples observed in each class
    class_count: Vec<usize>,
    /// Prior probability of each class
    class_priors: Vec<f64>,
    /// Empirical log probability of features given a class
    feature_log_prob: Vec<Vec<f64>>,
    /// Number of samples encountered for each (class, feature) pair
    feature_count: Vec<Vec<usize>>,
    /// Number of features of each sample
    n_features: usize,
}
impl<T: Number + Ord + Unsigned> fmt::Display for MultinomialNBDistribution<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(
f,
"MultinomialNBDistribution: n_features: {:?}",
self.n_features
)?;
writeln!(f, "class_labels: {:?}", self.class_labels)?;
Ok(())
}
}
impl<X: Number + Unsigned, Y: Number + Ord + Unsigned> NBDistribution<X, Y>
for MultinomialNBDistribution<Y>
{
fn prior(&self, class_index: usize) -> f64 {
self.class_priors[class_index]
}
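    // In the multinomial model the log-likelihood of a sample is the sum over
    // features of count_j * log P(feature_j | class); the per-feature log
    // probabilities are precomputed in `fit` and stored in `feature_log_prob`.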
fn log_likelihood<'a>(&self, class_index: usize, j: &'a Box<dyn ArrayView1<X> + 'a>) -> f64 {
let mut likelihood = 0f64;
for feature in 0..j.shape() {
let value = X::to_f64(j.get(feature)).unwrap();
likelihood += value * self.feature_log_prob[class_index][feature];
}
likelihood
}
fn classes(&self) -> &Vec<Y> {
&self.class_labels
}
}
/// `MultinomialNB` parameters. Use `Default::default()` for default values.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone)]
pub struct MultinomialNBParameters {
    #[cfg_attr(feature = "serde", serde(default))]
    /// Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing)
    pub alpha: f64,
    #[cfg_attr(feature = "serde", serde(default))]
    /// Prior probabilities of the classes. If specified, the priors are not adjusted according to the data
    pub priors: Option<Vec<f64>>,
}
impl MultinomialNBParameters {
    /// Sets the additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing)
    pub fn with_alpha(mut self, alpha: f64) -> Self {
        self.alpha = alpha;
        self
    }
    /// Sets the prior probabilities of the classes. If specified, the priors are not adjusted according to the data
    pub fn with_priors(mut self, priors: Vec<f64>) -> Self {
        self.priors = Some(priors);
        self
    }
}
impl Default for MultinomialNBParameters {
fn default() -> Self {
Self {
alpha: 1f64,
priors: Option::None,
}
}
}
/// `MultinomialNB` grid search parameters
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone)]
pub struct MultinomialNBSearchParameters {
    #[cfg_attr(feature = "serde", serde(default))]
    /// Smoothing parameter values to evaluate
    pub alpha: Vec<f64>,
    #[cfg_attr(feature = "serde", serde(default))]
    /// Class prior configurations to evaluate
    pub priors: Vec<Option<Vec<f64>>>,
}
/// Iterator over `MultinomialNBSearchParameters`, yielding every (alpha, priors) combination
pub struct MultinomialNBSearchParametersIterator {
    multinomial_nb_search_parameters: MultinomialNBSearchParameters,
    current_alpha: usize,
    current_priors: usize,
}
impl IntoIterator for MultinomialNBSearchParameters {
type Item = MultinomialNBParameters;
type IntoIter = MultinomialNBSearchParametersIterator;
fn into_iter(self) -> Self::IntoIter {
MultinomialNBSearchParametersIterator {
multinomial_nb_search_parameters: self,
current_alpha: 0,
current_priors: 0,
}
}
}
impl Iterator for MultinomialNBSearchParametersIterator {
type Item = MultinomialNBParameters;
fn next(&mut self) -> Option<Self::Item> {
if self.current_alpha == self.multinomial_nb_search_parameters.alpha.len()
&& self.current_priors == self.multinomial_nb_search_parameters.priors.len()
{
return None;
}
let next = MultinomialNBParameters {
alpha: self.multinomial_nb_search_parameters.alpha[self.current_alpha],
priors: self.multinomial_nb_search_parameters.priors[self.current_priors].clone(),
};
if self.current_alpha + 1 < self.multinomial_nb_search_parameters.alpha.len() {
self.current_alpha += 1;
} else if self.current_priors + 1 < self.multinomial_nb_search_parameters.priors.len() {
self.current_alpha = 0;
self.current_priors += 1;
} else {
self.current_alpha += 1;
self.current_priors += 1;
}
Some(next)
}
}
impl Default for MultinomialNBSearchParameters {
fn default() -> Self {
let default_params = MultinomialNBParameters::default();
MultinomialNBSearchParameters {
alpha: vec![default_params.alpha],
priors: vec![default_params.priors],
}
}
}
impl<TY: Number + Ord + Unsigned> MultinomialNBDistribution<TY> {
    /// Fits the distribution to an NxM matrix where N is the number of samples and M is the number of features.
    /// * `x` - training data.
    /// * `y` - vector with target values (classes) of length N.
    /// * `alpha` - additive (Laplace/Lidstone) smoothing parameter.
    /// * `priors` - optional vector with prior probabilities of the classes. If not defined,
    ///   priors are prorated according to the distribution of classes in the training data.
    pub fn fit<TX: Number + Unsigned, X: Array2<TX>, Y: Array1<TY>>(
        x: &X,
        y: &Y,
        alpha: f64,
        priors: Option<Vec<f64>>,
    ) -> Result<Self, Failed> {
let (n_samples, n_features) = x.shape();
let y_samples = y.shape();
if y_samples != n_samples {
return Err(Failed::fit(&format!(
"Size of x should equal size of y; |x|=[{}], |y|=[{}]",
n_samples, y_samples
)));
}
        if n_samples == 0 {
            return Err(Failed::fit(&format!(
                "Size of x and y should be greater than 0; |x|=[{}]",
                n_samples
            )));
        }
        if alpha < 0f64 {
            return Err(Failed::fit(&format!(
                "Alpha should not be negative; |alpha|=[{}]",
                alpha
            )));
        }
let (class_labels, indices) = y.unique_with_indices();
let mut class_count = vec![0_usize; class_labels.len()];
for class_index in indices.iter() {
class_count[*class_index] += 1;
}
let class_priors = if let Some(class_priors) = priors {
if class_priors.len() != class_labels.len() {
return Err(Failed::fit(
"Size of priors provided does not match the number of classes of the data.",
));
}
class_priors
} else {
class_count
.iter()
.map(|&c| c as f64 / n_samples as f64)
.collect()
};
let mut feature_in_class_counter = vec![vec![0_usize; n_features]; class_labels.len()];
for (row, class_index) in x.row_iter().zip(indices) {
for (idx, row_i) in row.iterator(0).enumerate().take(n_features) {
feature_in_class_counter[class_index][idx] +=
row_i.to_usize().ok_or_else(|| {
Failed::fit(&format!(
"Elements of the matrix should be convertible to usize |found|=[{}]",
row_i
))
})?;
}
}
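        // Additive (Laplace/Lidstone) smoothing, then take logs:
        //   log P(feature_j | class_c) = ln((N_cj + alpha) / (N_c + alpha * n_features))
        // where N_cj is the total count of feature j observed in class c and
        // N_c is the sum of all feature counts in class c.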
let feature_log_prob = feature_in_class_counter
.iter()
.map(|feature_count| {
let n_c: usize = feature_count.iter().sum();
feature_count
.iter()
.map(|&count| {
((count as f64 + alpha) / (n_c as f64 + alpha * n_features as f64)).ln()
})
.collect()
})
.collect();
Ok(Self {
class_count,
class_labels,
class_priors,
feature_log_prob,
feature_count: feature_in_class_counter,
n_features,
})
}
}
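/// Multinomial naive Bayes classifier for discrete count features
/// (for example, word counts in text classification).
///
/// Example: a minimal usage sketch. The `smartcore::...` import paths assume this
/// module is exported from the `smartcore` crate as `naive_bayes::multinomial`;
/// adjust them to match the actual crate layout.
///
/// ```
/// use smartcore::linalg::basic::matrix::DenseMatrix;
/// use smartcore::naive_bayes::multinomial::MultinomialNB;
///
/// // Rows are samples, columns are non-negative feature counts.
/// let x = DenseMatrix::<u32>::from_2d_array(&[
///     &[1, 2, 0, 0, 0, 0],
///     &[0, 2, 0, 0, 1, 0],
///     &[0, 1, 0, 1, 0, 0],
///     &[0, 1, 1, 0, 0, 1],
/// ]);
/// let y: Vec<u32> = vec![0, 0, 0, 1];
///
/// let nb = MultinomialNB::fit(&x, &y, Default::default()).unwrap();
/// let y_hat = nb.predict(&x).unwrap();
/// ```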
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, PartialEq)]
pub struct MultinomialNB<
TX: Number + Unsigned,
TY: Number + Ord + Unsigned,
X: Array2<TX>,
Y: Array1<TY>,
> {
inner: Option<BaseNaiveBayes<TX, TY, X, Y, MultinomialNBDistribution<TY>>>,
}
impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>> fmt::Display
for MultinomialNB<TX, TY, X, Y>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(
f,
"MultinomialNB:\ninner: {:?}",
self.inner.as_ref().unwrap()
)?;
Ok(())
}
}
impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
SupervisedEstimator<X, Y, MultinomialNBParameters> for MultinomialNB<TX, TY, X, Y>
{
fn new() -> Self {
Self {
inner: Option::None,
}
}
fn fit(x: &X, y: &Y, parameters: MultinomialNBParameters) -> Result<Self, Failed> {
MultinomialNB::fit(x, y, parameters)
}
}
impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
Predictor<X, Y> for MultinomialNB<TX, TY, X, Y>
{
fn predict(&self, x: &X) -> Result<Y, Failed> {
self.predict(x)
}
}
impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
MultinomialNB<TX, TY, X, Y>
{
    /// Fits MultinomialNB with given data.
    /// * `x` - training data of size NxM where N is the number of samples and M is the number of features.
    /// * `y` - vector with target values (classes) of length N.
    /// * `parameters` - additional parameters like class priors and alpha for smoothing.
    pub fn fit(x: &X, y: &Y, parameters: MultinomialNBParameters) -> Result<Self, Failed> {
        let distribution =
            MultinomialNBDistribution::fit(x, y, parameters.alpha, parameters.priors)?;
        let inner = BaseNaiveBayes::fit(distribution)?;
        Ok(Self { inner: Some(inner) })
    }
    /// Estimates the class labels for the provided data.
    /// * `x` - data of shape NxM where N is the number of data points to estimate and M is the number of features.
    ///
    /// Returns a vector of size N with class estimates.
    pub fn predict(&self, x: &X) -> Result<Y, Failed> {
        self.inner.as_ref().unwrap().predict(x)
    }
    /// Class labels known to the classifier. Returns a vector of size n_classes.
    pub fn classes(&self) -> &Vec<TY> {
        &self.inner.as_ref().unwrap().distribution.class_labels
    }
    /// Number of training samples observed in each class. Returns a vector of size n_classes.
    pub fn class_count(&self) -> &Vec<usize> {
        &self.inner.as_ref().unwrap().distribution.class_count
    }
    /// Empirical log probability of features given a class. Returns a 2d vector of shape (n_classes, n_features).
    pub fn feature_log_prob(&self) -> &Vec<Vec<f64>> {
        &self.inner.as_ref().unwrap().distribution.feature_log_prob
    }
    /// Number of features of each sample.
    pub fn n_features(&self) -> usize {
        self.inner.as_ref().unwrap().distribution.n_features
    }
    /// Number of samples encountered for each (class, feature) pair. Returns a 2d vector of shape (n_classes, n_features).
    pub fn feature_count(&self) -> &Vec<Vec<usize>> {
        &self.inner.as_ref().unwrap().distribution.feature_count
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::linalg::basic::matrix::DenseMatrix;
#[test]
fn search_parameters() {
let parameters = MultinomialNBSearchParameters {
alpha: vec![1., 2.],
..Default::default()
};
let mut iter = parameters.into_iter();
let next = iter.next().unwrap();
assert_eq!(next.alpha, 1.);
let next = iter.next().unwrap();
assert_eq!(next.alpha, 2.);
assert!(iter.next().is_none());
}
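    // A small usage sketch for the builder-style parameter API; this extra test
    // (and its name) is illustrative and not part of the original suite.
    #[test]
    fn parameters_builder() {
        let params = MultinomialNBParameters::default()
            .with_alpha(0.5)
            .with_priors(vec![0.5, 0.5]);
        assert_eq!(params.alpha, 0.5);
        assert_eq!(params.priors, Some(vec![0.5, 0.5]));
    }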
#[cfg_attr(
all(target_arch = "wasm32", not(target_os = "wasi")),
wasm_bindgen_test::wasm_bindgen_test
)]
#[test]
fn run_multinomial_naive_bayes() {
let x = DenseMatrix::from_2d_array(&[
&[1, 2, 0, 0, 0, 0],
&[0, 2, 0, 0, 1, 0],
&[0, 1, 0, 1, 0, 0],
&[0, 1, 1, 0, 0, 1],
]);
let y: Vec<u32> = vec![0, 0, 0, 1];
let mnb = MultinomialNB::fit(&x, &y, Default::default()).unwrap();
assert_eq!(mnb.classes(), &[0, 1]);
assert_eq!(mnb.class_count(), &[3, 1]);
let distribution = mnb.inner.clone().unwrap().distribution;
assert_eq!(&distribution.class_priors, &[0.75, 0.25]);
assert_eq!(
mnb.feature_log_prob(),
&[
&[
(1_f64 / 7_f64).ln(),
(3_f64 / 7_f64).ln(),
(1_f64 / 14_f64).ln(),
(1_f64 / 7_f64).ln(),
(1_f64 / 7_f64).ln(),
(1_f64 / 14_f64).ln()
],
&[
(1_f64 / 9_f64).ln(),
(2_f64 / 9_f64).ln(),
(2_f64 / 9_f64).ln(),
(1_f64 / 9_f64).ln(),
(1_f64 / 9_f64).ln(),
(2_f64 / 9_f64).ln()
]
]
);
let x_test = DenseMatrix::<u32>::from_2d_array(&[&[0, 3, 1, 0, 0, 1]]);
let y_hat = mnb.predict(&x_test).unwrap();
assert_eq!(y_hat, &[0]);
}
#[cfg_attr(
all(target_arch = "wasm32", not(target_os = "wasi")),
wasm_bindgen_test::wasm_bindgen_test
)]
#[test]
fn multinomial_nb_scikit_parity() {
let x = DenseMatrix::<u32>::from_2d_array(&[
&[2, 4, 0, 0, 2, 1, 2, 4, 2, 0],
&[3, 4, 0, 2, 1, 0, 1, 4, 0, 3],
&[1, 4, 2, 4, 1, 0, 1, 2, 3, 2],
&[0, 3, 3, 4, 1, 0, 3, 1, 1, 1],
&[0, 2, 1, 4, 3, 4, 1, 2, 3, 1],
&[3, 2, 4, 1, 3, 0, 2, 4, 0, 2],
&[3, 1, 3, 0, 2, 0, 4, 4, 3, 4],
&[2, 2, 2, 0, 1, 1, 2, 1, 0, 1],
&[3, 3, 2, 2, 0, 2, 3, 2, 2, 3],
&[4, 3, 4, 4, 4, 2, 2, 0, 1, 4],
&[3, 4, 2, 2, 1, 4, 4, 4, 1, 3],
&[3, 0, 1, 4, 4, 0, 0, 3, 2, 4],
&[2, 0, 3, 3, 1, 2, 0, 2, 4, 1],
&[2, 4, 0, 4, 2, 4, 1, 3, 1, 4],
&[0, 2, 2, 3, 4, 0, 4, 4, 4, 4],
]);
let y: Vec<u32> = vec![2, 2, 0, 0, 0, 2, 1, 1, 0, 1, 0, 0, 2, 0, 2];
let nb = MultinomialNB::fit(&x, &y, Default::default()).unwrap();
assert_eq!(nb.n_features(), 10);
assert_eq!(
nb.feature_count(),
&[
&[12, 20, 11, 24, 12, 14, 13, 17, 13, 18],
&[9, 6, 9, 4, 7, 3, 8, 5, 4, 9],
&[10, 12, 9, 9, 11, 3, 9, 18, 10, 10]
]
);
println!("{}", &nb);
let y_hat = nb.predict(&x).unwrap();
let distribution = nb.inner.clone().unwrap().distribution;
assert_eq!(
&distribution.class_priors,
&vec!(0.4666666666666667, 0.2, 0.3333333333333333)
);
#[cfg(not(target_arch = "wasm32"))]
assert_eq!(
&nb.feature_log_prob()[1],
&vec![
-2.001480000210124,
-2.3581549441488563,
-2.001480000210124,
-2.6946271807700692,
-2.2246235515243336,
-2.917770732084279,
-2.10684051586795,
-2.512305623976115,
-2.6946271807700692,
-2.001480000210124
]
);
assert_eq!(y_hat, vec!(2, 2, 0, 0, 0, 2, 2, 1, 0, 1, 0, 2, 0, 0, 2));
}
#[cfg_attr(
all(target_arch = "wasm32", not(target_os = "wasi")),
wasm_bindgen_test::wasm_bindgen_test
)]
#[test]
#[cfg(feature = "serde")]
fn serde() {
let x = DenseMatrix::from_2d_array(&[
&[1, 1, 0, 0, 0, 0],
&[0, 1, 0, 0, 1, 0],
&[0, 1, 0, 1, 0, 0],
&[0, 1, 1, 0, 0, 1],
]);
let y = vec![0, 0, 0, 1];
let mnb = MultinomialNB::fit(&x, &y, Default::default()).unwrap();
let deserialized_mnb: MultinomialNB<u32, u32, DenseMatrix<u32>, Vec<u32>> =
serde_json::from_str(&serde_json::to_string(&mnb).unwrap()).unwrap();
assert_eq!(mnb, deserialized_mnb);
}
}