pub mod adaboost;
pub mod adaboost_regressor;
pub mod bagging;
pub mod decision_tree;
pub mod extra_tree;
pub mod extra_trees_ensemble;
pub mod gradient_boosting;
pub mod hist_gradient_boosting;
pub mod isolation_forest;
pub mod random_forest;
pub mod random_trees_embedding;
pub mod voting;
pub use adaboost::{AdaBoostAlgorithm, AdaBoostClassifier, FittedAdaBoostClassifier};
pub use adaboost_regressor::{AdaBoostLoss, AdaBoostRegressor, FittedAdaBoostRegressor};
pub use bagging::{
BaggingClassifier, BaggingRegressor, FittedBaggingClassifier, FittedBaggingRegressor,
};
pub use decision_tree::{
ClassificationCriterion, DecisionTreeClassifier, DecisionTreeRegressor,
FittedDecisionTreeClassifier, FittedDecisionTreeRegressor, Node, RegressionCriterion,
};
pub use extra_tree::{
ExtraTreeClassifier, ExtraTreeRegressor, FittedExtraTreeClassifier, FittedExtraTreeRegressor,
};
pub use extra_trees_ensemble::{
ExtraTreesClassifier, ExtraTreesRegressor, FittedExtraTreesClassifier,
FittedExtraTreesRegressor,
};
pub use gradient_boosting::{
ClassificationLoss, FittedGradientBoostingClassifier, FittedGradientBoostingRegressor,
GradientBoostingClassifier, GradientBoostingRegressor, RegressionLoss,
};
pub use hist_gradient_boosting::{
FittedHistGradientBoostingClassifier, FittedHistGradientBoostingRegressor,
HistClassificationLoss, HistGradientBoostingClassifier, HistGradientBoostingRegressor,
HistNode, HistRegressionLoss,
};
pub use isolation_forest::{FittedIsolationForest, IsolationForest};
pub use random_forest::{
FittedRandomForestClassifier, FittedRandomForestRegressor, MaxFeatures, RandomForestClassifier,
RandomForestRegressor,
};
pub use random_trees_embedding::{FittedRandomTreesEmbedding, RandomTreesEmbedding};
pub use voting::{
FittedVotingClassifier, FittedVotingRegressor, VotingClassifier, VotingRegressor,
};
use ndarray::{Array1, Array2};
use num_traits::Float;
/// Element-wise natural log of a probability matrix, with probabilities
/// clipped away from zero so that zero entries map to a large negative
/// finite value instead of `-inf`.
pub(crate) fn log_proba<F: Float>(proba: &Array2<F>) -> Array2<F> {
    // BUG FIX: `F::from(1e-300)` underflows to `0.0` when `F = f32`
    // (1e-300 is far below f32's minimum positive value), which made
    // `eps.ln()` equal to `-inf` and defeated the clipping entirely.
    // Clamping to `F::min_positive_value()` keeps the clip effective for
    // every float width, while f64 behavior is unchanged (1e-300 is
    // larger than f64's minimum positive value, so the max is a no-op).
    let eps = F::from(1e-300)
        .unwrap_or_else(F::min_positive_value)
        .max(F::min_positive_value());
    proba.mapv(|p| if p > eps { p.ln() } else { eps.ln() })
}
/// Fraction of positions where `predictions` agrees with `targets`.
///
/// Returns `0` for empty input rather than dividing by zero. The two
/// arrays are walked in lockstep; extra trailing entries in the longer
/// one are ignored (as with the original `zip`-based implementation).
pub(crate) fn mean_accuracy<F: Float>(predictions: &Array1<usize>, targets: &Array1<usize>) -> F {
    let total = targets.len();
    if total == 0 {
        return F::zero();
    }
    // Count matching pairs with a fold instead of filter + count.
    let matches = predictions
        .iter()
        .zip(targets.iter())
        .fold(0usize, |acc, (pred, truth)| {
            if pred == truth {
                acc + 1
            } else {
                acc
            }
        });
    F::from(matches).unwrap() / F::from(total).unwrap()
}
/// Coefficient of determination: `1 - SS_res / SS_tot`.
///
/// Edge cases:
/// - empty input returns `0`;
/// - constant target perfectly predicted (`SS_tot == SS_res == 0`)
///   returns `1`;
/// - constant target with imperfect predictions returns `-inf`
///   (the score is undefined; any nonzero residual is "infinitely bad"
///   relative to a zero total variance).
pub(crate) fn r2_score<F: Float>(y_pred: &Array1<F>, y_true: &Array1<F>) -> F {
    let n = y_true.len();
    if n == 0 {
        return F::zero();
    }
    let sum = y_true.iter().fold(F::zero(), |acc, &v| acc + v);
    let mean = sum / F::from(n).unwrap();
    // Accumulate both sums of squares in a single fold. Indexed access
    // (rather than `zip`) deliberately preserves the original behavior
    // of panicking when `y_pred` is shorter than `y_true`.
    let (ss_res, ss_tot) = (0..n).fold((F::zero(), F::zero()), |(res, tot), i| {
        let residual = y_true[i] - y_pred[i];
        let deviation = y_true[i] - mean;
        (res + residual * residual, tot + deviation * deviation)
    });
    match (ss_tot == F::zero(), ss_res == F::zero()) {
        (true, true) => F::one(),
        (true, false) => F::neg_infinity(),
        (false, _) => F::one() - ss_res / ss_tot,
    }
}