sklears_ensemble/adaboost/types.rs

//! Type definitions for AdaBoost ensemble methods

use scirs2_core::ndarray::Array1;
use sklears_core::{
    traits::{Trained, Untrained},
    types::Float,
};
use std::marker::PhantomData;

/// AdaBoost algorithm variants
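///
/// # Examples
///
/// For a weak learner with weighted error `err` on a `k`-class problem,
/// SAMME assigns the stage weight below. This is a sketch of the
/// published formula, not necessarily this crate's internal code path:
///
/// ```
/// let err: f64 = 0.3;
/// let k: f64 = 3.0;
/// // alpha = ln((1 - err) / err) + ln(k - 1)
/// let alpha = ((1.0 - err) / err).ln() + (k - 1.0).ln();
/// assert!(alpha > 0.0);
/// ```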
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AdaBoostAlgorithm {
    /// SAMME (Stagewise Additive Modeling using a Multi-class Exponential loss function)
    SAMME,
    /// SAMME.R (SAMME with real-valued predictions)
    SAMMER,
    /// Gentle AdaBoost
    Gentle,
    /// Discrete AdaBoost
    Discrete,
    /// Real AdaBoost
    RealAdaBoost,
    /// AdaBoost.M1 for multiclass
    M1,
    /// AdaBoost.M2 for multiclass
    M2,
}

/// Split criterion for decision trees
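///
/// # Examples
///
/// Both criteria measure node impurity from the class distribution. A
/// self-contained sketch of the standard formulas:
///
/// ```
/// let p = [0.5_f64, 0.25, 0.25];
/// // Gini impurity: 1 - sum(p_i^2)
/// let gini: f64 = 1.0 - p.iter().map(|x| x * x).sum::<f64>();
/// // Entropy: -sum(p_i * ln(p_i))
/// let entropy: f64 = -p.iter().map(|x| x * x.ln()).sum::<f64>();
/// assert!((gini - 0.625).abs() < 1e-12);
/// assert!(entropy > 0.0);
/// ```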
#[derive(Debug, Clone, Copy)]
pub enum SplitCriterion {
    /// Gini impurity
    Gini,
    /// Shannon entropy (information gain)
    Entropy,
}

/// Configuration for AdaBoost
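///
/// # Examples
///
/// `learning_rate` shrinks each boosting stage's contribution before the
/// stages are combined. An illustrative sketch, not this crate's internal
/// code path:
///
/// ```
/// // Hypothetical stage weights from three fitted estimators.
/// let stage_weights = [1.2_f64, 0.8, 0.5];
/// let learning_rate = 0.5;
/// // Each stage's vote is shrunk by the learning rate before combination.
/// let combined: f64 = stage_weights.iter().map(|w| learning_rate * w).sum();
/// assert!((combined - 1.25).abs() < 1e-12);
/// ```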
#[derive(Debug, Clone)]
pub struct AdaBoostConfig {
    /// Number of boosting stages (weak learners) to fit
    pub(crate) n_estimators: usize,
    /// Shrinkage applied to each estimator's contribution
    pub(crate) learning_rate: Float,
    /// Boosting variant to use
    pub(crate) algorithm: AdaBoostAlgorithm,
    /// Seed for reproducible randomness
    pub(crate) random_state: Option<u64>,
}

/// Configuration for LogitBoost
#[derive(Debug, Clone)]
pub struct LogitBoostConfig {
    /// Number of boosting stages to fit
    pub(crate) n_estimators: usize,
    /// Shrinkage applied to each stage's contribution
    pub(crate) learning_rate: Float,
    /// Seed for reproducible randomness
    pub(crate) random_state: Option<u64>,
    /// Maximum depth of each base regression tree
    pub(crate) max_depth: Option<usize>,
    /// Minimum number of samples required to split an internal node
    pub(crate) min_samples_split: usize,
    /// Minimum number of samples required at a leaf node
    pub(crate) min_samples_leaf: usize,
    /// Convergence tolerance for the fitting iterations
    pub(crate) tolerance: Float,
    /// Maximum number of fitting iterations
    pub(crate) max_iter: usize,
}

/// Decision tree classifier (simplified stub for AdaBoost)
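///
/// The `PhantomData<T>` field encodes the typestate pattern used across
/// these types: the same struct is parameterized by `Untrained`/`Trained`
/// so that prediction methods can only be called on a fitted model. A
/// self-contained sketch of the pattern, not this crate's actual API:
///
/// ```
/// use std::marker::PhantomData;
///
/// struct Untrained;
/// struct Trained;
///
/// struct Model<State = Untrained> {
///     state: PhantomData<State>,
/// }
///
/// impl Model<Untrained> {
///     // Consuming `fit` returns the model in its `Trained` typestate.
///     fn fit(self) -> Model<Trained> {
///         Model { state: PhantomData }
///     }
/// }
///
/// impl Model<Trained> {
///     fn predict(&self) -> f64 {
///         0.0
///     }
/// }
///
/// let trained = Model::<Untrained> { state: PhantomData }.fit();
/// let _pred = trained.predict(); // would not compile on Model<Untrained>
/// ```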
#[derive(Debug, Clone)]
pub struct DecisionTreeClassifier<T> {
    pub(crate) criterion: SplitCriterion,
    pub(crate) max_depth: Option<usize>,
    pub(crate) min_samples_split: usize,
    pub(crate) min_samples_leaf: usize,
    pub(crate) random_state: Option<u64>,
    /// Typestate marker (`Untrained` or `Trained`)
    pub(crate) state: PhantomData<T>,
}

/// Decision tree regressor (simplified stub for AdaBoost)
#[derive(Debug, Clone)]
pub struct DecisionTreeRegressor<T> {
    pub(crate) criterion: SplitCriterion,
    pub(crate) max_depth: Option<usize>,
    pub(crate) min_samples_split: usize,
    pub(crate) min_samples_leaf: usize,
    pub(crate) random_state: Option<u64>,
    /// Typestate marker (`Untrained` or `Trained`)
    pub(crate) state: PhantomData<T>,
}

/// AdaBoost Classifier
///
/// AdaBoost is a meta-algorithm that can be combined with many other
/// learning algorithms to improve performance. The key idea is to fit a
/// sequence of weak learners on repeatedly reweighted versions of the data.
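///
/// # Examples
///
/// After each stage, AdaBoost increases the weights of misclassified
/// samples so that subsequent learners focus on them. A self-contained
/// sketch of the discrete update, illustrative only and not necessarily
/// this crate's exact code path:
///
/// ```
/// // Sample weights and per-sample correctness from one boosting stage.
/// let mut weights = vec![0.25_f64; 4];
/// let correct = [true, true, true, false];
/// let alpha = 0.8_f64; // stage weight derived from the weighted error
///
/// // Misclassified samples are up-weighted by exp(alpha).
/// for (w, &ok) in weights.iter_mut().zip(correct.iter()) {
///     if !ok {
///         *w *= alpha.exp();
///     }
/// }
/// // Renormalize so the weights again sum to one.
/// let total: f64 = weights.iter().sum();
/// for w in weights.iter_mut() {
///     *w /= total;
/// }
/// assert!(weights[3] > weights[0]);
/// ```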
#[derive(Debug, Clone)]
pub struct AdaBoostClassifier<State = Untrained> {
    pub(crate) config: AdaBoostConfig,
    /// Typestate marker (`Untrained` or `Trained`)
    pub(crate) state: PhantomData<State>,
    /// Fitted weak learners (populated after training)
    pub(crate) estimators_: Option<Vec<DecisionTreeClassifier<Trained>>>,
    /// Weight of each estimator in the final vote
    pub(crate) estimator_weights_: Option<Array1<Float>>,
    /// Weighted training error of each estimator
    pub(crate) estimator_errors_: Option<Array1<Float>>,
    /// Class labels seen during training
    pub(crate) classes_: Option<Array1<Float>>,
    /// Number of distinct classes
    pub(crate) n_classes_: Option<usize>,
    /// Number of input features seen during training
    pub(crate) n_features_in_: Option<usize>,
}

/// LogitBoost Classifier
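///
/// # Examples
///
/// LogitBoost fits an additive logistic model by Newton steps: each stage
/// fits a regression tree to a working response weighted by `p * (1 - p)`.
/// A sketch of the textbook binary-case quantities (Friedman, Hastie &
/// Tibshirani), not necessarily this crate's internal code path:
///
/// ```
/// // One sample: current predicted probability p, binary target y* in {0, 1}.
/// let p = 0.3_f64;
/// let y_star = 1.0_f64;
/// // Newton working weight and working response for the next regression tree.
/// let w = p * (1.0 - p);
/// let z = (y_star - p) / w;
/// assert!((w - 0.21).abs() < 1e-12);
/// assert!((z - (0.7 / 0.21)).abs() < 1e-9);
/// ```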
#[derive(Debug, Clone)]
pub struct LogitBoostClassifier<State = Untrained> {
    pub(crate) config: LogitBoostConfig,
    /// Typestate marker (`Untrained` or `Trained`)
    pub(crate) state: PhantomData<State>,
    /// Fitted base regression trees (populated after training)
    pub(crate) estimators_: Option<Vec<DecisionTreeRegressor<Trained>>>,
    /// Weight of each estimator's contribution
    pub(crate) estimator_weights_: Option<Array1<Float>>,
    /// Class labels seen during training
    pub(crate) classes_: Option<Array1<Float>>,
    /// Number of distinct classes
    pub(crate) n_classes_: Option<usize>,
    /// Number of input features seen during training
    pub(crate) n_features_in_: Option<usize>,
    /// Intercept of the additive model (populated after training)
    pub(crate) intercept_: Option<Float>,
}