pub struct DecisionTreeClassifierParameters {
pub criterion: SplitCriterion,
pub max_depth: Option<u16>,
pub min_samples_leaf: usize,
pub min_samples_split: usize,
}
Expand description
Parameters for decision tree classification (re-export of Smartcore's decision tree parameters).
Fields§
criterion: SplitCriterion
Split criterion to use when building a tree.
max_depth: Option<u16>
The maximum depth of the tree.
min_samples_leaf: usize
The minimum number of samples required to be at a leaf node.
min_samples_split: usize
The minimum number of samples required to split an internal node.
Implementations§
Source§impl DecisionTreeClassifierParameters
impl DecisionTreeClassifierParameters
Sourcepub fn with_criterion(
self,
criterion: SplitCriterion,
) -> DecisionTreeClassifierParameters
pub fn with_criterion( self, criterion: SplitCriterion, ) -> DecisionTreeClassifierParameters
Split criterion to use when building a tree.
Sourcepub fn with_max_depth(self, max_depth: u16) -> DecisionTreeClassifierParameters
pub fn with_max_depth(self, max_depth: u16) -> DecisionTreeClassifierParameters
The maximum depth of the tree.
Examples found in repository?
examples/maximal_classification.rs (line 43)
4fn main() {
5 // Totally customize settings
6 let settings = Settings::default_classification()
7 .with_number_of_folds(3)
8 .shuffle_data(true)
9 .verbose(true)
10 .with_final_model(FinalModel::Blending {
11 algorithm: Algorithm::CategoricalNaiveBayes,
12 meta_training_fraction: 0.15,
13 meta_testing_fraction: 0.15,
14 })
15 .skip(Algorithm::RandomForestClassifier)
16 .sorted_by(Metric::Accuracy)
17 .with_preprocessing(PreProcessing::ReplaceWithPCA {
18 number_of_components: 5,
19 })
20 .with_random_forest_classifier_settings(
21 RandomForestClassifierParameters::default()
22 .with_m(100)
23 .with_max_depth(5)
24 .with_min_samples_leaf(20)
25 .with_n_trees(100)
26 .with_min_samples_split(20),
27 )
28 .with_logistic_settings(
29 LogisticRegressionParameters::default()
30 .with_alpha(1.0)
31 .with_solver(LogisticRegressionSolverName::LBFGS),
32 )
33 .with_svc_settings(
34 SVCParameters::default()
35 .with_epoch(10)
36 .with_tol(1e-10)
37 .with_c(1.0)
38 .with_kernel(Kernel::Linear),
39 )
40 .with_decision_tree_classifier_settings(
41 DecisionTreeClassifierParameters::default()
42 .with_min_samples_split(20)
43 .with_max_depth(5)
44 .with_min_samples_leaf(20),
45 )
46 .with_knn_classifier_settings(
47 KNNClassifierParameters::default()
48 .with_algorithm(KNNAlgorithmName::CoverTree)
49 .with_k(3)
50 .with_distance(Distance::Euclidean)
51 .with_weight(KNNWeightFunction::Uniform),
52 )
53 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
54 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
55
56 // Save the settings for later use
57 settings.save("examples/maximal_classification_settings.yaml");
58
59 // Load a dataset from smartcore and add it to the regressor
60 let mut model =
61 SupervisedModel::new(smartcore::dataset::breast_cancer::load_dataset(), settings);
62
63 // Run a model comparison with all models at default settings
64 model.train();
65
66 // Print the results
67 println!("{}", model);
68
69    // Save the model for later
70 model.save("examples/maximal_classification_model.aml");
71}
More examples
examples/print_settings.rs (line 88)
3fn main() {
4 let regressor_settings = automl::Settings::default_regression()
5 .with_number_of_folds(3)
6 .shuffle_data(true)
7 .verbose(true)
8 .sorted_by(Metric::RSquared)
9 .with_preprocessing(PreProcessing::AddInteractions)
10 .with_linear_settings(
11 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
12 )
13 .with_lasso_settings(
14 LassoParameters::default()
15 .with_alpha(10.0)
16 .with_tol(1e-10)
17 .with_normalize(true)
18 .with_max_iter(10_000),
19 )
20 .with_ridge_settings(
21 RidgeRegressionParameters::default()
22 .with_alpha(10.0)
23 .with_normalize(true)
24 .with_solver(RidgeRegressionSolverName::Cholesky),
25 )
26 .with_elastic_net_settings(
27 ElasticNetParameters::default()
28 .with_tol(1e-10)
29 .with_normalize(true)
30 .with_alpha(1.0)
31 .with_max_iter(10_000)
32 .with_l1_ratio(0.5),
33 )
34 .with_knn_regressor_settings(
35 KNNRegressorParameters::default()
36 .with_algorithm(KNNAlgorithmName::CoverTree)
37 .with_k(3)
38 .with_distance(Distance::Euclidean)
39 .with_weight(KNNWeightFunction::Uniform),
40 )
41 .with_svr_settings(
42 SVRParameters::default()
43 .with_eps(1e-10)
44 .with_tol(1e-10)
45 .with_c(1.0)
46 .with_kernel(Kernel::Linear),
47 )
48 .with_random_forest_regressor_settings(
49 RandomForestRegressorParameters::default()
50 .with_m(100)
51 .with_max_depth(5)
52 .with_min_samples_leaf(20)
53 .with_n_trees(100)
54 .with_min_samples_split(20),
55 )
56 .with_decision_tree_regressor_settings(
57 DecisionTreeRegressorParameters::default()
58 .with_min_samples_split(20)
59 .with_max_depth(5)
60 .with_min_samples_leaf(20),
61 );
62
63 let classifier_settings = automl::Settings::default_classification()
64 .with_number_of_folds(3)
65 .shuffle_data(true)
66 .verbose(true)
67 .sorted_by(Metric::Accuracy)
68 .with_preprocessing(PreProcessing::AddInteractions)
69 .with_random_forest_classifier_settings(
70 RandomForestClassifierParameters::default()
71 .with_m(100)
72 .with_max_depth(5)
73 .with_min_samples_leaf(20)
74 .with_n_trees(100)
75 .with_min_samples_split(20),
76 )
77 .with_logistic_settings(LogisticRegressionParameters::default())
78 .with_svc_settings(
79 SVCParameters::default()
80 .with_epoch(10)
81 .with_tol(1e-10)
82 .with_c(1.0)
83 .with_kernel(Kernel::Linear),
84 )
85 .with_decision_tree_classifier_settings(
86 DecisionTreeClassifierParameters::default()
87 .with_min_samples_split(20)
88 .with_max_depth(5)
89 .with_min_samples_leaf(20),
90 )
91 .with_knn_classifier_settings(
92 KNNClassifierParameters::default()
93 .with_algorithm(KNNAlgorithmName::CoverTree)
94 .with_k(3)
95 .with_distance(Distance::Hamming)
96 .with_weight(KNNWeightFunction::Uniform),
97 )
98 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
99 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
100
101 println!("{}", regressor_settings);
102 println!("{}", classifier_settings)
103}
Sourcepub fn with_min_samples_leaf(
self,
min_samples_leaf: usize,
) -> DecisionTreeClassifierParameters
pub fn with_min_samples_leaf( self, min_samples_leaf: usize, ) -> DecisionTreeClassifierParameters
The minimum number of samples required to be at a leaf node.
Examples found in repository?
examples/maximal_classification.rs (line 44)
4fn main() {
5 // Totally customize settings
6 let settings = Settings::default_classification()
7 .with_number_of_folds(3)
8 .shuffle_data(true)
9 .verbose(true)
10 .with_final_model(FinalModel::Blending {
11 algorithm: Algorithm::CategoricalNaiveBayes,
12 meta_training_fraction: 0.15,
13 meta_testing_fraction: 0.15,
14 })
15 .skip(Algorithm::RandomForestClassifier)
16 .sorted_by(Metric::Accuracy)
17 .with_preprocessing(PreProcessing::ReplaceWithPCA {
18 number_of_components: 5,
19 })
20 .with_random_forest_classifier_settings(
21 RandomForestClassifierParameters::default()
22 .with_m(100)
23 .with_max_depth(5)
24 .with_min_samples_leaf(20)
25 .with_n_trees(100)
26 .with_min_samples_split(20),
27 )
28 .with_logistic_settings(
29 LogisticRegressionParameters::default()
30 .with_alpha(1.0)
31 .with_solver(LogisticRegressionSolverName::LBFGS),
32 )
33 .with_svc_settings(
34 SVCParameters::default()
35 .with_epoch(10)
36 .with_tol(1e-10)
37 .with_c(1.0)
38 .with_kernel(Kernel::Linear),
39 )
40 .with_decision_tree_classifier_settings(
41 DecisionTreeClassifierParameters::default()
42 .with_min_samples_split(20)
43 .with_max_depth(5)
44 .with_min_samples_leaf(20),
45 )
46 .with_knn_classifier_settings(
47 KNNClassifierParameters::default()
48 .with_algorithm(KNNAlgorithmName::CoverTree)
49 .with_k(3)
50 .with_distance(Distance::Euclidean)
51 .with_weight(KNNWeightFunction::Uniform),
52 )
53 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
54 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
55
56 // Save the settings for later use
57 settings.save("examples/maximal_classification_settings.yaml");
58
59 // Load a dataset from smartcore and add it to the regressor
60 let mut model =
61 SupervisedModel::new(smartcore::dataset::breast_cancer::load_dataset(), settings);
62
63 // Run a model comparison with all models at default settings
64 model.train();
65
66 // Print the results
67 println!("{}", model);
68
69    // Save the model for later
70 model.save("examples/maximal_classification_model.aml");
71}
More examples
examples/print_settings.rs (line 89)
3fn main() {
4 let regressor_settings = automl::Settings::default_regression()
5 .with_number_of_folds(3)
6 .shuffle_data(true)
7 .verbose(true)
8 .sorted_by(Metric::RSquared)
9 .with_preprocessing(PreProcessing::AddInteractions)
10 .with_linear_settings(
11 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
12 )
13 .with_lasso_settings(
14 LassoParameters::default()
15 .with_alpha(10.0)
16 .with_tol(1e-10)
17 .with_normalize(true)
18 .with_max_iter(10_000),
19 )
20 .with_ridge_settings(
21 RidgeRegressionParameters::default()
22 .with_alpha(10.0)
23 .with_normalize(true)
24 .with_solver(RidgeRegressionSolverName::Cholesky),
25 )
26 .with_elastic_net_settings(
27 ElasticNetParameters::default()
28 .with_tol(1e-10)
29 .with_normalize(true)
30 .with_alpha(1.0)
31 .with_max_iter(10_000)
32 .with_l1_ratio(0.5),
33 )
34 .with_knn_regressor_settings(
35 KNNRegressorParameters::default()
36 .with_algorithm(KNNAlgorithmName::CoverTree)
37 .with_k(3)
38 .with_distance(Distance::Euclidean)
39 .with_weight(KNNWeightFunction::Uniform),
40 )
41 .with_svr_settings(
42 SVRParameters::default()
43 .with_eps(1e-10)
44 .with_tol(1e-10)
45 .with_c(1.0)
46 .with_kernel(Kernel::Linear),
47 )
48 .with_random_forest_regressor_settings(
49 RandomForestRegressorParameters::default()
50 .with_m(100)
51 .with_max_depth(5)
52 .with_min_samples_leaf(20)
53 .with_n_trees(100)
54 .with_min_samples_split(20),
55 )
56 .with_decision_tree_regressor_settings(
57 DecisionTreeRegressorParameters::default()
58 .with_min_samples_split(20)
59 .with_max_depth(5)
60 .with_min_samples_leaf(20),
61 );
62
63 let classifier_settings = automl::Settings::default_classification()
64 .with_number_of_folds(3)
65 .shuffle_data(true)
66 .verbose(true)
67 .sorted_by(Metric::Accuracy)
68 .with_preprocessing(PreProcessing::AddInteractions)
69 .with_random_forest_classifier_settings(
70 RandomForestClassifierParameters::default()
71 .with_m(100)
72 .with_max_depth(5)
73 .with_min_samples_leaf(20)
74 .with_n_trees(100)
75 .with_min_samples_split(20),
76 )
77 .with_logistic_settings(LogisticRegressionParameters::default())
78 .with_svc_settings(
79 SVCParameters::default()
80 .with_epoch(10)
81 .with_tol(1e-10)
82 .with_c(1.0)
83 .with_kernel(Kernel::Linear),
84 )
85 .with_decision_tree_classifier_settings(
86 DecisionTreeClassifierParameters::default()
87 .with_min_samples_split(20)
88 .with_max_depth(5)
89 .with_min_samples_leaf(20),
90 )
91 .with_knn_classifier_settings(
92 KNNClassifierParameters::default()
93 .with_algorithm(KNNAlgorithmName::CoverTree)
94 .with_k(3)
95 .with_distance(Distance::Hamming)
96 .with_weight(KNNWeightFunction::Uniform),
97 )
98 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
99 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
100
101 println!("{}", regressor_settings);
102 println!("{}", classifier_settings)
103}
Sourcepub fn with_min_samples_split(
self,
min_samples_split: usize,
) -> DecisionTreeClassifierParameters
pub fn with_min_samples_split( self, min_samples_split: usize, ) -> DecisionTreeClassifierParameters
The minimum number of samples required to split an internal node.
Examples found in repository?
examples/maximal_classification.rs (line 42)
4fn main() {
5 // Totally customize settings
6 let settings = Settings::default_classification()
7 .with_number_of_folds(3)
8 .shuffle_data(true)
9 .verbose(true)
10 .with_final_model(FinalModel::Blending {
11 algorithm: Algorithm::CategoricalNaiveBayes,
12 meta_training_fraction: 0.15,
13 meta_testing_fraction: 0.15,
14 })
15 .skip(Algorithm::RandomForestClassifier)
16 .sorted_by(Metric::Accuracy)
17 .with_preprocessing(PreProcessing::ReplaceWithPCA {
18 number_of_components: 5,
19 })
20 .with_random_forest_classifier_settings(
21 RandomForestClassifierParameters::default()
22 .with_m(100)
23 .with_max_depth(5)
24 .with_min_samples_leaf(20)
25 .with_n_trees(100)
26 .with_min_samples_split(20),
27 )
28 .with_logistic_settings(
29 LogisticRegressionParameters::default()
30 .with_alpha(1.0)
31 .with_solver(LogisticRegressionSolverName::LBFGS),
32 )
33 .with_svc_settings(
34 SVCParameters::default()
35 .with_epoch(10)
36 .with_tol(1e-10)
37 .with_c(1.0)
38 .with_kernel(Kernel::Linear),
39 )
40 .with_decision_tree_classifier_settings(
41 DecisionTreeClassifierParameters::default()
42 .with_min_samples_split(20)
43 .with_max_depth(5)
44 .with_min_samples_leaf(20),
45 )
46 .with_knn_classifier_settings(
47 KNNClassifierParameters::default()
48 .with_algorithm(KNNAlgorithmName::CoverTree)
49 .with_k(3)
50 .with_distance(Distance::Euclidean)
51 .with_weight(KNNWeightFunction::Uniform),
52 )
53 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
54 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
55
56 // Save the settings for later use
57 settings.save("examples/maximal_classification_settings.yaml");
58
59 // Load a dataset from smartcore and add it to the regressor
60 let mut model =
61 SupervisedModel::new(smartcore::dataset::breast_cancer::load_dataset(), settings);
62
63 // Run a model comparison with all models at default settings
64 model.train();
65
66 // Print the results
67 println!("{}", model);
68
69    // Save the model for later
70 model.save("examples/maximal_classification_model.aml");
71}
More examples
examples/print_settings.rs (line 87)
3fn main() {
4 let regressor_settings = automl::Settings::default_regression()
5 .with_number_of_folds(3)
6 .shuffle_data(true)
7 .verbose(true)
8 .sorted_by(Metric::RSquared)
9 .with_preprocessing(PreProcessing::AddInteractions)
10 .with_linear_settings(
11 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
12 )
13 .with_lasso_settings(
14 LassoParameters::default()
15 .with_alpha(10.0)
16 .with_tol(1e-10)
17 .with_normalize(true)
18 .with_max_iter(10_000),
19 )
20 .with_ridge_settings(
21 RidgeRegressionParameters::default()
22 .with_alpha(10.0)
23 .with_normalize(true)
24 .with_solver(RidgeRegressionSolverName::Cholesky),
25 )
26 .with_elastic_net_settings(
27 ElasticNetParameters::default()
28 .with_tol(1e-10)
29 .with_normalize(true)
30 .with_alpha(1.0)
31 .with_max_iter(10_000)
32 .with_l1_ratio(0.5),
33 )
34 .with_knn_regressor_settings(
35 KNNRegressorParameters::default()
36 .with_algorithm(KNNAlgorithmName::CoverTree)
37 .with_k(3)
38 .with_distance(Distance::Euclidean)
39 .with_weight(KNNWeightFunction::Uniform),
40 )
41 .with_svr_settings(
42 SVRParameters::default()
43 .with_eps(1e-10)
44 .with_tol(1e-10)
45 .with_c(1.0)
46 .with_kernel(Kernel::Linear),
47 )
48 .with_random_forest_regressor_settings(
49 RandomForestRegressorParameters::default()
50 .with_m(100)
51 .with_max_depth(5)
52 .with_min_samples_leaf(20)
53 .with_n_trees(100)
54 .with_min_samples_split(20),
55 )
56 .with_decision_tree_regressor_settings(
57 DecisionTreeRegressorParameters::default()
58 .with_min_samples_split(20)
59 .with_max_depth(5)
60 .with_min_samples_leaf(20),
61 );
62
63 let classifier_settings = automl::Settings::default_classification()
64 .with_number_of_folds(3)
65 .shuffle_data(true)
66 .verbose(true)
67 .sorted_by(Metric::Accuracy)
68 .with_preprocessing(PreProcessing::AddInteractions)
69 .with_random_forest_classifier_settings(
70 RandomForestClassifierParameters::default()
71 .with_m(100)
72 .with_max_depth(5)
73 .with_min_samples_leaf(20)
74 .with_n_trees(100)
75 .with_min_samples_split(20),
76 )
77 .with_logistic_settings(LogisticRegressionParameters::default())
78 .with_svc_settings(
79 SVCParameters::default()
80 .with_epoch(10)
81 .with_tol(1e-10)
82 .with_c(1.0)
83 .with_kernel(Kernel::Linear),
84 )
85 .with_decision_tree_classifier_settings(
86 DecisionTreeClassifierParameters::default()
87 .with_min_samples_split(20)
88 .with_max_depth(5)
89 .with_min_samples_leaf(20),
90 )
91 .with_knn_classifier_settings(
92 KNNClassifierParameters::default()
93 .with_algorithm(KNNAlgorithmName::CoverTree)
94 .with_k(3)
95 .with_distance(Distance::Hamming)
96 .with_weight(KNNWeightFunction::Uniform),
97 )
98 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
99 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
100
101 println!("{}", regressor_settings);
102 println!("{}", classifier_settings)
103}
Trait Implementations§
Source§impl Clone for DecisionTreeClassifierParameters
impl Clone for DecisionTreeClassifierParameters
Source§fn clone(&self) -> DecisionTreeClassifierParameters
fn clone(&self) -> DecisionTreeClassifierParameters
Returns a duplicate of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more
Source§impl Default for DecisionTreeClassifierParameters
impl Default for DecisionTreeClassifierParameters
Source§fn default() -> DecisionTreeClassifierParameters
fn default() -> DecisionTreeClassifierParameters
Returns the “default value” for a type. Read more
Source§impl<'de> Deserialize<'de> for DecisionTreeClassifierParameters
impl<'de> Deserialize<'de> for DecisionTreeClassifierParameters
Source§fn deserialize<__D>(
__deserializer: __D,
) -> Result<DecisionTreeClassifierParameters, <__D as Deserializer<'de>>::Error>where
__D: Deserializer<'de>,
fn deserialize<__D>(
__deserializer: __D,
) -> Result<DecisionTreeClassifierParameters, <__D as Deserializer<'de>>::Error>where
__D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer. Read more
Source§impl Serialize for DecisionTreeClassifierParameters
impl Serialize for DecisionTreeClassifierParameters
Source§fn serialize<__S>(
&self,
__serializer: __S,
) -> Result<<__S as Serializer>::Ok, <__S as Serializer>::Error>where
__S: Serializer,
fn serialize<__S>(
&self,
__serializer: __S,
) -> Result<<__S as Serializer>::Ok, <__S as Serializer>::Error>where
__S: Serializer,
Serialize this value into the given Serde serializer. Read more
Auto Trait Implementations§
impl Freeze for DecisionTreeClassifierParameters
impl RefUnwindSafe for DecisionTreeClassifierParameters
impl Send for DecisionTreeClassifierParameters
impl Sync for DecisionTreeClassifierParameters
impl Unpin for DecisionTreeClassifierParameters
impl UnwindSafe for DecisionTreeClassifierParameters
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> CloneToUninit for Twhere
T: Clone,
impl<T> CloneToUninit for Twhere
T: Clone,
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more