pub struct DecisionTreeRegressorParameters {
pub max_depth: Option<u16>,
pub min_samples_leaf: usize,
pub min_samples_split: usize,
}
Expand description
Parameters for decision tree regression (re-export from Smartcore). Parameters of a regression tree.
Fields§
max_depth: Option<u16> — The maximum depth of the tree.
min_samples_leaf: usize — The minimum number of samples required to be at a leaf node.
min_samples_split: usize — The minimum number of samples required to split an internal node.
Implementations§
Source§impl DecisionTreeRegressorParameters
impl DecisionTreeRegressorParameters
Sourcepub fn with_max_depth(self, max_depth: u16) -> DecisionTreeRegressorParameters
pub fn with_max_depth(self, max_depth: u16) -> DecisionTreeRegressorParameters
The maximum depth of the tree.
Examples found in repository?
examples/maximal_regression.rs (line 67)
4fn main() {
5 // Totally customize settings
6 let settings = Settings::default_regression()
7 .with_number_of_folds(3)
8 .shuffle_data(true)
9 .verbose(true)
10 .with_final_model(FinalModel::Blending {
11 algorithm: Algorithm::Linear,
12 meta_training_fraction: 0.15,
13 meta_testing_fraction: 0.15,
14 })
15 .skip(Algorithm::RandomForestRegressor)
16 .sorted_by(Metric::RSquared)
17 .with_preprocessing(PreProcessing::AddInteractions)
18 .with_linear_settings(
19 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
20 )
21 .with_lasso_settings(
22 LassoParameters::default()
23 .with_alpha(1.0)
24 .with_tol(1e-4)
25 .with_normalize(true)
26 .with_max_iter(1000),
27 )
28 .with_ridge_settings(
29 RidgeRegressionParameters::default()
30 .with_alpha(1.0)
31 .with_normalize(true)
32 .with_solver(RidgeRegressionSolverName::Cholesky),
33 )
34 .with_elastic_net_settings(
35 ElasticNetParameters::default()
36 .with_tol(1e-4)
37 .with_normalize(true)
38 .with_alpha(1.0)
39 .with_max_iter(1000)
40 .with_l1_ratio(0.5),
41 )
42 .with_knn_regressor_settings(
43 KNNRegressorParameters::default()
44 .with_algorithm(KNNAlgorithmName::CoverTree)
45 .with_k(3)
46 .with_distance(Distance::Euclidean)
47 .with_weight(KNNWeightFunction::Uniform),
48 )
49 .with_svr_settings(
50 SVRParameters::default()
51 .with_eps(0.1)
52 .with_tol(1e-3)
53 .with_c(1.0)
54 .with_kernel(Kernel::Linear),
55 )
56 .with_random_forest_regressor_settings(
57 RandomForestRegressorParameters::default()
58 .with_m(1)
59 .with_max_depth(5)
60 .with_min_samples_leaf(1)
61 .with_n_trees(10)
62 .with_min_samples_split(2),
63 )
64 .with_decision_tree_regressor_settings(
65 DecisionTreeRegressorParameters::default()
66 .with_min_samples_split(2)
67 .with_max_depth(15)
68 .with_min_samples_leaf(1),
69 );
70
71 // Save the settings for later use
72 settings.save("examples/maximal_regression_settings.yaml");
73
74 // Load a dataset from smartcore and add it to the regressor along with the customized settings
75 let mut model = SupervisedModel::new(smartcore::dataset::diabetes::load_dataset(), settings);
76
77 // Run a model comparison with all models at default settings
78 model.train();
79
80 // Print the results
81 println!("{}", model);
82
83 // Save the model for later
84 model.save("examples/maximal_regression_model.aml");
85}
More examples
examples/print_settings.rs (line 59)
3fn main() {
4 let regressor_settings = automl::Settings::default_regression()
5 .with_number_of_folds(3)
6 .shuffle_data(true)
7 .verbose(true)
8 .sorted_by(Metric::RSquared)
9 .with_preprocessing(PreProcessing::AddInteractions)
10 .with_linear_settings(
11 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
12 )
13 .with_lasso_settings(
14 LassoParameters::default()
15 .with_alpha(10.0)
16 .with_tol(1e-10)
17 .with_normalize(true)
18 .with_max_iter(10_000),
19 )
20 .with_ridge_settings(
21 RidgeRegressionParameters::default()
22 .with_alpha(10.0)
23 .with_normalize(true)
24 .with_solver(RidgeRegressionSolverName::Cholesky),
25 )
26 .with_elastic_net_settings(
27 ElasticNetParameters::default()
28 .with_tol(1e-10)
29 .with_normalize(true)
30 .with_alpha(1.0)
31 .with_max_iter(10_000)
32 .with_l1_ratio(0.5),
33 )
34 .with_knn_regressor_settings(
35 KNNRegressorParameters::default()
36 .with_algorithm(KNNAlgorithmName::CoverTree)
37 .with_k(3)
38 .with_distance(Distance::Euclidean)
39 .with_weight(KNNWeightFunction::Uniform),
40 )
41 .with_svr_settings(
42 SVRParameters::default()
43 .with_eps(1e-10)
44 .with_tol(1e-10)
45 .with_c(1.0)
46 .with_kernel(Kernel::Linear),
47 )
48 .with_random_forest_regressor_settings(
49 RandomForestRegressorParameters::default()
50 .with_m(100)
51 .with_max_depth(5)
52 .with_min_samples_leaf(20)
53 .with_n_trees(100)
54 .with_min_samples_split(20),
55 )
56 .with_decision_tree_regressor_settings(
57 DecisionTreeRegressorParameters::default()
58 .with_min_samples_split(20)
59 .with_max_depth(5)
60 .with_min_samples_leaf(20),
61 );
62
63 let classifier_settings = automl::Settings::default_classification()
64 .with_number_of_folds(3)
65 .shuffle_data(true)
66 .verbose(true)
67 .sorted_by(Metric::Accuracy)
68 .with_preprocessing(PreProcessing::AddInteractions)
69 .with_random_forest_classifier_settings(
70 RandomForestClassifierParameters::default()
71 .with_m(100)
72 .with_max_depth(5)
73 .with_min_samples_leaf(20)
74 .with_n_trees(100)
75 .with_min_samples_split(20),
76 )
77 .with_logistic_settings(LogisticRegressionParameters::default())
78 .with_svc_settings(
79 SVCParameters::default()
80 .with_epoch(10)
81 .with_tol(1e-10)
82 .with_c(1.0)
83 .with_kernel(Kernel::Linear),
84 )
85 .with_decision_tree_classifier_settings(
86 DecisionTreeClassifierParameters::default()
87 .with_min_samples_split(20)
88 .with_max_depth(5)
89 .with_min_samples_leaf(20),
90 )
91 .with_knn_classifier_settings(
92 KNNClassifierParameters::default()
93 .with_algorithm(KNNAlgorithmName::CoverTree)
94 .with_k(3)
95 .with_distance(Distance::Hamming)
96 .with_weight(KNNWeightFunction::Uniform),
97 )
98 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
99 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
100
101 println!("{}", regressor_settings);
102 println!("{}", classifier_settings)
103}
Source
pub fn with_min_samples_leaf(
self,
min_samples_leaf: usize,
) -> DecisionTreeRegressorParameters
pub fn with_min_samples_leaf( self, min_samples_leaf: usize, ) -> DecisionTreeRegressorParameters
The minimum number of samples required to be at a leaf node.
Examples found in repository?
examples/maximal_regression.rs (line 68)
4fn main() {
5 // Totally customize settings
6 let settings = Settings::default_regression()
7 .with_number_of_folds(3)
8 .shuffle_data(true)
9 .verbose(true)
10 .with_final_model(FinalModel::Blending {
11 algorithm: Algorithm::Linear,
12 meta_training_fraction: 0.15,
13 meta_testing_fraction: 0.15,
14 })
15 .skip(Algorithm::RandomForestRegressor)
16 .sorted_by(Metric::RSquared)
17 .with_preprocessing(PreProcessing::AddInteractions)
18 .with_linear_settings(
19 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
20 )
21 .with_lasso_settings(
22 LassoParameters::default()
23 .with_alpha(1.0)
24 .with_tol(1e-4)
25 .with_normalize(true)
26 .with_max_iter(1000),
27 )
28 .with_ridge_settings(
29 RidgeRegressionParameters::default()
30 .with_alpha(1.0)
31 .with_normalize(true)
32 .with_solver(RidgeRegressionSolverName::Cholesky),
33 )
34 .with_elastic_net_settings(
35 ElasticNetParameters::default()
36 .with_tol(1e-4)
37 .with_normalize(true)
38 .with_alpha(1.0)
39 .with_max_iter(1000)
40 .with_l1_ratio(0.5),
41 )
42 .with_knn_regressor_settings(
43 KNNRegressorParameters::default()
44 .with_algorithm(KNNAlgorithmName::CoverTree)
45 .with_k(3)
46 .with_distance(Distance::Euclidean)
47 .with_weight(KNNWeightFunction::Uniform),
48 )
49 .with_svr_settings(
50 SVRParameters::default()
51 .with_eps(0.1)
52 .with_tol(1e-3)
53 .with_c(1.0)
54 .with_kernel(Kernel::Linear),
55 )
56 .with_random_forest_regressor_settings(
57 RandomForestRegressorParameters::default()
58 .with_m(1)
59 .with_max_depth(5)
60 .with_min_samples_leaf(1)
61 .with_n_trees(10)
62 .with_min_samples_split(2),
63 )
64 .with_decision_tree_regressor_settings(
65 DecisionTreeRegressorParameters::default()
66 .with_min_samples_split(2)
67 .with_max_depth(15)
68 .with_min_samples_leaf(1),
69 );
70
71 // Save the settings for later use
72 settings.save("examples/maximal_regression_settings.yaml");
73
74 // Load a dataset from smartcore and add it to the regressor along with the customized settings
75 let mut model = SupervisedModel::new(smartcore::dataset::diabetes::load_dataset(), settings);
76
77 // Run a model comparison with all models at default settings
78 model.train();
79
80 // Print the results
81 println!("{}", model);
82
83 // Save the model for later
84 model.save("examples/maximal_regression_model.aml");
85}
More examples
examples/print_settings.rs (line 60)
3fn main() {
4 let regressor_settings = automl::Settings::default_regression()
5 .with_number_of_folds(3)
6 .shuffle_data(true)
7 .verbose(true)
8 .sorted_by(Metric::RSquared)
9 .with_preprocessing(PreProcessing::AddInteractions)
10 .with_linear_settings(
11 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
12 )
13 .with_lasso_settings(
14 LassoParameters::default()
15 .with_alpha(10.0)
16 .with_tol(1e-10)
17 .with_normalize(true)
18 .with_max_iter(10_000),
19 )
20 .with_ridge_settings(
21 RidgeRegressionParameters::default()
22 .with_alpha(10.0)
23 .with_normalize(true)
24 .with_solver(RidgeRegressionSolverName::Cholesky),
25 )
26 .with_elastic_net_settings(
27 ElasticNetParameters::default()
28 .with_tol(1e-10)
29 .with_normalize(true)
30 .with_alpha(1.0)
31 .with_max_iter(10_000)
32 .with_l1_ratio(0.5),
33 )
34 .with_knn_regressor_settings(
35 KNNRegressorParameters::default()
36 .with_algorithm(KNNAlgorithmName::CoverTree)
37 .with_k(3)
38 .with_distance(Distance::Euclidean)
39 .with_weight(KNNWeightFunction::Uniform),
40 )
41 .with_svr_settings(
42 SVRParameters::default()
43 .with_eps(1e-10)
44 .with_tol(1e-10)
45 .with_c(1.0)
46 .with_kernel(Kernel::Linear),
47 )
48 .with_random_forest_regressor_settings(
49 RandomForestRegressorParameters::default()
50 .with_m(100)
51 .with_max_depth(5)
52 .with_min_samples_leaf(20)
53 .with_n_trees(100)
54 .with_min_samples_split(20),
55 )
56 .with_decision_tree_regressor_settings(
57 DecisionTreeRegressorParameters::default()
58 .with_min_samples_split(20)
59 .with_max_depth(5)
60 .with_min_samples_leaf(20),
61 );
62
63 let classifier_settings = automl::Settings::default_classification()
64 .with_number_of_folds(3)
65 .shuffle_data(true)
66 .verbose(true)
67 .sorted_by(Metric::Accuracy)
68 .with_preprocessing(PreProcessing::AddInteractions)
69 .with_random_forest_classifier_settings(
70 RandomForestClassifierParameters::default()
71 .with_m(100)
72 .with_max_depth(5)
73 .with_min_samples_leaf(20)
74 .with_n_trees(100)
75 .with_min_samples_split(20),
76 )
77 .with_logistic_settings(LogisticRegressionParameters::default())
78 .with_svc_settings(
79 SVCParameters::default()
80 .with_epoch(10)
81 .with_tol(1e-10)
82 .with_c(1.0)
83 .with_kernel(Kernel::Linear),
84 )
85 .with_decision_tree_classifier_settings(
86 DecisionTreeClassifierParameters::default()
87 .with_min_samples_split(20)
88 .with_max_depth(5)
89 .with_min_samples_leaf(20),
90 )
91 .with_knn_classifier_settings(
92 KNNClassifierParameters::default()
93 .with_algorithm(KNNAlgorithmName::CoverTree)
94 .with_k(3)
95 .with_distance(Distance::Hamming)
96 .with_weight(KNNWeightFunction::Uniform),
97 )
98 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
99 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
100
101 println!("{}", regressor_settings);
102 println!("{}", classifier_settings)
103}
Source
pub fn with_min_samples_split(
self,
min_samples_split: usize,
) -> DecisionTreeRegressorParameters
pub fn with_min_samples_split( self, min_samples_split: usize, ) -> DecisionTreeRegressorParameters
The minimum number of samples required to split an internal node.
Examples found in repository?
examples/maximal_regression.rs (line 66)
4fn main() {
5 // Totally customize settings
6 let settings = Settings::default_regression()
7 .with_number_of_folds(3)
8 .shuffle_data(true)
9 .verbose(true)
10 .with_final_model(FinalModel::Blending {
11 algorithm: Algorithm::Linear,
12 meta_training_fraction: 0.15,
13 meta_testing_fraction: 0.15,
14 })
15 .skip(Algorithm::RandomForestRegressor)
16 .sorted_by(Metric::RSquared)
17 .with_preprocessing(PreProcessing::AddInteractions)
18 .with_linear_settings(
19 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
20 )
21 .with_lasso_settings(
22 LassoParameters::default()
23 .with_alpha(1.0)
24 .with_tol(1e-4)
25 .with_normalize(true)
26 .with_max_iter(1000),
27 )
28 .with_ridge_settings(
29 RidgeRegressionParameters::default()
30 .with_alpha(1.0)
31 .with_normalize(true)
32 .with_solver(RidgeRegressionSolverName::Cholesky),
33 )
34 .with_elastic_net_settings(
35 ElasticNetParameters::default()
36 .with_tol(1e-4)
37 .with_normalize(true)
38 .with_alpha(1.0)
39 .with_max_iter(1000)
40 .with_l1_ratio(0.5),
41 )
42 .with_knn_regressor_settings(
43 KNNRegressorParameters::default()
44 .with_algorithm(KNNAlgorithmName::CoverTree)
45 .with_k(3)
46 .with_distance(Distance::Euclidean)
47 .with_weight(KNNWeightFunction::Uniform),
48 )
49 .with_svr_settings(
50 SVRParameters::default()
51 .with_eps(0.1)
52 .with_tol(1e-3)
53 .with_c(1.0)
54 .with_kernel(Kernel::Linear),
55 )
56 .with_random_forest_regressor_settings(
57 RandomForestRegressorParameters::default()
58 .with_m(1)
59 .with_max_depth(5)
60 .with_min_samples_leaf(1)
61 .with_n_trees(10)
62 .with_min_samples_split(2),
63 )
64 .with_decision_tree_regressor_settings(
65 DecisionTreeRegressorParameters::default()
66 .with_min_samples_split(2)
67 .with_max_depth(15)
68 .with_min_samples_leaf(1),
69 );
70
71 // Save the settings for later use
72 settings.save("examples/maximal_regression_settings.yaml");
73
74 // Load a dataset from smartcore and add it to the regressor along with the customized settings
75 let mut model = SupervisedModel::new(smartcore::dataset::diabetes::load_dataset(), settings);
76
77 // Run a model comparison with all models at default settings
78 model.train();
79
80 // Print the results
81 println!("{}", model);
82
83 // Save the model for later
84 model.save("examples/maximal_regression_model.aml");
85}
More examples
examples/print_settings.rs (line 58)
3fn main() {
4 let regressor_settings = automl::Settings::default_regression()
5 .with_number_of_folds(3)
6 .shuffle_data(true)
7 .verbose(true)
8 .sorted_by(Metric::RSquared)
9 .with_preprocessing(PreProcessing::AddInteractions)
10 .with_linear_settings(
11 LinearRegressionParameters::default().with_solver(LinearRegressionSolverName::QR),
12 )
13 .with_lasso_settings(
14 LassoParameters::default()
15 .with_alpha(10.0)
16 .with_tol(1e-10)
17 .with_normalize(true)
18 .with_max_iter(10_000),
19 )
20 .with_ridge_settings(
21 RidgeRegressionParameters::default()
22 .with_alpha(10.0)
23 .with_normalize(true)
24 .with_solver(RidgeRegressionSolverName::Cholesky),
25 )
26 .with_elastic_net_settings(
27 ElasticNetParameters::default()
28 .with_tol(1e-10)
29 .with_normalize(true)
30 .with_alpha(1.0)
31 .with_max_iter(10_000)
32 .with_l1_ratio(0.5),
33 )
34 .with_knn_regressor_settings(
35 KNNRegressorParameters::default()
36 .with_algorithm(KNNAlgorithmName::CoverTree)
37 .with_k(3)
38 .with_distance(Distance::Euclidean)
39 .with_weight(KNNWeightFunction::Uniform),
40 )
41 .with_svr_settings(
42 SVRParameters::default()
43 .with_eps(1e-10)
44 .with_tol(1e-10)
45 .with_c(1.0)
46 .with_kernel(Kernel::Linear),
47 )
48 .with_random_forest_regressor_settings(
49 RandomForestRegressorParameters::default()
50 .with_m(100)
51 .with_max_depth(5)
52 .with_min_samples_leaf(20)
53 .with_n_trees(100)
54 .with_min_samples_split(20),
55 )
56 .with_decision_tree_regressor_settings(
57 DecisionTreeRegressorParameters::default()
58 .with_min_samples_split(20)
59 .with_max_depth(5)
60 .with_min_samples_leaf(20),
61 );
62
63 let classifier_settings = automl::Settings::default_classification()
64 .with_number_of_folds(3)
65 .shuffle_data(true)
66 .verbose(true)
67 .sorted_by(Metric::Accuracy)
68 .with_preprocessing(PreProcessing::AddInteractions)
69 .with_random_forest_classifier_settings(
70 RandomForestClassifierParameters::default()
71 .with_m(100)
72 .with_max_depth(5)
73 .with_min_samples_leaf(20)
74 .with_n_trees(100)
75 .with_min_samples_split(20),
76 )
77 .with_logistic_settings(LogisticRegressionParameters::default())
78 .with_svc_settings(
79 SVCParameters::default()
80 .with_epoch(10)
81 .with_tol(1e-10)
82 .with_c(1.0)
83 .with_kernel(Kernel::Linear),
84 )
85 .with_decision_tree_classifier_settings(
86 DecisionTreeClassifierParameters::default()
87 .with_min_samples_split(20)
88 .with_max_depth(5)
89 .with_min_samples_leaf(20),
90 )
91 .with_knn_classifier_settings(
92 KNNClassifierParameters::default()
93 .with_algorithm(KNNAlgorithmName::CoverTree)
94 .with_k(3)
95 .with_distance(Distance::Hamming)
96 .with_weight(KNNWeightFunction::Uniform),
97 )
98 .with_gaussian_nb_settings(GaussianNBParameters::default().with_priors(vec![1.0, 1.0]))
99 .with_categorical_nb_settings(CategoricalNBParameters::default().with_alpha(1.0));
100
101 println!("{}", regressor_settings);
102 println!("{}", classifier_settings)
103}
Trait Implementations§
Source§impl Clone for DecisionTreeRegressorParameters
impl Clone for DecisionTreeRegressorParameters
Source§fn clone(&self) -> DecisionTreeRegressorParameters
fn clone(&self) -> DecisionTreeRegressorParameters
Returns a duplicate of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more
Source§impl Default for DecisionTreeRegressorParameters
impl Default for DecisionTreeRegressorParameters
Source§fn default() -> DecisionTreeRegressorParameters
fn default() -> DecisionTreeRegressorParameters
Returns the “default value” for a type. Read more
Source§impl<'de> Deserialize<'de> for DecisionTreeRegressorParameters
impl<'de> Deserialize<'de> for DecisionTreeRegressorParameters
Source§fn deserialize<__D>(
__deserializer: __D,
) -> Result<DecisionTreeRegressorParameters, <__D as Deserializer<'de>>::Error>where
__D: Deserializer<'de>,
fn deserialize<__D>(
__deserializer: __D,
) -> Result<DecisionTreeRegressorParameters, <__D as Deserializer<'de>>::Error>where
__D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer. Read more
Source§impl Serialize for DecisionTreeRegressorParameters
impl Serialize for DecisionTreeRegressorParameters
Source§fn serialize<__S>(
&self,
__serializer: __S,
) -> Result<<__S as Serializer>::Ok, <__S as Serializer>::Error>where
__S: Serializer,
fn serialize<__S>(
&self,
__serializer: __S,
) -> Result<<__S as Serializer>::Ok, <__S as Serializer>::Error>where
__S: Serializer,
Serialize this value into the given Serde serializer. Read more
Auto Trait Implementations§
impl Freeze for DecisionTreeRegressorParameters
impl RefUnwindSafe for DecisionTreeRegressorParameters
impl Send for DecisionTreeRegressorParameters
impl Sync for DecisionTreeRegressorParameters
impl Unpin for DecisionTreeRegressorParameters
impl UnwindSafe for DecisionTreeRegressorParameters
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> CloneToUninit for Twhere
T: Clone,
impl<T> CloneToUninit for Twhere
T: Clone,
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more