pub struct FeatureImportance<F: Float + Debug + Display> {
pub feature_names: Vec<String>,
pub importance: Array1<F>,
}
Expand description
Feature importance visualization for machine learning models
This struct facilitates the visualization and analysis of feature importance scores from machine learning models, helping to identify which features contribute most to predictions.
Fields§
§feature_names: Vec<String>
Names of the features
§importance: Array1<F>
Importance scores for each feature
Implementations§
Source§impl<F: Float + Debug + Display> FeatureImportance<F>
impl<F: Float + Debug + Display> FeatureImportance<F>
Source§
pub fn new(feature_names: Vec<String>, importance: Array1<F>) -> Result<Self>
pub fn new(feature_names: Vec<String>, importance: Array1<F>) -> Result<Self>
Create a new feature importance visualization
§Arguments
feature_names
- Names of the features
importance
- Importance scores for each feature
§Returns
Result<FeatureImportance<F>>
- The feature importance visualization
§Example
use ndarray::Array1;
use scirs2_neural::utils::evaluation::FeatureImportance;
// Create feature names and importance scores
let feature_names = vec![
"Age".to_string(),
"Income".to_string(),
"Education".to_string(),
"Location".to_string()
];
let importance = Array1::from_vec(vec![0.35, 0.25, 0.20, 0.10]);
// Create feature importance visualization
let feature_importance = FeatureImportance::<f64>::new(feature_names, importance).unwrap();
Examples found in repository?
examples/model_visualization_example.rs (line 93)
7fn main() -> Result<()> {
8 println!("Neural Network Model Evaluation Visualization Example\n");
9
10 // Generate some example data
11 let n_samples = 500;
12 let n_features = 10;
13 let n_classes = 4;
14
15 println!(
16 "Generating {} samples with {} features for {} classes",
17 n_samples, n_features, n_classes
18 );
19
20 // 1. Confusion Matrix Example
21 println!("\n--- Confusion Matrix Visualization ---\n");
22
23 // Create a deterministic RNG for reproducibility
24 let mut rng = SmallRng::seed_from_u64(42);
25
26 // Generate random predictions and true labels
27 let y_true = Array::from_shape_fn(n_samples, |_| rng.random_range(0..n_classes));
28
29 // Create slightly correlated predictions (not completely random)
30 let y_pred = Array::from_shape_fn(n_samples, |i| {
31 if rng.random::<f32>() < 0.7 {
32 // 70% chance of correct prediction
33 y_true[i]
34 } else {
35 // 30% chance of random class
36 rng.random_range(0..n_classes)
37 }
38 });
39
40 // Create confusion matrix
41 let class_labels = vec![
42 "Class A".to_string(),
43 "Class B".to_string(),
44 "Class C".to_string(),
45 "Class D".to_string(),
46 ];
47
48 let cm = ConfusionMatrix::<f32>::new(
49 &y_true.view(),
50 &y_pred.view(),
51 Some(n_classes),
52 Some(class_labels),
53 )?;
54
55 // Print raw and normalized confusion matrices
56 println!("Raw Confusion Matrix:\n");
57 println!("{}", cm.to_ascii(Some("Confusion Matrix"), false));
58
59 println!("\nNormalized Confusion Matrix:\n");
60 println!("{}", cm.to_ascii(Some("Normalized Confusion Matrix"), true));
61
62 // Print metrics
63 println!("\nAccuracy: {:.3}", cm.accuracy());
64
65 let precision = cm.precision();
66 let recall = cm.recall();
67 let f1 = cm.f1_score();
68
69 println!("Per-class metrics:");
70 for i in 0..n_classes {
71 println!(
72 " Class {}: Precision={:.3}, Recall={:.3}, F1={:.3}",
73 i, precision[i], recall[i], f1[i]
74 );
75 }
76
77 println!("Macro F1 Score: {:.3}", cm.macro_f1());
78
79 // 2. Feature Importance Visualization
80 println!("\n--- Feature Importance Visualization ---\n");
81
82 // Generate random feature importance scores
83 let feature_names = (0..n_features)
84 .map(|i| format!("Feature_{}", i))
85 .collect::<Vec<String>>();
86
87 let importance = Array1::from_shape_fn(n_features, |i| {
88 // Make some features more important than others
89 let base = (n_features - i) as f32 / n_features as f32;
90 base + 0.2 * rng.random::<f32>()
91 });
92
93 let fi = FeatureImportance::new(feature_names, importance)?;
94
95 // Print full feature importance
96 println!("{}", fi.to_ascii(Some("Feature Importance"), 60, None));
97
98 // Print top-5 features
99 println!("\nTop 5 Most Important Features:\n");
100 println!("{}", fi.to_ascii(Some("Top 5 Features"), 60, Some(5)));
101
102 // 3. ROC Curve for Binary Classification
103 println!("\n--- ROC Curve Visualization ---\n");
104
105 // Generate binary classification data
106 let n_binary = 200;
107 let y_true_binary = Array::from_shape_fn(n_binary, |_| rng.random_range(0..2));
108
109 // Generate scores with some predictive power
110 let y_scores = Array1::from_shape_fn(n_binary, |i| {
111 if y_true_binary[i] == 1 {
112 // Higher scores for positive class
113 0.6 + 0.4 * rng.random::<f32>()
114 } else {
115 // Lower scores for negative class
116 0.4 * rng.random::<f32>()
117 }
118 });
119
120 let roc = ROCCurve::new(&y_true_binary.view(), &y_scores.view())?;
121
122 println!("ROC AUC: {:.3}", roc.auc);
123 println!("\n{}", roc.to_ascii(None, 50, 20));
124
125 // 4. Learning Curve Visualization
126 println!("\n--- Learning Curve Visualization ---\n");
127
128 // Generate learning curve data
129 let n_points = 10;
130 let n_cv = 5;
131
132 let train_sizes = Array1::from_shape_fn(n_points, |i| 50 + i * 50);
133
134 // Generate training scores (decreasing with size due to overfitting)
135 let train_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
136 0.95 - 0.05 * (i as f32 / n_points as f32) + 0.03 * rng.random::<f32>()
137 });
138
139 // Generate validation scores (increasing with size)
140 let val_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
141 0.7 + 0.2 * (i as f32 / n_points as f32) + 0.05 * rng.random::<f32>()
142 });
143
144 let lc = LearningCurve::new(train_sizes, train_scores, val_scores)?;
145
146 println!("{}", lc.to_ascii(None, 60, 20, "Accuracy"));
147
148 // Print final message
149 println!("\nModel evaluation visualizations completed successfully!");
150
151 Ok(())
152}
More examples
examples/colored_eval_visualization.rs (line 141)
10fn main() -> Result<()> {
11 println!(
12 "{}",
13 stylize("Neural Network Model Evaluation with Color", Style::Bold)
14 );
15 println!("{}", "-".repeat(50));
16
17 // Set up color options
18 let color_options = ColorOptions {
19 enabled: true,
20 use_background: false,
21 use_bright: true,
22 };
23
24 // Generate some example data
25 let n_samples = 500;
26 let n_features = 10;
27 let n_classes = 4;
28
29 println!(
30 "\n{} {} {} {} {} {}",
31 colorize("Generating", Color::BrightGreen),
32 colorize(n_samples.to_string(), Color::BrightYellow),
33 colorize("samples with", Color::BrightGreen),
34 colorize(n_features.to_string(), Color::BrightYellow),
35 colorize("features for", Color::BrightGreen),
36 colorize(n_classes.to_string(), Color::BrightYellow),
37 );
38
39 // Create a deterministic RNG for reproducibility
40 let mut rng = SmallRng::seed_from_u64(42);
41
42 // 1. Confusion Matrix Example
43 println!(
44 "\n{}",
45 stylize("1. CONFUSION MATRIX VISUALIZATION", Style::Bold)
46 );
47
48 // Generate random predictions and true labels
49 let y_true = Array::from_shape_fn(n_samples, |_| rng.random_range(0..n_classes));
50
51 // Create slightly correlated predictions (not completely random)
52 let y_pred = Array::from_shape_fn(n_samples, |i| {
53 if rng.random::<f32>() < 0.7 {
54 // 70% chance of correct prediction
55 y_true[i]
56 } else {
57 // 30% chance of random class
58 rng.random_range(0..n_classes)
59 }
60 });
61
62 // Create confusion matrix
63 let class_labels = vec![
64 "Class A".to_string(),
65 "Class B".to_string(),
66 "Class C".to_string(),
67 "Class D".to_string(),
68 ];
69
70 let cm = ConfusionMatrix::<f32>::new(
71 &y_true.view(),
72 &y_pred.view(),
73 Some(n_classes),
74 Some(class_labels),
75 )?;
76
77 // Print raw and normalized confusion matrices with color
78 println!("\n{}", colorize("Raw Confusion Matrix:", Color::BrightCyan));
79 println!(
80 "{}",
81 cm.to_ascii_with_options(Some("Confusion Matrix"), false, &color_options)
82 );
83
84 println!(
85 "\n{}",
86 colorize("Normalized Confusion Matrix:", Color::BrightCyan)
87 );
88 println!(
89 "{}",
90 cm.to_ascii_with_options(Some("Normalized Confusion Matrix"), true, &color_options)
91 );
92
93 // Print metrics
94 println!(
95 "\n{} {:.3}",
96 colorize("Overall Accuracy:", Color::BrightMagenta),
97 cm.accuracy()
98 );
99
100 let precision = cm.precision();
101 let recall = cm.recall();
102 let f1 = cm.f1_score();
103
104 println!("{}", colorize("Per-class metrics:", Color::BrightMagenta));
105 for i in 0..n_classes {
106 println!(
107 " {}: {}={:.3}, {}={:.3}, {}={:.3}",
108 colorize(format!("Class {}", i), Color::BrightYellow),
109 colorize("Precision", Color::BrightCyan),
110 precision[i],
111 colorize("Recall", Color::BrightGreen),
112 recall[i],
113 colorize("F1", Color::BrightBlue),
114 f1[i]
115 );
116 }
117
118 println!(
119 "{} {:.3}",
120 colorize("Macro F1 Score:", Color::BrightMagenta),
121 cm.macro_f1()
122 );
123
124 // 2. Feature Importance Visualization
125 println!(
126 "\n{}",
127 stylize("2. FEATURE IMPORTANCE VISUALIZATION", Style::Bold)
128 );
129
130 // Generate random feature importance scores
131 let feature_names = (0..n_features)
132 .map(|i| format!("Feature_{}", i))
133 .collect::<Vec<String>>();
134
135 let importance = Array1::from_shape_fn(n_features, |i| {
136 // Make some features more important than others
137 let base = (n_features - i) as f32 / n_features as f32;
138 base + 0.2 * rng.random::<f32>()
139 });
140
141 let fi = FeatureImportance::new(feature_names, importance)?;
142
143 // Print full feature importance with color
144 println!(
145 "{}",
146 fi.to_ascii_with_options(Some("Feature Importance"), 60, None, &color_options)
147 );
148
149 // Print top-5 features with color
150 println!(
151 "\n{}",
152 colorize("Top 5 Most Important Features:", Color::BrightCyan)
153 );
154 println!(
155 "{}",
156 fi.to_ascii_with_options(Some("Top 5 Features"), 60, Some(5), &color_options)
157 );
158
159 // 3. ROC Curve for Binary Classification
160 println!("\n{}", stylize("3. ROC CURVE VISUALIZATION", Style::Bold));
161
162 // Generate binary classification data
163 let n_binary = 200;
164 let y_true_binary = Array::from_shape_fn(n_binary, |_| rng.random_range(0..2));
165
166 // Generate scores with some predictive power
167 let y_scores = Array1::from_shape_fn(n_binary, |i| {
168 if y_true_binary[i] == 1 {
169 // Higher scores for positive class
170 0.6 + 0.4 * rng.random::<f32>()
171 } else {
172 // Lower scores for negative class
173 0.4 * rng.random::<f32>()
174 }
175 });
176
177 let roc = ROCCurve::new(&y_true_binary.view(), &y_scores.view())?;
178
179 println!(
180 "{} {:.3}",
181 colorize("ROC AUC:", Color::BrightMagenta),
182 roc.auc
183 );
184
185 println!("\n{}", roc.to_ascii(None, 50, 20));
186
187 // 4. Learning Curve Visualization
188 println!(
189 "\n{}",
190 stylize("4. LEARNING CURVE VISUALIZATION", Style::Bold)
191 );
192
193 // Generate learning curve data
194 let n_points = 10;
195 let n_cv = 5;
196
197 let train_sizes = Array1::from_shape_fn(n_points, |i| 50 + i * 50);
198
199 // Generate training scores (decreasing with size due to overfitting)
200 let train_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
201 0.95 - 0.05 * (i as f32 / n_points as f32) + 0.03 * rng.random::<f32>()
202 });
203
204 // Generate validation scores (increasing with size)
205 let val_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
206 0.7 + 0.2 * (i as f32 / n_points as f32) + 0.05 * rng.random::<f32>()
207 });
208
209 let lc = LearningCurve::new(train_sizes, train_scores, val_scores)?;
210
211 println!("{}", lc.to_ascii(None, 60, 20, "Accuracy"));
212
213 // Print final message with color
214 println!(
215 "\n{}",
216 colorize(
217 "Model evaluation visualizations completed successfully!",
218 Color::BrightGreen
219 )
220 );
221
222 Ok(())
223}
Source§
pub fn to_ascii(
&self,
title: Option<&str>,
width: usize,
k: Option<usize>,
) -> String
pub fn to_ascii( &self, title: Option<&str>, width: usize, k: Option<usize>, ) -> String
Create an ASCII bar chart of feature importance
§Arguments
title
- Optional title for the chart
width
- Width of the bar chart
k
- Number of top features to include (None for all)
§Returns
String
- ASCII bar chart
Examples found in repository?
examples/model_visualization_example.rs (line 96)
7fn main() -> Result<()> {
8 println!("Neural Network Model Evaluation Visualization Example\n");
9
10 // Generate some example data
11 let n_samples = 500;
12 let n_features = 10;
13 let n_classes = 4;
14
15 println!(
16 "Generating {} samples with {} features for {} classes",
17 n_samples, n_features, n_classes
18 );
19
20 // 1. Confusion Matrix Example
21 println!("\n--- Confusion Matrix Visualization ---\n");
22
23 // Create a deterministic RNG for reproducibility
24 let mut rng = SmallRng::seed_from_u64(42);
25
26 // Generate random predictions and true labels
27 let y_true = Array::from_shape_fn(n_samples, |_| rng.random_range(0..n_classes));
28
29 // Create slightly correlated predictions (not completely random)
30 let y_pred = Array::from_shape_fn(n_samples, |i| {
31 if rng.random::<f32>() < 0.7 {
32 // 70% chance of correct prediction
33 y_true[i]
34 } else {
35 // 30% chance of random class
36 rng.random_range(0..n_classes)
37 }
38 });
39
40 // Create confusion matrix
41 let class_labels = vec![
42 "Class A".to_string(),
43 "Class B".to_string(),
44 "Class C".to_string(),
45 "Class D".to_string(),
46 ];
47
48 let cm = ConfusionMatrix::<f32>::new(
49 &y_true.view(),
50 &y_pred.view(),
51 Some(n_classes),
52 Some(class_labels),
53 )?;
54
55 // Print raw and normalized confusion matrices
56 println!("Raw Confusion Matrix:\n");
57 println!("{}", cm.to_ascii(Some("Confusion Matrix"), false));
58
59 println!("\nNormalized Confusion Matrix:\n");
60 println!("{}", cm.to_ascii(Some("Normalized Confusion Matrix"), true));
61
62 // Print metrics
63 println!("\nAccuracy: {:.3}", cm.accuracy());
64
65 let precision = cm.precision();
66 let recall = cm.recall();
67 let f1 = cm.f1_score();
68
69 println!("Per-class metrics:");
70 for i in 0..n_classes {
71 println!(
72 " Class {}: Precision={:.3}, Recall={:.3}, F1={:.3}",
73 i, precision[i], recall[i], f1[i]
74 );
75 }
76
77 println!("Macro F1 Score: {:.3}", cm.macro_f1());
78
79 // 2. Feature Importance Visualization
80 println!("\n--- Feature Importance Visualization ---\n");
81
82 // Generate random feature importance scores
83 let feature_names = (0..n_features)
84 .map(|i| format!("Feature_{}", i))
85 .collect::<Vec<String>>();
86
87 let importance = Array1::from_shape_fn(n_features, |i| {
88 // Make some features more important than others
89 let base = (n_features - i) as f32 / n_features as f32;
90 base + 0.2 * rng.random::<f32>()
91 });
92
93 let fi = FeatureImportance::new(feature_names, importance)?;
94
95 // Print full feature importance
96 println!("{}", fi.to_ascii(Some("Feature Importance"), 60, None));
97
98 // Print top-5 features
99 println!("\nTop 5 Most Important Features:\n");
100 println!("{}", fi.to_ascii(Some("Top 5 Features"), 60, Some(5)));
101
102 // 3. ROC Curve for Binary Classification
103 println!("\n--- ROC Curve Visualization ---\n");
104
105 // Generate binary classification data
106 let n_binary = 200;
107 let y_true_binary = Array::from_shape_fn(n_binary, |_| rng.random_range(0..2));
108
109 // Generate scores with some predictive power
110 let y_scores = Array1::from_shape_fn(n_binary, |i| {
111 if y_true_binary[i] == 1 {
112 // Higher scores for positive class
113 0.6 + 0.4 * rng.random::<f32>()
114 } else {
115 // Lower scores for negative class
116 0.4 * rng.random::<f32>()
117 }
118 });
119
120 let roc = ROCCurve::new(&y_true_binary.view(), &y_scores.view())?;
121
122 println!("ROC AUC: {:.3}", roc.auc);
123 println!("\n{}", roc.to_ascii(None, 50, 20));
124
125 // 4. Learning Curve Visualization
126 println!("\n--- Learning Curve Visualization ---\n");
127
128 // Generate learning curve data
129 let n_points = 10;
130 let n_cv = 5;
131
132 let train_sizes = Array1::from_shape_fn(n_points, |i| 50 + i * 50);
133
134 // Generate training scores (decreasing with size due to overfitting)
135 let train_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
136 0.95 - 0.05 * (i as f32 / n_points as f32) + 0.03 * rng.random::<f32>()
137 });
138
139 // Generate validation scores (increasing with size)
140 let val_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
141 0.7 + 0.2 * (i as f32 / n_points as f32) + 0.05 * rng.random::<f32>()
142 });
143
144 let lc = LearningCurve::new(train_sizes, train_scores, val_scores)?;
145
146 println!("{}", lc.to_ascii(None, 60, 20, "Accuracy"));
147
148 // Print final message
149 println!("\nModel evaluation visualizations completed successfully!");
150
151 Ok(())
152}
Source§
pub fn to_ascii_with_options(
&self,
title: Option<&str>,
width: usize,
k: Option<usize>,
color_options: &ColorOptions,
) -> String
pub fn to_ascii_with_options( &self, title: Option<&str>, width: usize, k: Option<usize>, color_options: &ColorOptions, ) -> String
Create an ASCII bar chart of feature importance with color options
This method creates a bar chart visualization with customizable colors, showing feature importance scores in descending order.
§Arguments
title
- Optional title for the chart
width
- Width of the bar chart
k
- Number of top features to include (None for all)
color_options
- Color options for visualization
§Returns
String
- ASCII bar chart with colors
Examples found in repository?
examples/colored_eval_visualization.rs (line 146)
10fn main() -> Result<()> {
11 println!(
12 "{}",
13 stylize("Neural Network Model Evaluation with Color", Style::Bold)
14 );
15 println!("{}", "-".repeat(50));
16
17 // Set up color options
18 let color_options = ColorOptions {
19 enabled: true,
20 use_background: false,
21 use_bright: true,
22 };
23
24 // Generate some example data
25 let n_samples = 500;
26 let n_features = 10;
27 let n_classes = 4;
28
29 println!(
30 "\n{} {} {} {} {} {}",
31 colorize("Generating", Color::BrightGreen),
32 colorize(n_samples.to_string(), Color::BrightYellow),
33 colorize("samples with", Color::BrightGreen),
34 colorize(n_features.to_string(), Color::BrightYellow),
35 colorize("features for", Color::BrightGreen),
36 colorize(n_classes.to_string(), Color::BrightYellow),
37 );
38
39 // Create a deterministic RNG for reproducibility
40 let mut rng = SmallRng::seed_from_u64(42);
41
42 // 1. Confusion Matrix Example
43 println!(
44 "\n{}",
45 stylize("1. CONFUSION MATRIX VISUALIZATION", Style::Bold)
46 );
47
48 // Generate random predictions and true labels
49 let y_true = Array::from_shape_fn(n_samples, |_| rng.random_range(0..n_classes));
50
51 // Create slightly correlated predictions (not completely random)
52 let y_pred = Array::from_shape_fn(n_samples, |i| {
53 if rng.random::<f32>() < 0.7 {
54 // 70% chance of correct prediction
55 y_true[i]
56 } else {
57 // 30% chance of random class
58 rng.random_range(0..n_classes)
59 }
60 });
61
62 // Create confusion matrix
63 let class_labels = vec![
64 "Class A".to_string(),
65 "Class B".to_string(),
66 "Class C".to_string(),
67 "Class D".to_string(),
68 ];
69
70 let cm = ConfusionMatrix::<f32>::new(
71 &y_true.view(),
72 &y_pred.view(),
73 Some(n_classes),
74 Some(class_labels),
75 )?;
76
77 // Print raw and normalized confusion matrices with color
78 println!("\n{}", colorize("Raw Confusion Matrix:", Color::BrightCyan));
79 println!(
80 "{}",
81 cm.to_ascii_with_options(Some("Confusion Matrix"), false, &color_options)
82 );
83
84 println!(
85 "\n{}",
86 colorize("Normalized Confusion Matrix:", Color::BrightCyan)
87 );
88 println!(
89 "{}",
90 cm.to_ascii_with_options(Some("Normalized Confusion Matrix"), true, &color_options)
91 );
92
93 // Print metrics
94 println!(
95 "\n{} {:.3}",
96 colorize("Overall Accuracy:", Color::BrightMagenta),
97 cm.accuracy()
98 );
99
100 let precision = cm.precision();
101 let recall = cm.recall();
102 let f1 = cm.f1_score();
103
104 println!("{}", colorize("Per-class metrics:", Color::BrightMagenta));
105 for i in 0..n_classes {
106 println!(
107 " {}: {}={:.3}, {}={:.3}, {}={:.3}",
108 colorize(format!("Class {}", i), Color::BrightYellow),
109 colorize("Precision", Color::BrightCyan),
110 precision[i],
111 colorize("Recall", Color::BrightGreen),
112 recall[i],
113 colorize("F1", Color::BrightBlue),
114 f1[i]
115 );
116 }
117
118 println!(
119 "{} {:.3}",
120 colorize("Macro F1 Score:", Color::BrightMagenta),
121 cm.macro_f1()
122 );
123
124 // 2. Feature Importance Visualization
125 println!(
126 "\n{}",
127 stylize("2. FEATURE IMPORTANCE VISUALIZATION", Style::Bold)
128 );
129
130 // Generate random feature importance scores
131 let feature_names = (0..n_features)
132 .map(|i| format!("Feature_{}", i))
133 .collect::<Vec<String>>();
134
135 let importance = Array1::from_shape_fn(n_features, |i| {
136 // Make some features more important than others
137 let base = (n_features - i) as f32 / n_features as f32;
138 base + 0.2 * rng.random::<f32>()
139 });
140
141 let fi = FeatureImportance::new(feature_names, importance)?;
142
143 // Print full feature importance with color
144 println!(
145 "{}",
146 fi.to_ascii_with_options(Some("Feature Importance"), 60, None, &color_options)
147 );
148
149 // Print top-5 features with color
150 println!(
151 "\n{}",
152 colorize("Top 5 Most Important Features:", Color::BrightCyan)
153 );
154 println!(
155 "{}",
156 fi.to_ascii_with_options(Some("Top 5 Features"), 60, Some(5), &color_options)
157 );
158
159 // 3. ROC Curve for Binary Classification
160 println!("\n{}", stylize("3. ROC CURVE VISUALIZATION", Style::Bold));
161
162 // Generate binary classification data
163 let n_binary = 200;
164 let y_true_binary = Array::from_shape_fn(n_binary, |_| rng.random_range(0..2));
165
166 // Generate scores with some predictive power
167 let y_scores = Array1::from_shape_fn(n_binary, |i| {
168 if y_true_binary[i] == 1 {
169 // Higher scores for positive class
170 0.6 + 0.4 * rng.random::<f32>()
171 } else {
172 // Lower scores for negative class
173 0.4 * rng.random::<f32>()
174 }
175 });
176
177 let roc = ROCCurve::new(&y_true_binary.view(), &y_scores.view())?;
178
179 println!(
180 "{} {:.3}",
181 colorize("ROC AUC:", Color::BrightMagenta),
182 roc.auc
183 );
184
185 println!("\n{}", roc.to_ascii(None, 50, 20));
186
187 // 4. Learning Curve Visualization
188 println!(
189 "\n{}",
190 stylize("4. LEARNING CURVE VISUALIZATION", Style::Bold)
191 );
192
193 // Generate learning curve data
194 let n_points = 10;
195 let n_cv = 5;
196
197 let train_sizes = Array1::from_shape_fn(n_points, |i| 50 + i * 50);
198
199 // Generate training scores (decreasing with size due to overfitting)
200 let train_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
201 0.95 - 0.05 * (i as f32 / n_points as f32) + 0.03 * rng.random::<f32>()
202 });
203
204 // Generate validation scores (increasing with size)
205 let val_scores = Array2::from_shape_fn((n_points, n_cv), |(i, _)| {
206 0.7 + 0.2 * (i as f32 / n_points as f32) + 0.05 * rng.random::<f32>()
207 });
208
209 let lc = LearningCurve::new(train_sizes, train_scores, val_scores)?;
210
211 println!("{}", lc.to_ascii(None, 60, 20, "Accuracy"));
212
213 // Print final message with color
214 println!(
215 "\n{}",
216 colorize(
217 "Model evaluation visualizations completed successfully!",
218 Color::BrightGreen
219 )
220 );
221
222 Ok(())
223}
Auto Trait Implementations§
impl<F> Freeze for FeatureImportance<F>
impl<F> RefUnwindSafe for FeatureImportance<F>
where
    F: RefUnwindSafe,
impl<F> Send for FeatureImportance<F>
where
    F: Send,
impl<F> Sync for FeatureImportance<F>
where
    F: Sync,
impl<F> Unpin for FeatureImportance<F>
impl<F> UnwindSafe for FeatureImportance<F>
where
    F: RefUnwindSafe,
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more