pub struct QuantumMLPClassifier { /* private fields */ }
Quantum neural network (multi-layer perceptron) classifier with a scikit-learn-compatible interface
Implementations
impl QuantumMLPClassifier
pub fn new() -> Self
Creates a new Quantum MLP Classifier
Examples found in repository
examples/sklearn_pipeline_demo.rs (line 43)
15 fn main() -> Result<()> {
16 println!("=== Scikit-learn Compatible Quantum ML Demo ===\n");
17
18 // Step 1: Create sklearn-style dataset
19 println!("1. Creating scikit-learn style dataset...");
20
21 let (X, y) = create_sklearn_dataset()?;
22 println!(" - Dataset shape: {:?}", X.dim());
23 println!(
24 " - Labels: {} classes",
25 y.iter()
26 .map(|&x| x as i32)
27 .collect::<std::collections::HashSet<_>>()
28 .len()
29 );
30 println!(
31 " - Feature range: [{:.3}, {:.3}]",
32 X.iter().fold(f64::INFINITY, |a, &b| a.min(b)),
33 X.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b))
34 );
35
36 // Step 2: Create sklearn-compatible quantum estimators
37 println!("\n2. Creating sklearn-compatible quantum estimators...");
38
39 // Quantum Support Vector Classifier
40 let qsvc = QuantumSVC::new();
41
42 // Quantum Multi-Layer Perceptron Classifier
43 let qmlp = QuantumMLPClassifier::new();
44
45 // Quantum K-Means Clustering
46 let mut qkmeans = QuantumKMeans::new(2); // n_clusters
47
48 println!(" - QuantumSVC: quantum kernel");
49 println!(" - QuantumMLP: multi-layer perceptron");
50 println!(" - QuantumKMeans: 2 clusters");
51
52 // Step 3: Create sklearn-style preprocessing pipeline
53 println!("\n3. Building sklearn-compatible preprocessing pipeline...");
54
55 let preprocessing_pipeline = Pipeline::new(vec![
56 ("scaler", Box::new(StandardScaler::new())),
57 (
58 "feature_selection",
59 Box::new(SelectKBest::new(
60 "quantum_mutual_info", // score_func
61 3, // k
62 )),
63 ),
64 (
65 "quantum_encoder",
66 Box::new(QuantumFeatureEncoder::new(
67 "angle", // encoding_type
68 "l2", // normalization
69 )),
70 ),
71 ])?;
72
73 // Step 4: Create complete quantum ML pipeline
74 println!("\n4. Creating complete quantum ML pipeline...");
75
76 let quantum_pipeline = Pipeline::new(vec![
77 ("preprocessing", Box::new(preprocessing_pipeline)),
78 ("classifier", Box::new(qsvc.clone())),
79 ])?;
80
81 println!(" Pipeline steps:");
82 for (i, step_name) in quantum_pipeline.named_steps().iter().enumerate() {
83 println!(" {}. {}", i + 1, step_name);
84 }
85
86 // Step 5: Train-test split (sklearn style)
87 println!("\n5. Performing train-test split...");
88
89 let (X_train, X_test, y_train, y_test) = model_selection::train_test_split(
90 &X,
91 &y,
92 0.3, // test_size
93 Some(42), // random_state
94 )?;
95
96 println!(" - Training set: {:?}", X_train.dim());
97 println!(" - Test set: {:?}", X_test.dim());
98
99 // Step 6: Cross-validation with quantum models
100 println!("\n6. Performing cross-validation...");
101
102 let mut pipeline_clone = quantum_pipeline.clone();
103 let cv_scores = model_selection::cross_val_score(
104 &mut pipeline_clone,
105 &X_train,
106 &y_train,
107 5, // cv
108 )?;
109
110 println!(" Cross-validation scores: {:?}", cv_scores);
111 println!(
112 " Mean CV accuracy: {:.3} (+/- {:.3})",
113 cv_scores.mean().unwrap(),
114 cv_scores.std(0.0) * 2.0
115 );
116
117 // Step 7: Hyperparameter grid search
118 println!("\n7. Hyperparameter optimization with GridSearchCV...");
119
120 let param_grid = HashMap::from([
121 (
122 "classifier__C".to_string(),
123 vec!["0.1".to_string(), "1.0".to_string(), "10.0".to_string()],
124 ),
125 (
126 "classifier__feature_map_depth".to_string(),
127 vec!["1".to_string(), "2".to_string(), "3".to_string()],
128 ),
129 (
130 "preprocessing__feature_selection__k".to_string(),
131 vec!["2".to_string(), "3".to_string(), "4".to_string()],
132 ),
133 ]);
134
135 let mut grid_search = model_selection::GridSearchCV::new(
136 quantum_pipeline.clone(), // estimator
137 param_grid,
138 3, // cv
139 );
140
141 grid_search.fit(&X_train, &y_train)?;
142
143 println!(" Best parameters: {:?}", grid_search.best_params_);
144 println!(
145 " Best cross-validation score: {:.3}",
146 grid_search.best_score_
147 );
148
149 // Step 8: Train best model and evaluate
150 println!("\n8. Training best model and evaluation...");
151
152 let best_model = grid_search.best_estimator_;
153 let y_pred = best_model.predict(&X_test)?;
154
155 // Calculate metrics using sklearn-style functions
156 let y_test_int = y_test.mapv(|x| x.round() as i32);
157 let accuracy = metrics::accuracy_score(&y_test_int, &y_pred);
158 let precision = metrics::precision_score(&y_test_int, &y_pred, "weighted"); // average
159 let recall = metrics::recall_score(&y_test_int, &y_pred, "weighted"); // average
160 let f1 = metrics::f1_score(&y_test_int, &y_pred, "weighted"); // average
161
162 println!(" Test Results:");
163 println!(" - Accuracy: {:.3}", accuracy);
164 println!(" - Precision: {:.3}", precision);
165 println!(" - Recall: {:.3}", recall);
166 println!(" - F1-score: {:.3}", f1);
167
168 // Step 9: Classification report
169 println!("\n9. Detailed classification report...");
170
171 let classification_report = metrics::classification_report(
172 &y_test_int,
173 &y_pred,
174 vec!["Class 0", "Class 1"], // target_names
175 3, // digits
176 );
177 println!("{}", classification_report);
178
179 // Step 10: Feature importance analysis
180 println!("\n10. Feature importance analysis...");
181
182 if let Some(feature_importances) = best_model.feature_importances() {
183 println!(" Quantum Feature Importances:");
184 for (i, importance) in feature_importances.iter().enumerate() {
185 println!(" - Feature {}: {:.4}", i, importance);
186 }
187 }
188
189 // Step 11: Model comparison with classical sklearn models
190 println!("\n11. Comparing with classical sklearn models...");
191
192 let classical_models = vec![
193 (
194 "Logistic Regression",
195 Box::new(LogisticRegression::new()) as Box<dyn SklearnClassifier>,
196 ),
197 (
198 "Random Forest",
199 Box::new(RandomForestClassifier::new()) as Box<dyn SklearnClassifier>,
200 ),
201 ("SVM", Box::new(SVC::new()) as Box<dyn SklearnClassifier>),
202 ];
203
204 let mut comparison_results = Vec::new();
205
206 for (name, mut model) in classical_models {
207 model.fit(&X_train, Some(&y_train))?;
208 let y_pred_classical = model.predict(&X_test)?;
209 let classical_accuracy = metrics::accuracy_score(&y_test_int, &y_pred_classical);
210 comparison_results.push((name, classical_accuracy));
211 }
212
213 println!(" Model Comparison:");
214 println!(" - Quantum Pipeline: {:.3}", accuracy);
215 for (name, classical_accuracy) in comparison_results {
216 println!(" - {}: {:.3}", name, classical_accuracy);
217 }
218
219 // Step 12: Clustering with quantum K-means
220 println!("\n12. Quantum clustering analysis...");
221
222 let cluster_labels = qkmeans.fit_predict(&X)?;
223 let silhouette_score = metrics::silhouette_score(&X, &cluster_labels, "euclidean"); // metric
224 let calinski_score = metrics::calinski_harabasz_score(&X, &cluster_labels);
225
226 println!(" Clustering Results:");
227 println!(" - Silhouette Score: {:.3}", silhouette_score);
228 println!(" - Calinski-Harabasz Score: {:.3}", calinski_score);
229 println!(
230 " - Unique clusters found: {}",
231 cluster_labels
232 .iter()
233 .collect::<std::collections::HashSet<_>>()
234 .len()
235 );
236
237 // Step 13: Model persistence (sklearn style)
238 println!("\n13. Model persistence (sklearn joblib style)...");
239
240 // Save model
241 best_model.save("quantum_sklearn_model.joblib")?;
242 println!(" - Model saved to: quantum_sklearn_model.joblib");
243
244 // Load model
245 let loaded_model = QuantumSVC::load("quantum_sklearn_model.joblib")?;
246 let test_subset = X_test.slice(s![..5, ..]).to_owned();
247 let y_pred_loaded = loaded_model.predict(&test_subset)?;
248 println!(" - Model loaded and tested on 5 samples");
249
250 // Step 14: Advanced sklearn utilities
251 println!("\n14. Advanced sklearn utilities...");
252
253 // Learning curves (commented out - function not available)
254 // let (train_sizes, train_scores, val_scores) = model_selection::learning_curve(...)?;
255 println!(" Learning Curve Analysis: (Mock results)");
256 let train_sizes = vec![0.1, 0.33, 0.55, 0.78, 1.0];
257 let train_scores = vec![0.65, 0.72, 0.78, 0.82, 0.85];
258 let val_scores = vec![0.62, 0.70, 0.76, 0.79, 0.81];
259
260 for (i, &size) in train_sizes.iter().enumerate() {
261 println!(
262 " - {:.0}% data: train={:.3}, val={:.3}",
263 size * 100.0,
264 train_scores[i],
265 val_scores[i]
266 );
267 }
268
269 // Validation curves (commented out - function not available)
270 // let (train_scores_val, test_scores_val) = model_selection::validation_curve(...)?;
271 println!(" Validation Curve (C parameter): (Mock results)");
272 let param_range = vec![0.1, 0.5, 1.0, 2.0, 5.0];
273 let train_scores_val = vec![0.70, 0.75, 0.80, 0.78, 0.75];
274 let test_scores_val = vec![0.68, 0.73, 0.78, 0.76, 0.72];
275
276    for (i, &param_value) in param_range.iter().enumerate() {
277 println!(
278 " - C={}: train={:.3}, test={:.3}",
279 param_value, train_scores_val[i], test_scores_val[i]
280 );
281 }
282
283 // Step 15: Quantum-specific sklearn extensions
284 println!("\n15. Quantum-specific sklearn extensions...");
285
286 // Quantum feature analysis
287 let quantum_feature_analysis = analyze_quantum_features(&best_model, &X_test)?;
288 println!(" Quantum Feature Analysis:");
289 println!(
290 " - Quantum advantage score: {:.3}",
291 quantum_feature_analysis.advantage_score
292 );
293 println!(
294 " - Feature entanglement: {:.3}",
295 quantum_feature_analysis.entanglement_measure
296 );
297 println!(
298 " - Circuit depth efficiency: {:.3}",
299 quantum_feature_analysis.circuit_efficiency
300 );
301
302 // Quantum model interpretation
303 let sample_row = X_test.row(0).to_owned();
304 let quantum_interpretation = interpret_quantum_model(&best_model, &sample_row)?;
305 println!(" Quantum Model Interpretation (sample 0):");
306 println!(
307 " - Quantum state fidelity: {:.3}",
308 quantum_interpretation.state_fidelity
309 );
310 println!(
311 " - Feature contributions: {:?}",
312 quantum_interpretation.feature_contributions
313 );
314
315 println!("\n=== Scikit-learn Integration Demo Complete ===");
316
317 Ok(())
318}
Set hidden layer sizes
pub fn set_activation(self, activation: String) -> Self
Set activation function
pub fn set_learning_rate(self, lr: f64) -> Self
Set learning rate
pub fn set_max_iter(self, max_iter: usize) -> Self
Set maximum iterations
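The setters above consume self and return Self, so they can be chained in a builder style. A minimal sketch, assuming the classifier is in scope; the activation name, learning rate, and iteration count below are illustrative values, not documented defaults:

// Hypothetical configuration values, for illustration only.
let qmlp = QuantumMLPClassifier::new()
    .set_activation("relu".to_string())
    .set_learning_rate(0.01)
    .set_max_iter(200);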
Trait Implementations
impl SklearnClassifier for QuantumMLPClassifier
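Because QuantumMLPClassifier implements SklearnClassifier, it can be trained and evaluated the same way as the classical models in the repository example above. A hedged sketch that mirrors the fit/predict calls shown there; the exact trait signatures may differ:

// Assumes X_train, y_train, and X_test are prepared as in the example above.
let mut qmlp = QuantumMLPClassifier::new();
qmlp.fit(&X_train, Some(&y_train))?;
let y_pred = qmlp.predict(&X_test)?;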
Auto Trait Implementations
impl Freeze for QuantumMLPClassifier
impl !RefUnwindSafe for QuantumMLPClassifier
impl Send for QuantumMLPClassifier
impl Sync for QuantumMLPClassifier
impl Unpin for QuantumMLPClassifier
impl !UnwindSafe for QuantumMLPClassifier
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.
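These two methods come from the either crate's blanket IntoEither implementation, which applies to any sized type, including this classifier. A small illustrative sketch using a plain integer:

// Illustration of the either crate's IntoEither blanket impl; not specific to this crate.
use either::{Either, IntoEither};

let left: Either<i32, i32> = 5.into_either(true);
assert!(left.is_left());
let right: Either<i32, i32> = 5.into_either_with(|x| *x > 10);
assert!(right.is_right());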
impl<T> Pointable for T
impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset.
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.