pub struct QuantumTransferLearning { /* private fields */ }
Quantum transfer learning framework. Adapts a pre-trained quantum model to a new target task by appending new layers and training them according to a chosen TransferStrategy (fine-tuning, feature extraction, or progressive unfreezing).
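A minimal end-to-end sketch is shown below. It follows the repository example examples/transfer_learning.rs; the use paths for this crate's own types (QuantumTransferLearning, QuantumModelZoo, QNNLayerType, TransferStrategy, Adam) are elided because they depend on how the crate is re-exported, and the synthetic data is purely illustrative.

use ndarray::{Array1, Array2};

fn transfer_learning_sketch() -> Result<()> {
    // Source model: a pre-trained image classifier from the model zoo.
    let pretrained = QuantumModelZoo::get_image_classifier()?;

    // Target head: new layers appended for the new task.
    let new_layers = vec![
        QNNLayerType::VariationalLayer { num_params: 6 },
        QNNLayerType::MeasurementLayer {
            measurement_basis: "Pauli-Z".to_string(),
        },
    ];

    // Fine-tune only the last two layers of the combined model.
    let mut model = QuantumTransferLearning::new(
        pretrained,
        new_layers,
        TransferStrategy::FineTuning { num_trainable_layers: 2 },
    )?;

    // Illustrative target-task data: 50 samples, 4 features, binary labels.
    let data = Array2::<f64>::zeros((50, 4));
    let labels = Array1::from_shape_fn(50, |i| (i % 2) as f64);

    let mut optimizer = Adam::new(0.01);
    let result = model.train(&data, &labels, &mut optimizer, 20, 10)?;
    println!("final loss: {:.4}", result.final_loss);
    Ok(())
}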
Implementations

impl QuantumTransferLearning
pub fn new(
    source_model: PretrainedModel,
    target_layers: Vec<QNNLayerType>,
    strategy: TransferStrategy,
) -> Result<Self>
Create a new transfer learning instance
Examples found in repository:
examples/transfer_learning.rs (lines 52-58)
19 fn main() -> Result<()> {
20 println!("=== Quantum Transfer Learning Demo ===\n");
21
22 // Step 1: Load a pre-trained model from the model zoo
23 println!("1. Loading pre-trained image classifier...");
24 let pretrained = QuantumModelZoo::get_image_classifier()?;
25
26 println!(" Pre-trained model info:");
27 println!(" - Task: {}", pretrained.task_description);
28 println!(
29 " - Original accuracy: {:.2}%",
30 pretrained
31 .performance_metrics
32 .get("accuracy")
33 .unwrap_or(&0.0)
34 * 100.0
35 );
36 println!(" - Number of qubits: {}", pretrained.qnn.num_qubits);
37
38 // Step 2: Create new layers for the target task
39 println!("\n2. Creating new layers for text classification task...");
40 let new_layers = vec![
41 QNNLayerType::VariationalLayer { num_params: 6 },
42 QNNLayerType::MeasurementLayer {
43 measurement_basis: "Pauli-Z".to_string(),
44 },
45 ];
46
47 // Step 3: Initialize transfer learning with different strategies
48 println!("\n3. Testing different transfer learning strategies:");
49
50 // Strategy 1: Fine-tuning
51 println!("\n a) Fine-tuning strategy (train last 2 layers only)");
52 let mut transfer_finetune = QuantumTransferLearning::new(
53 pretrained.clone(),
54 new_layers.clone(),
55 TransferStrategy::FineTuning {
56 num_trainable_layers: 2,
57 },
58 )?;
59
60 // Strategy 2: Feature extraction
61 println!(" b) Feature extraction strategy (freeze all pre-trained layers)");
62 let transfer_feature = QuantumTransferLearning::new(
63 pretrained.clone(),
64 new_layers.clone(),
65 TransferStrategy::FeatureExtraction,
66 )?;
67
68 // Strategy 3: Progressive unfreezing
69 println!(" c) Progressive unfreezing (unfreeze one layer every 5 epochs)");
70 let transfer_progressive = QuantumTransferLearning::new(
71 pretrained,
72 new_layers,
73 TransferStrategy::ProgressiveUnfreezing { unfreeze_rate: 5 },
74 )?;
75
76 // Step 4: Generate synthetic training data for the new task
77 println!("\n4. Generating synthetic training data...");
78 let num_samples = 50;
79 let num_features = 4;
80 let training_data = Array2::from_shape_fn((num_samples, num_features), |(i, j)| {
81 (i as f64).mul_add(0.1, j as f64 * 0.2).sin()
82 });
83 let labels = Array1::from_shape_fn(num_samples, |i| if i % 2 == 0 { 0.0 } else { 1.0 });
84
85 // Step 5: Train with fine-tuning strategy
86 println!("\n5. Training with fine-tuning strategy...");
87 let mut optimizer = Adam::new(0.01);
88
89 let result = transfer_finetune.train(
90 &training_data,
91 &labels,
92 &mut optimizer,
93 20, // epochs
94 10, // batch_size
95 )?;
96
97 println!(" Training complete!");
98 println!(" - Final loss: {:.4}", result.final_loss);
99 println!(" - Accuracy: {:.2}%", result.accuracy * 100.0);
100
101 // Step 6: Extract features using pre-trained layers
102 println!("\n6. Extracting features from pre-trained layers...");
103 let features = transfer_feature.extract_features(&training_data)?;
104 println!(" Extracted feature dimensions: {:?}", features.dim());
105
106 // Step 7: Demonstrate model zoo
107 println!("\n7. Available pre-trained models in the zoo:");
108 println!(" - Image classifier (4 qubits, MNIST subset)");
109 println!(" - Chemistry model (6 qubits, molecular energy)");
110
111 // Load chemistry model
112 let chemistry_model = QuantumModelZoo::get_chemistry_model()?;
113 println!("\n Chemistry model info:");
114 println!(" - Task: {}", chemistry_model.task_description);
115 println!(
116 " - MAE: {:.4}",
117 chemistry_model
118 .performance_metrics
119 .get("mae")
120 .unwrap_or(&0.0)
121 );
122 println!(
123 " - R² score: {:.4}",
124 chemistry_model
125 .performance_metrics
126 .get("r2_score")
127 .unwrap_or(&0.0)
128 );
129
130 println!("\n=== Transfer Learning Demo Complete ===");
131
132 Ok(())
133 }

pub fn train(
    &mut self,
    training_data: &Array2<f64>,
    labels: &Array1<f64>,
    optimizer: &mut dyn Optimizer,
    epochs: usize,
    batch_size: usize,
) -> Result<TrainingResult>
Train the target model on new data
Examples found in repository:
examples/transfer_learning.rs (lines 89-95)
The full listing is identical to the one shown under `new` above; the referenced call is:

89    let result = transfer_finetune.train(
90        &training_data,
91        &labels,
92        &mut optimizer,
93        20, // epochs
94        10, // batch_size
95    )?;
pub fn predict(&self, data: &Array2<f64>) -> Result<Array1<f64>>
Make predictions using the target model
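`predict` has no example in the repository, so the sketch below is only illustrative: it assumes `model` has already been trained as above and that the returned `Array1<f64>` holds one score per input row that can be thresholded at 0.5 to recover binary labels; that threshold and the helper name are assumptions, not part of the documented API.

use ndarray::Array2;

// Hypothetical helper: turn raw model outputs into 0/1 class labels.
fn classify(model: &QuantumTransferLearning, test_data: &Array2<f64>) -> Result<Vec<u8>> {
    // One prediction per row of `test_data`.
    let outputs = model.predict(test_data)?;

    // Assumed post-processing: threshold scores at 0.5 for binary classes.
    Ok(outputs.iter().map(|&y| if y >= 0.5 { 1 } else { 0 }).collect())
}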
pub fn extract_features(&self, data: &Array2<f64>) -> Result<Array2<f64>>
Extract features using the pre-trained layers
Examples found in repository:
examples/transfer_learning.rs (line 103)
The full listing is identical to the one shown under `new` above; the referenced line is:

103        let features = transfer_feature.extract_features(&training_data)?;
pub fn save_model(&self, path: &str) -> Result<()>
Save the fine-tuned model
pub fn load_pretrained(path: &str) -> Result<PretrainedModel>
Load a pre-trained model for transfer learning
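Neither `save_model` nor `load_pretrained` has a repository example; the round trip below is a hedged sketch. The file path is made up, and the assumption that a model saved with `save_model` can be reloaded via `load_pretrained` and used as the source for a further transfer-learning stage follows from the signatures rather than from documented behaviour.

fn save_and_reuse(model: &QuantumTransferLearning) -> Result<()> {
    // Hypothetical path; adjust to your project layout.
    let path = "models/finetuned_classifier.qml";
    model.save_model(path)?;

    // Later: load the saved weights back as a pre-trained source model
    // and start a new transfer-learning stage from it.
    let reloaded: PretrainedModel = QuantumTransferLearning::load_pretrained(path)?;
    let next_layers = vec![QNNLayerType::VariationalLayer { num_params: 4 }];
    let _next = QuantumTransferLearning::new(
        reloaded,
        next_layers,
        TransferStrategy::FeatureExtraction,
    )?;
    Ok(())
}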
Auto Trait Implementations
impl Freeze for QuantumTransferLearning
impl RefUnwindSafe for QuantumTransferLearning
impl Send for QuantumTransferLearning
impl Sync for QuantumTransferLearning
impl Unpin for QuantumTransferLearning
impl UnwindSafe for QuantumTransferLearning
Blanket Implementations

impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

impl<T> Pointable for T

impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.