quantum_meta_learning/
quantum_meta_learning.rs

#![allow(
    clippy::pedantic,
    clippy::unnecessary_wraps,
    clippy::needless_range_loop,
    clippy::useless_vec,
    clippy::needless_collect,
    clippy::too_many_arguments
)]
//! Quantum Meta-Learning Example
//!
//! This example demonstrates various quantum meta-learning algorithms including
//! MAML, Reptile, `ProtoMAML`, Meta-SGD, and ANIL for few-shot learning tasks.

use quantrs2_ml::autodiff::optimizers::Adam;
use quantrs2_ml::prelude::*;
use quantrs2_ml::qnn::QNNLayerType;
use scirs2_core::ndarray::{Array1, Array2};
19fn main() -> Result<()> {
20    println!("=== Quantum Meta-Learning Demo ===\n");
21
22    // Step 1: Basic MAML demonstration
23    println!("1. Model-Agnostic Meta-Learning (MAML)...");
24    maml_demo()?;
25
26    // Step 2: Reptile algorithm
27    println!("\n2. Reptile Algorithm...");
28    reptile_demo()?;
29
30    // Step 3: ProtoMAML with prototypical learning
31    println!("\n3. ProtoMAML (Prototypical MAML)...");
32    protomaml_demo()?;
33
34    // Step 4: Meta-SGD with learnable learning rates
35    println!("\n4. Meta-SGD...");
36    metasgd_demo()?;
37
38    // Step 5: ANIL (Almost No Inner Loop)
39    println!("\n5. ANIL Algorithm...");
40    anil_demo()?;
41
42    // Step 6: Continual meta-learning
43    println!("\n6. Continual Meta-Learning...");
44    continual_meta_learning_demo()?;
45
46    // Step 7: Task distribution analysis
47    println!("\n7. Task Distribution Analysis...");
48    task_distribution_demo()?;
49
50    println!("\n=== Meta-Learning Demo Complete ===");
51
52    Ok(())
53}
54
55/// MAML demonstration
56fn maml_demo() -> Result<()> {
57    // Create quantum model
58    let layers = vec![
59        QNNLayerType::EncodingLayer { num_features: 4 },
60        QNNLayerType::VariationalLayer { num_params: 12 },
61        QNNLayerType::EntanglementLayer {
62            connectivity: "circular".to_string(),
63        },
64        QNNLayerType::VariationalLayer { num_params: 12 },
65        QNNLayerType::MeasurementLayer {
66            measurement_basis: "computational".to_string(),
67        },
68    ];
69
70    let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
71
72    // Create MAML learner
73    let algorithm = MetaLearningAlgorithm::MAML {
74        inner_steps: 5,
75        inner_lr: 0.01,
76        first_order: true, // Use first-order approximation for efficiency
77    };
78
79    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
80
81    println!("   Created MAML meta-learner:");
82    println!("   - Inner steps: 5");
83    println!("   - Inner learning rate: 0.01");
84    println!("   - Using first-order approximation");
85
86    // Generate tasks
87    let generator = TaskGenerator::new(4, 3);
88    let tasks: Vec<MetaTask> = (0..20)
89        .map(|_| generator.generate_rotation_task(30))
90        .collect();
91
92    // Meta-train
93    println!("\n   Meta-training on 20 rotation tasks...");
94    let mut optimizer = Adam::new(0.001);
95    meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
96
97    // Test adaptation
98    let test_task = generator.generate_rotation_task(20);
99    println!("\n   Testing adaptation to new task...");
100
101    let adapted_params = meta_learner.adapt_to_task(&test_task)?;
102    println!("   Successfully adapted to new task");
103    println!(
104        "   Parameter adaptation magnitude: {:.4}",
105        (&adapted_params - meta_learner.meta_params())
106            .mapv(f64::abs)
107            .mean()
108            .unwrap()
109    );
110
111    Ok(())
112}
113
114/// Reptile algorithm demonstration
115fn reptile_demo() -> Result<()> {
116    let layers = vec![
117        QNNLayerType::EncodingLayer { num_features: 2 },
118        QNNLayerType::VariationalLayer { num_params: 8 },
119        QNNLayerType::MeasurementLayer {
120            measurement_basis: "Pauli-Z".to_string(),
121        },
122    ];
123
124    let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
125
126    let algorithm = MetaLearningAlgorithm::Reptile {
127        inner_steps: 10,
128        inner_lr: 0.1,
129    };
130
131    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
132
133    println!("   Created Reptile meta-learner:");
134    println!("   - Inner steps: 10");
135    println!("   - Inner learning rate: 0.1");
136
137    // Generate sinusoid tasks
138    let generator = TaskGenerator::new(2, 2);
139    let tasks: Vec<MetaTask> = (0..15)
140        .map(|_| generator.generate_sinusoid_task(40))
141        .collect();
142
143    println!("\n   Meta-training on 15 sinusoid tasks...");
144    let mut optimizer = Adam::new(0.001);
145    meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
146
147    println!("   Reptile training complete");
148
149    // Analyze task similarities
150    println!("\n   Task parameter statistics:");
151    for (i, task) in tasks.iter().take(3).enumerate() {
152        if let Some(amplitude) = task.metadata.get("amplitude") {
153            if let Some(phase) = task.metadata.get("phase") {
154                println!("   Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
155            }
156        }
157    }
158
159    Ok(())
160}
161
162/// `ProtoMAML` demonstration
163fn protomaml_demo() -> Result<()> {
164    let layers = vec![
165        QNNLayerType::EncodingLayer { num_features: 8 },
166        QNNLayerType::VariationalLayer { num_params: 16 },
167        QNNLayerType::EntanglementLayer {
168            connectivity: "full".to_string(),
169        },
170        QNNLayerType::MeasurementLayer {
171            measurement_basis: "computational".to_string(),
172        },
173    ];
174
175    let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
176
177    let algorithm = MetaLearningAlgorithm::ProtoMAML {
178        inner_steps: 5,
179        inner_lr: 0.01,
180        proto_weight: 0.5, // Weight for prototype regularization
181    };
182
183    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
184
185    println!("   Created ProtoMAML meta-learner:");
186    println!("   - Combines MAML with prototypical networks");
187    println!("   - Prototype weight: 0.5");
188
189    // Generate classification tasks
190    let generator = TaskGenerator::new(8, 4);
191    let tasks: Vec<MetaTask> = (0..10)
192        .map(|_| generator.generate_rotation_task(50))
193        .collect();
194
195    println!("\n   Meta-training on 4-way classification tasks...");
196    let mut optimizer = Adam::new(0.001);
197    meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
198
199    println!("   ProtoMAML leverages both gradient-based and metric-based learning");
200
201    Ok(())
202}
203
204/// Meta-SGD demonstration
205fn metasgd_demo() -> Result<()> {
206    let layers = vec![
207        QNNLayerType::EncodingLayer { num_features: 4 },
208        QNNLayerType::VariationalLayer { num_params: 12 },
209        QNNLayerType::MeasurementLayer {
210            measurement_basis: "Pauli-XYZ".to_string(),
211        },
212    ];
213
214    let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
215
216    let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
217
218    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
219
220    println!("   Created Meta-SGD learner:");
221    println!("   - Learns per-parameter learning rates");
222    println!("   - Inner steps: 3");
223
224    // Generate diverse tasks
225    let generator = TaskGenerator::new(4, 3);
226    let mut tasks = Vec::new();
227
228    // Mix different task types
229    for i in 0..12 {
230        if i % 2 == 0 {
231            tasks.push(generator.generate_rotation_task(30));
232        } else {
233            tasks.push(generator.generate_sinusoid_task(30));
234        }
235    }
236
237    println!("\n   Meta-training on mixed task distribution...");
238    let mut optimizer = Adam::new(0.0005);
239    meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
240
241    if let Some(lr) = meta_learner.per_param_lr() {
242        println!("\n   Learned per-parameter learning rates:");
243        println!(
244            "   - Min LR: {:.4}",
245            lr.iter().copied().fold(f64::INFINITY, f64::min)
246        );
247        println!(
248            "   - Max LR: {:.4}",
249            lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
250        );
251        println!("   - Mean LR: {:.4}", lr.mean().unwrap());
252    }
253
254    Ok(())
255}
256
257/// ANIL demonstration
258fn anil_demo() -> Result<()> {
259    let layers = vec![
260        QNNLayerType::EncodingLayer { num_features: 6 },
261        QNNLayerType::VariationalLayer { num_params: 12 },
262        QNNLayerType::EntanglementLayer {
263            connectivity: "circular".to_string(),
264        },
265        QNNLayerType::VariationalLayer { num_params: 12 },
266        QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
267        QNNLayerType::MeasurementLayer {
268            measurement_basis: "computational".to_string(),
269        },
270    ];
271
272    let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
273
274    let algorithm = MetaLearningAlgorithm::ANIL {
275        inner_steps: 10,
276        inner_lr: 0.1,
277    };
278
279    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
280
281    println!("   Created ANIL (Almost No Inner Loop) learner:");
282    println!("   - Only adapts final layer during inner loop");
283    println!("   - More parameter efficient than MAML");
284    println!("   - Inner steps: 10");
285
286    // Generate binary classification tasks
287    let generator = TaskGenerator::new(6, 2);
288    let tasks: Vec<MetaTask> = (0..15)
289        .map(|_| generator.generate_rotation_task(40))
290        .collect();
291
292    println!("\n   Meta-training on binary classification tasks...");
293    let mut optimizer = Adam::new(0.001);
294    meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
295
296    println!("   ANIL reduces computational cost while maintaining performance");
297
298    Ok(())
299}
300
301/// Continual meta-learning demonstration
302fn continual_meta_learning_demo() -> Result<()> {
303    let layers = vec![
304        QNNLayerType::EncodingLayer { num_features: 4 },
305        QNNLayerType::VariationalLayer { num_params: 8 },
306        QNNLayerType::MeasurementLayer {
307            measurement_basis: "computational".to_string(),
308        },
309    ];
310
311    let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
312
313    let algorithm = MetaLearningAlgorithm::Reptile {
314        inner_steps: 5,
315        inner_lr: 0.05,
316    };
317
318    let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
319    let mut continual_learner = ContinualMetaLearner::new(
320        meta_learner,
321        10,  // memory capacity
322        0.3, // replay ratio
323    );
324
325    println!("   Created Continual Meta-Learner:");
326    println!("   - Memory capacity: 10 tasks");
327    println!("   - Replay ratio: 30%");
328
329    // Generate sequence of tasks
330    let generator = TaskGenerator::new(4, 2);
331
332    println!("\n   Learning sequence of tasks...");
333    for i in 0..20 {
334        let task = if i < 10 {
335            generator.generate_rotation_task(30)
336        } else {
337            generator.generate_sinusoid_task(30)
338        };
339
340        continual_learner.learn_task(task)?;
341
342        if i % 5 == 4 {
343            println!(
344                "   Learned {} tasks, memory contains {} unique tasks",
345                i + 1,
346                continual_learner.memory_buffer_len()
347            );
348        }
349    }
350
351    println!("\n   Continual learning prevents catastrophic forgetting");
352
353    Ok(())
354}
355
356/// Task distribution analysis
357fn task_distribution_demo() -> Result<()> {
358    println!("   Analyzing task distributions...\n");
359
360    let generator = TaskGenerator::new(4, 3);
361
362    // Generate multiple tasks and analyze their properties
363    let mut rotation_tasks = Vec::new();
364    let mut sinusoid_tasks = Vec::new();
365
366    for _ in 0..50 {
367        rotation_tasks.push(generator.generate_rotation_task(20));
368        sinusoid_tasks.push(generator.generate_sinusoid_task(20));
369    }
370
371    // Analyze rotation tasks
372    println!("   Rotation Task Distribution:");
373    let angles: Vec<f64> = rotation_tasks
374        .iter()
375        .filter_map(|t| t.metadata.get("rotation_angle").copied())
376        .collect();
377
378    if !angles.is_empty() {
379        let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
380        println!("   - Mean rotation angle: {mean_angle:.2} rad");
381        println!(
382            "   - Angle range: [{:.2}, {:.2}] rad",
383            angles.iter().copied().fold(f64::INFINITY, f64::min),
384            angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
385        );
386    }
387
388    // Analyze sinusoid tasks
389    println!("\n   Sinusoid Task Distribution:");
390    let amplitudes: Vec<f64> = sinusoid_tasks
391        .iter()
392        .filter_map(|t| t.metadata.get("amplitude").copied())
393        .collect();
394
395    if !amplitudes.is_empty() {
396        let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
397        println!("   - Mean amplitude: {mean_amp:.2}");
398        println!(
399            "   - Amplitude range: [{:.2}, {:.2}]",
400            amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
401            amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
402        );
403    }
404
405    // Compare task complexities
406    println!("\n   Task Complexity Comparison:");
407    println!(
408        "   - Rotation tasks: {} training samples each",
409        rotation_tasks[0].train_data.len()
410    );
411    println!(
412        "   - Sinusoid tasks: {} training samples each",
413        sinusoid_tasks[0].train_data.len()
414    );
415    println!("   - Both use binary classification for simplicity");
416
417    Ok(())
418}