// quantum_meta_learning/quantum_meta_learning.rs

//! Quantum Meta-Learning Example
//!
//! This example demonstrates various quantum meta-learning algorithms including
//! MAML, Reptile, ProtoMAML, Meta-SGD, and ANIL for few-shot learning tasks.

use scirs2_core::ndarray::{Array1, Array2};
use quantrs2_ml::autodiff::optimizers::Adam;
use quantrs2_ml::prelude::*;
use quantrs2_ml::qnn::QNNLayerType;
11fn main() -> Result<()> {
12    println!("=== Quantum Meta-Learning Demo ===\n");
13
14    // Step 1: Basic MAML demonstration
15    println!("1. Model-Agnostic Meta-Learning (MAML)...");
16    maml_demo()?;
17
18    // Step 2: Reptile algorithm
19    println!("\n2. Reptile Algorithm...");
20    reptile_demo()?;
21
22    // Step 3: ProtoMAML with prototypical learning
23    println!("\n3. ProtoMAML (Prototypical MAML)...");
24    protomaml_demo()?;
25
26    // Step 4: Meta-SGD with learnable learning rates
27    println!("\n4. Meta-SGD...");
28    metasgd_demo()?;
29
30    // Step 5: ANIL (Almost No Inner Loop)
31    println!("\n5. ANIL Algorithm...");
32    anil_demo()?;
33
34    // Step 6: Continual meta-learning
35    println!("\n6. Continual Meta-Learning...");
36    continual_meta_learning_demo()?;
37
38    // Step 7: Task distribution analysis
39    println!("\n7. Task Distribution Analysis...");
40    task_distribution_demo()?;
41
42    println!("\n=== Meta-Learning Demo Complete ===");
43
44    Ok(())
45}
46
47/// MAML demonstration
48fn maml_demo() -> Result<()> {
49    // Create quantum model
50    let layers = vec![
51        QNNLayerType::EncodingLayer { num_features: 4 },
52        QNNLayerType::VariationalLayer { num_params: 12 },
53        QNNLayerType::EntanglementLayer {
54            connectivity: "circular".to_string(),
55        },
56        QNNLayerType::VariationalLayer { num_params: 12 },
57        QNNLayerType::MeasurementLayer {
58            measurement_basis: "computational".to_string(),
59        },
60    ];
61
62    let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64    // Create MAML learner
65    let algorithm = MetaLearningAlgorithm::MAML {
66        inner_steps: 5,
67        inner_lr: 0.01,
68        first_order: true, // Use first-order approximation for efficiency
69    };
70
71    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73    println!("   Created MAML meta-learner:");
74    println!("   - Inner steps: 5");
75    println!("   - Inner learning rate: 0.01");
76    println!("   - Using first-order approximation");
77
78    // Generate tasks
79    let generator = TaskGenerator::new(4, 3);
80    let tasks: Vec<MetaTask> = (0..20)
81        .map(|_| generator.generate_rotation_task(30))
82        .collect();
83
84    // Meta-train
85    println!("\n   Meta-training on 20 rotation tasks...");
86    let mut optimizer = Adam::new(0.001);
87    meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89    // Test adaptation
90    let test_task = generator.generate_rotation_task(20);
91    println!("\n   Testing adaptation to new task...");
92
93    let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94    println!("   Successfully adapted to new task");
95    println!(
96        "   Parameter adaptation magnitude: {:.4}",
97        (&adapted_params - meta_learner.meta_params())
98            .mapv(|x| x.abs())
99            .mean()
100            .unwrap()
101    );
102
103    Ok(())
104}
105
106/// Reptile algorithm demonstration
107fn reptile_demo() -> Result<()> {
108    let layers = vec![
109        QNNLayerType::EncodingLayer { num_features: 2 },
110        QNNLayerType::VariationalLayer { num_params: 8 },
111        QNNLayerType::MeasurementLayer {
112            measurement_basis: "Pauli-Z".to_string(),
113        },
114    ];
115
116    let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
117
118    let algorithm = MetaLearningAlgorithm::Reptile {
119        inner_steps: 10,
120        inner_lr: 0.1,
121    };
122
123    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
124
125    println!("   Created Reptile meta-learner:");
126    println!("   - Inner steps: 10");
127    println!("   - Inner learning rate: 0.1");
128
129    // Generate sinusoid tasks
130    let generator = TaskGenerator::new(2, 2);
131    let tasks: Vec<MetaTask> = (0..15)
132        .map(|_| generator.generate_sinusoid_task(40))
133        .collect();
134
135    println!("\n   Meta-training on 15 sinusoid tasks...");
136    let mut optimizer = Adam::new(0.001);
137    meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
138
139    println!("   Reptile training complete");
140
141    // Analyze task similarities
142    println!("\n   Task parameter statistics:");
143    for (i, task) in tasks.iter().take(3).enumerate() {
144        if let Some(amplitude) = task.metadata.get("amplitude") {
145            if let Some(phase) = task.metadata.get("phase") {
146                println!(
147                    "   Task {}: amplitude={:.2}, phase={:.2}",
148                    i, amplitude, phase
149                );
150            }
151        }
152    }
153
154    Ok(())
155}
156
157/// ProtoMAML demonstration
158fn protomaml_demo() -> Result<()> {
159    let layers = vec![
160        QNNLayerType::EncodingLayer { num_features: 8 },
161        QNNLayerType::VariationalLayer { num_params: 16 },
162        QNNLayerType::EntanglementLayer {
163            connectivity: "full".to_string(),
164        },
165        QNNLayerType::MeasurementLayer {
166            measurement_basis: "computational".to_string(),
167        },
168    ];
169
170    let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
171
172    let algorithm = MetaLearningAlgorithm::ProtoMAML {
173        inner_steps: 5,
174        inner_lr: 0.01,
175        proto_weight: 0.5, // Weight for prototype regularization
176    };
177
178    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
179
180    println!("   Created ProtoMAML meta-learner:");
181    println!("   - Combines MAML with prototypical networks");
182    println!("   - Prototype weight: 0.5");
183
184    // Generate classification tasks
185    let generator = TaskGenerator::new(8, 4);
186    let tasks: Vec<MetaTask> = (0..10)
187        .map(|_| generator.generate_rotation_task(50))
188        .collect();
189
190    println!("\n   Meta-training on 4-way classification tasks...");
191    let mut optimizer = Adam::new(0.001);
192    meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
193
194    println!("   ProtoMAML leverages both gradient-based and metric-based learning");
195
196    Ok(())
197}
198
199/// Meta-SGD demonstration
200fn metasgd_demo() -> Result<()> {
201    let layers = vec![
202        QNNLayerType::EncodingLayer { num_features: 4 },
203        QNNLayerType::VariationalLayer { num_params: 12 },
204        QNNLayerType::MeasurementLayer {
205            measurement_basis: "Pauli-XYZ".to_string(),
206        },
207    ];
208
209    let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
210
211    let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
212
213    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
214
215    println!("   Created Meta-SGD learner:");
216    println!("   - Learns per-parameter learning rates");
217    println!("   - Inner steps: 3");
218
219    // Generate diverse tasks
220    let generator = TaskGenerator::new(4, 3);
221    let mut tasks = Vec::new();
222
223    // Mix different task types
224    for i in 0..12 {
225        if i % 2 == 0 {
226            tasks.push(generator.generate_rotation_task(30));
227        } else {
228            tasks.push(generator.generate_sinusoid_task(30));
229        }
230    }
231
232    println!("\n   Meta-training on mixed task distribution...");
233    let mut optimizer = Adam::new(0.0005);
234    meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
235
236    if let Some(lr) = meta_learner.per_param_lr() {
237        println!("\n   Learned per-parameter learning rates:");
238        println!(
239            "   - Min LR: {:.4}",
240            lr.iter().cloned().fold(f64::INFINITY, f64::min)
241        );
242        println!(
243            "   - Max LR: {:.4}",
244            lr.iter().cloned().fold(f64::NEG_INFINITY, f64::max)
245        );
246        println!("   - Mean LR: {:.4}", lr.mean().unwrap());
247    }
248
249    Ok(())
250}
251
252/// ANIL demonstration
253fn anil_demo() -> Result<()> {
254    let layers = vec![
255        QNNLayerType::EncodingLayer { num_features: 6 },
256        QNNLayerType::VariationalLayer { num_params: 12 },
257        QNNLayerType::EntanglementLayer {
258            connectivity: "circular".to_string(),
259        },
260        QNNLayerType::VariationalLayer { num_params: 12 },
261        QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
262        QNNLayerType::MeasurementLayer {
263            measurement_basis: "computational".to_string(),
264        },
265    ];
266
267    let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
268
269    let algorithm = MetaLearningAlgorithm::ANIL {
270        inner_steps: 10,
271        inner_lr: 0.1,
272    };
273
274    let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
275
276    println!("   Created ANIL (Almost No Inner Loop) learner:");
277    println!("   - Only adapts final layer during inner loop");
278    println!("   - More parameter efficient than MAML");
279    println!("   - Inner steps: 10");
280
281    // Generate binary classification tasks
282    let generator = TaskGenerator::new(6, 2);
283    let tasks: Vec<MetaTask> = (0..15)
284        .map(|_| generator.generate_rotation_task(40))
285        .collect();
286
287    println!("\n   Meta-training on binary classification tasks...");
288    let mut optimizer = Adam::new(0.001);
289    meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
290
291    println!("   ANIL reduces computational cost while maintaining performance");
292
293    Ok(())
294}
295
296/// Continual meta-learning demonstration
297fn continual_meta_learning_demo() -> Result<()> {
298    let layers = vec![
299        QNNLayerType::EncodingLayer { num_features: 4 },
300        QNNLayerType::VariationalLayer { num_params: 8 },
301        QNNLayerType::MeasurementLayer {
302            measurement_basis: "computational".to_string(),
303        },
304    ];
305
306    let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
307
308    let algorithm = MetaLearningAlgorithm::Reptile {
309        inner_steps: 5,
310        inner_lr: 0.05,
311    };
312
313    let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
314    let mut continual_learner = ContinualMetaLearner::new(
315        meta_learner,
316        10,  // memory capacity
317        0.3, // replay ratio
318    );
319
320    println!("   Created Continual Meta-Learner:");
321    println!("   - Memory capacity: 10 tasks");
322    println!("   - Replay ratio: 30%");
323
324    // Generate sequence of tasks
325    let generator = TaskGenerator::new(4, 2);
326
327    println!("\n   Learning sequence of tasks...");
328    for i in 0..20 {
329        let task = if i < 10 {
330            generator.generate_rotation_task(30)
331        } else {
332            generator.generate_sinusoid_task(30)
333        };
334
335        continual_learner.learn_task(task)?;
336
337        if i % 5 == 4 {
338            println!(
339                "   Learned {} tasks, memory contains {} unique tasks",
340                i + 1,
341                continual_learner.memory_buffer_len()
342            );
343        }
344    }
345
346    println!("\n   Continual learning prevents catastrophic forgetting");
347
348    Ok(())
349}
350
351/// Task distribution analysis
352fn task_distribution_demo() -> Result<()> {
353    println!("   Analyzing task distributions...\n");
354
355    let generator = TaskGenerator::new(4, 3);
356
357    // Generate multiple tasks and analyze their properties
358    let mut rotation_tasks = Vec::new();
359    let mut sinusoid_tasks = Vec::new();
360
361    for _ in 0..50 {
362        rotation_tasks.push(generator.generate_rotation_task(20));
363        sinusoid_tasks.push(generator.generate_sinusoid_task(20));
364    }
365
366    // Analyze rotation tasks
367    println!("   Rotation Task Distribution:");
368    let angles: Vec<f64> = rotation_tasks
369        .iter()
370        .filter_map(|t| t.metadata.get("rotation_angle").cloned())
371        .collect();
372
373    if !angles.is_empty() {
374        let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
375        println!("   - Mean rotation angle: {:.2} rad", mean_angle);
376        println!(
377            "   - Angle range: [{:.2}, {:.2}] rad",
378            angles.iter().cloned().fold(f64::INFINITY, f64::min),
379            angles.iter().cloned().fold(f64::NEG_INFINITY, f64::max)
380        );
381    }
382
383    // Analyze sinusoid tasks
384    println!("\n   Sinusoid Task Distribution:");
385    let amplitudes: Vec<f64> = sinusoid_tasks
386        .iter()
387        .filter_map(|t| t.metadata.get("amplitude").cloned())
388        .collect();
389
390    if !amplitudes.is_empty() {
391        let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
392        println!("   - Mean amplitude: {:.2}", mean_amp);
393        println!(
394            "   - Amplitude range: [{:.2}, {:.2}]",
395            amplitudes.iter().cloned().fold(f64::INFINITY, f64::min),
396            amplitudes.iter().cloned().fold(f64::NEG_INFINITY, f64::max)
397        );
398    }
399
400    // Compare task complexities
401    println!("\n   Task Complexity Comparison:");
402    println!(
403        "   - Rotation tasks: {} training samples each",
404        rotation_tasks[0].train_data.len()
405    );
406    println!(
407        "   - Sinusoid tasks: {} training samples each",
408        sinusoid_tasks[0].train_data.len()
409    );
410    println!("   - Both use binary classification for simplicity");
411
412    Ok(())
413}