pub struct TaskGenerator { /* private fields */ }

Expand description
Task generator for meta-learning experiments
Implementations§
Source§
impl TaskGenerator
impl TaskGenerator
Source§
pub fn new(feature_dim: usize, num_classes: usize) -> Self
pub fn new(feature_dim: usize, num_classes: usize) -> Self
Create new task generator
Examples found in repository?
examples/quantum_meta_learning.rs (line 79)
48fn maml_demo() -> Result<()> {
49 // Create quantum model
50 let layers = vec![
51 QNNLayerType::EncodingLayer { num_features: 4 },
52 QNNLayerType::VariationalLayer { num_params: 12 },
53 QNNLayerType::EntanglementLayer {
54 connectivity: "circular".to_string(),
55 },
56 QNNLayerType::VariationalLayer { num_params: 12 },
57 QNNLayerType::MeasurementLayer {
58 measurement_basis: "computational".to_string(),
59 },
60 ];
61
62 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64 // Create MAML learner
65 let algorithm = MetaLearningAlgorithm::MAML {
66 inner_steps: 5,
67 inner_lr: 0.01,
68 first_order: true, // Use first-order approximation for efficiency
69 };
70
71 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73 println!(" Created MAML meta-learner:");
74 println!(" - Inner steps: 5");
75 println!(" - Inner learning rate: 0.01");
76 println!(" - Using first-order approximation");
77
78 // Generate tasks
79 let generator = TaskGenerator::new(4, 3);
80 let tasks: Vec<MetaTask> = (0..20)
81 .map(|_| generator.generate_rotation_task(30))
82 .collect();
83
84 // Meta-train
85 println!("\n Meta-training on 20 rotation tasks...");
86 let mut optimizer = Adam::new(0.001);
87 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89 // Test adaptation
90 let test_task = generator.generate_rotation_task(20);
91 println!("\n Testing adaptation to new task...");
92
93 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94 println!(" Successfully adapted to new task");
95 println!(
96 " Parameter adaptation magnitude: {:.4}",
97 (&adapted_params - meta_learner.meta_params())
98 .mapv(f64::abs)
99 .mean()
100 .unwrap()
101 );
102
103 Ok(())
104}
105
106/// Reptile algorithm demonstration
107fn reptile_demo() -> Result<()> {
108 let layers = vec![
109 QNNLayerType::EncodingLayer { num_features: 2 },
110 QNNLayerType::VariationalLayer { num_params: 8 },
111 QNNLayerType::MeasurementLayer {
112 measurement_basis: "Pauli-Z".to_string(),
113 },
114 ];
115
116 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
117
118 let algorithm = MetaLearningAlgorithm::Reptile {
119 inner_steps: 10,
120 inner_lr: 0.1,
121 };
122
123 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
124
125 println!(" Created Reptile meta-learner:");
126 println!(" - Inner steps: 10");
127 println!(" - Inner learning rate: 0.1");
128
129 // Generate sinusoid tasks
130 let generator = TaskGenerator::new(2, 2);
131 let tasks: Vec<MetaTask> = (0..15)
132 .map(|_| generator.generate_sinusoid_task(40))
133 .collect();
134
135 println!("\n Meta-training on 15 sinusoid tasks...");
136 let mut optimizer = Adam::new(0.001);
137 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
138
139 println!(" Reptile training complete");
140
141 // Analyze task similarities
142 println!("\n Task parameter statistics:");
143 for (i, task) in tasks.iter().take(3).enumerate() {
144 if let Some(amplitude) = task.metadata.get("amplitude") {
145 if let Some(phase) = task.metadata.get("phase") {
146 println!(" Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
147 }
148 }
149 }
150
151 Ok(())
152}
153
154/// `ProtoMAML` demonstration
155fn protomaml_demo() -> Result<()> {
156 let layers = vec![
157 QNNLayerType::EncodingLayer { num_features: 8 },
158 QNNLayerType::VariationalLayer { num_params: 16 },
159 QNNLayerType::EntanglementLayer {
160 connectivity: "full".to_string(),
161 },
162 QNNLayerType::MeasurementLayer {
163 measurement_basis: "computational".to_string(),
164 },
165 ];
166
167 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
168
169 let algorithm = MetaLearningAlgorithm::ProtoMAML {
170 inner_steps: 5,
171 inner_lr: 0.01,
172 proto_weight: 0.5, // Weight for prototype regularization
173 };
174
175 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
176
177 println!(" Created ProtoMAML meta-learner:");
178 println!(" - Combines MAML with prototypical networks");
179 println!(" - Prototype weight: 0.5");
180
181 // Generate classification tasks
182 let generator = TaskGenerator::new(8, 4);
183 let tasks: Vec<MetaTask> = (0..10)
184 .map(|_| generator.generate_rotation_task(50))
185 .collect();
186
187 println!("\n Meta-training on 4-way classification tasks...");
188 let mut optimizer = Adam::new(0.001);
189 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
190
191 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
192
193 Ok(())
194}
195
196/// Meta-SGD demonstration
197fn metasgd_demo() -> Result<()> {
198 let layers = vec![
199 QNNLayerType::EncodingLayer { num_features: 4 },
200 QNNLayerType::VariationalLayer { num_params: 12 },
201 QNNLayerType::MeasurementLayer {
202 measurement_basis: "Pauli-XYZ".to_string(),
203 },
204 ];
205
206 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
207
208 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
209
210 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
211
212 println!(" Created Meta-SGD learner:");
213 println!(" - Learns per-parameter learning rates");
214 println!(" - Inner steps: 3");
215
216 // Generate diverse tasks
217 let generator = TaskGenerator::new(4, 3);
218 let mut tasks = Vec::new();
219
220 // Mix different task types
221 for i in 0..12 {
222 if i % 2 == 0 {
223 tasks.push(generator.generate_rotation_task(30));
224 } else {
225 tasks.push(generator.generate_sinusoid_task(30));
226 }
227 }
228
229 println!("\n Meta-training on mixed task distribution...");
230 let mut optimizer = Adam::new(0.0005);
231 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
232
233 if let Some(lr) = meta_learner.per_param_lr() {
234 println!("\n Learned per-parameter learning rates:");
235 println!(
236 " - Min LR: {:.4}",
237 lr.iter().copied().fold(f64::INFINITY, f64::min)
238 );
239 println!(
240 " - Max LR: {:.4}",
241 lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
242 );
243 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
244 }
245
246 Ok(())
247}
248
249/// ANIL demonstration
250fn anil_demo() -> Result<()> {
251 let layers = vec![
252 QNNLayerType::EncodingLayer { num_features: 6 },
253 QNNLayerType::VariationalLayer { num_params: 12 },
254 QNNLayerType::EntanglementLayer {
255 connectivity: "circular".to_string(),
256 },
257 QNNLayerType::VariationalLayer { num_params: 12 },
258 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
259 QNNLayerType::MeasurementLayer {
260 measurement_basis: "computational".to_string(),
261 },
262 ];
263
264 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
265
266 let algorithm = MetaLearningAlgorithm::ANIL {
267 inner_steps: 10,
268 inner_lr: 0.1,
269 };
270
271 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
272
273 println!(" Created ANIL (Almost No Inner Loop) learner:");
274 println!(" - Only adapts final layer during inner loop");
275 println!(" - More parameter efficient than MAML");
276 println!(" - Inner steps: 10");
277
278 // Generate binary classification tasks
279 let generator = TaskGenerator::new(6, 2);
280 let tasks: Vec<MetaTask> = (0..15)
281 .map(|_| generator.generate_rotation_task(40))
282 .collect();
283
284 println!("\n Meta-training on binary classification tasks...");
285 let mut optimizer = Adam::new(0.001);
286 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
287
288 println!(" ANIL reduces computational cost while maintaining performance");
289
290 Ok(())
291}
292
293/// Continual meta-learning demonstration
294fn continual_meta_learning_demo() -> Result<()> {
295 let layers = vec![
296 QNNLayerType::EncodingLayer { num_features: 4 },
297 QNNLayerType::VariationalLayer { num_params: 8 },
298 QNNLayerType::MeasurementLayer {
299 measurement_basis: "computational".to_string(),
300 },
301 ];
302
303 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
304
305 let algorithm = MetaLearningAlgorithm::Reptile {
306 inner_steps: 5,
307 inner_lr: 0.05,
308 };
309
310 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
311 let mut continual_learner = ContinualMetaLearner::new(
312 meta_learner,
313 10, // memory capacity
314 0.3, // replay ratio
315 );
316
317 println!(" Created Continual Meta-Learner:");
318 println!(" - Memory capacity: 10 tasks");
319 println!(" - Replay ratio: 30%");
320
321 // Generate sequence of tasks
322 let generator = TaskGenerator::new(4, 2);
323
324 println!("\n Learning sequence of tasks...");
325 for i in 0..20 {
326 let task = if i < 10 {
327 generator.generate_rotation_task(30)
328 } else {
329 generator.generate_sinusoid_task(30)
330 };
331
332 continual_learner.learn_task(task)?;
333
334 if i % 5 == 4 {
335 println!(
336 " Learned {} tasks, memory contains {} unique tasks",
337 i + 1,
338 continual_learner.memory_buffer_len()
339 );
340 }
341 }
342
343 println!("\n Continual learning prevents catastrophic forgetting");
344
345 Ok(())
346}
347
348/// Task distribution analysis
349fn task_distribution_demo() -> Result<()> {
350 println!(" Analyzing task distributions...\n");
351
352 let generator = TaskGenerator::new(4, 3);
353
354 // Generate multiple tasks and analyze their properties
355 let mut rotation_tasks = Vec::new();
356 let mut sinusoid_tasks = Vec::new();
357
358 for _ in 0..50 {
359 rotation_tasks.push(generator.generate_rotation_task(20));
360 sinusoid_tasks.push(generator.generate_sinusoid_task(20));
361 }
362
363 // Analyze rotation tasks
364 println!(" Rotation Task Distribution:");
365 let angles: Vec<f64> = rotation_tasks
366 .iter()
367 .filter_map(|t| t.metadata.get("rotation_angle").copied())
368 .collect();
369
370 if !angles.is_empty() {
371 let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
372 println!(" - Mean rotation angle: {mean_angle:.2} rad");
373 println!(
374 " - Angle range: [{:.2}, {:.2}] rad",
375 angles.iter().copied().fold(f64::INFINITY, f64::min),
376 angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
377 );
378 }
379
380 // Analyze sinusoid tasks
381 println!("\n Sinusoid Task Distribution:");
382 let amplitudes: Vec<f64> = sinusoid_tasks
383 .iter()
384 .filter_map(|t| t.metadata.get("amplitude").copied())
385 .collect();
386
387 if !amplitudes.is_empty() {
388 let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
389 println!(" - Mean amplitude: {mean_amp:.2}");
390 println!(
391 " - Amplitude range: [{:.2}, {:.2}]",
392 amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
393 amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
394 );
395 }
396
397 // Compare task complexities
398 println!("\n Task Complexity Comparison:");
399 println!(
400 " - Rotation tasks: {} training samples each",
401 rotation_tasks[0].train_data.len()
402 );
403 println!(
404 " - Sinusoid tasks: {} training samples each",
405 sinusoid_tasks[0].train_data.len()
406 );
407 println!(" - Both use binary classification for simplicity");
408
409 Ok(())
410}

Source§
pub fn generate_sinusoid_task(&self, num_samples: usize) -> MetaTask
pub fn generate_sinusoid_task(&self, num_samples: usize) -> MetaTask
Generate sinusoid regression task
Examples found in repository?
examples/quantum_meta_learning.rs (line 132)
107fn reptile_demo() -> Result<()> {
108 let layers = vec![
109 QNNLayerType::EncodingLayer { num_features: 2 },
110 QNNLayerType::VariationalLayer { num_params: 8 },
111 QNNLayerType::MeasurementLayer {
112 measurement_basis: "Pauli-Z".to_string(),
113 },
114 ];
115
116 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
117
118 let algorithm = MetaLearningAlgorithm::Reptile {
119 inner_steps: 10,
120 inner_lr: 0.1,
121 };
122
123 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
124
125 println!(" Created Reptile meta-learner:");
126 println!(" - Inner steps: 10");
127 println!(" - Inner learning rate: 0.1");
128
129 // Generate sinusoid tasks
130 let generator = TaskGenerator::new(2, 2);
131 let tasks: Vec<MetaTask> = (0..15)
132 .map(|_| generator.generate_sinusoid_task(40))
133 .collect();
134
135 println!("\n Meta-training on 15 sinusoid tasks...");
136 let mut optimizer = Adam::new(0.001);
137 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
138
139 println!(" Reptile training complete");
140
141 // Analyze task similarities
142 println!("\n Task parameter statistics:");
143 for (i, task) in tasks.iter().take(3).enumerate() {
144 if let Some(amplitude) = task.metadata.get("amplitude") {
145 if let Some(phase) = task.metadata.get("phase") {
146 println!(" Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
147 }
148 }
149 }
150
151 Ok(())
152}
153
154/// `ProtoMAML` demonstration
155fn protomaml_demo() -> Result<()> {
156 let layers = vec![
157 QNNLayerType::EncodingLayer { num_features: 8 },
158 QNNLayerType::VariationalLayer { num_params: 16 },
159 QNNLayerType::EntanglementLayer {
160 connectivity: "full".to_string(),
161 },
162 QNNLayerType::MeasurementLayer {
163 measurement_basis: "computational".to_string(),
164 },
165 ];
166
167 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
168
169 let algorithm = MetaLearningAlgorithm::ProtoMAML {
170 inner_steps: 5,
171 inner_lr: 0.01,
172 proto_weight: 0.5, // Weight for prototype regularization
173 };
174
175 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
176
177 println!(" Created ProtoMAML meta-learner:");
178 println!(" - Combines MAML with prototypical networks");
179 println!(" - Prototype weight: 0.5");
180
181 // Generate classification tasks
182 let generator = TaskGenerator::new(8, 4);
183 let tasks: Vec<MetaTask> = (0..10)
184 .map(|_| generator.generate_rotation_task(50))
185 .collect();
186
187 println!("\n Meta-training on 4-way classification tasks...");
188 let mut optimizer = Adam::new(0.001);
189 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
190
191 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
192
193 Ok(())
194}
195
196/// Meta-SGD demonstration
197fn metasgd_demo() -> Result<()> {
198 let layers = vec![
199 QNNLayerType::EncodingLayer { num_features: 4 },
200 QNNLayerType::VariationalLayer { num_params: 12 },
201 QNNLayerType::MeasurementLayer {
202 measurement_basis: "Pauli-XYZ".to_string(),
203 },
204 ];
205
206 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
207
208 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
209
210 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
211
212 println!(" Created Meta-SGD learner:");
213 println!(" - Learns per-parameter learning rates");
214 println!(" - Inner steps: 3");
215
216 // Generate diverse tasks
217 let generator = TaskGenerator::new(4, 3);
218 let mut tasks = Vec::new();
219
220 // Mix different task types
221 for i in 0..12 {
222 if i % 2 == 0 {
223 tasks.push(generator.generate_rotation_task(30));
224 } else {
225 tasks.push(generator.generate_sinusoid_task(30));
226 }
227 }
228
229 println!("\n Meta-training on mixed task distribution...");
230 let mut optimizer = Adam::new(0.0005);
231 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
232
233 if let Some(lr) = meta_learner.per_param_lr() {
234 println!("\n Learned per-parameter learning rates:");
235 println!(
236 " - Min LR: {:.4}",
237 lr.iter().copied().fold(f64::INFINITY, f64::min)
238 );
239 println!(
240 " - Max LR: {:.4}",
241 lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
242 );
243 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
244 }
245
246 Ok(())
247}
248
249/// ANIL demonstration
250fn anil_demo() -> Result<()> {
251 let layers = vec![
252 QNNLayerType::EncodingLayer { num_features: 6 },
253 QNNLayerType::VariationalLayer { num_params: 12 },
254 QNNLayerType::EntanglementLayer {
255 connectivity: "circular".to_string(),
256 },
257 QNNLayerType::VariationalLayer { num_params: 12 },
258 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
259 QNNLayerType::MeasurementLayer {
260 measurement_basis: "computational".to_string(),
261 },
262 ];
263
264 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
265
266 let algorithm = MetaLearningAlgorithm::ANIL {
267 inner_steps: 10,
268 inner_lr: 0.1,
269 };
270
271 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
272
273 println!(" Created ANIL (Almost No Inner Loop) learner:");
274 println!(" - Only adapts final layer during inner loop");
275 println!(" - More parameter efficient than MAML");
276 println!(" - Inner steps: 10");
277
278 // Generate binary classification tasks
279 let generator = TaskGenerator::new(6, 2);
280 let tasks: Vec<MetaTask> = (0..15)
281 .map(|_| generator.generate_rotation_task(40))
282 .collect();
283
284 println!("\n Meta-training on binary classification tasks...");
285 let mut optimizer = Adam::new(0.001);
286 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
287
288 println!(" ANIL reduces computational cost while maintaining performance");
289
290 Ok(())
291}
292
293/// Continual meta-learning demonstration
294fn continual_meta_learning_demo() -> Result<()> {
295 let layers = vec![
296 QNNLayerType::EncodingLayer { num_features: 4 },
297 QNNLayerType::VariationalLayer { num_params: 8 },
298 QNNLayerType::MeasurementLayer {
299 measurement_basis: "computational".to_string(),
300 },
301 ];
302
303 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
304
305 let algorithm = MetaLearningAlgorithm::Reptile {
306 inner_steps: 5,
307 inner_lr: 0.05,
308 };
309
310 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
311 let mut continual_learner = ContinualMetaLearner::new(
312 meta_learner,
313 10, // memory capacity
314 0.3, // replay ratio
315 );
316
317 println!(" Created Continual Meta-Learner:");
318 println!(" - Memory capacity: 10 tasks");
319 println!(" - Replay ratio: 30%");
320
321 // Generate sequence of tasks
322 let generator = TaskGenerator::new(4, 2);
323
324 println!("\n Learning sequence of tasks...");
325 for i in 0..20 {
326 let task = if i < 10 {
327 generator.generate_rotation_task(30)
328 } else {
329 generator.generate_sinusoid_task(30)
330 };
331
332 continual_learner.learn_task(task)?;
333
334 if i % 5 == 4 {
335 println!(
336 " Learned {} tasks, memory contains {} unique tasks",
337 i + 1,
338 continual_learner.memory_buffer_len()
339 );
340 }
341 }
342
343 println!("\n Continual learning prevents catastrophic forgetting");
344
345 Ok(())
346}
347
348/// Task distribution analysis
349fn task_distribution_demo() -> Result<()> {
350 println!(" Analyzing task distributions...\n");
351
352 let generator = TaskGenerator::new(4, 3);
353
354 // Generate multiple tasks and analyze their properties
355 let mut rotation_tasks = Vec::new();
356 let mut sinusoid_tasks = Vec::new();
357
358 for _ in 0..50 {
359 rotation_tasks.push(generator.generate_rotation_task(20));
360 sinusoid_tasks.push(generator.generate_sinusoid_task(20));
361 }
362
363 // Analyze rotation tasks
364 println!(" Rotation Task Distribution:");
365 let angles: Vec<f64> = rotation_tasks
366 .iter()
367 .filter_map(|t| t.metadata.get("rotation_angle").copied())
368 .collect();
369
370 if !angles.is_empty() {
371 let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
372 println!(" - Mean rotation angle: {mean_angle:.2} rad");
373 println!(
374 " - Angle range: [{:.2}, {:.2}] rad",
375 angles.iter().copied().fold(f64::INFINITY, f64::min),
376 angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
377 );
378 }
379
380 // Analyze sinusoid tasks
381 println!("\n Sinusoid Task Distribution:");
382 let amplitudes: Vec<f64> = sinusoid_tasks
383 .iter()
384 .filter_map(|t| t.metadata.get("amplitude").copied())
385 .collect();
386
387 if !amplitudes.is_empty() {
388 let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
389 println!(" - Mean amplitude: {mean_amp:.2}");
390 println!(
391 " - Amplitude range: [{:.2}, {:.2}]",
392 amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
393 amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
394 );
395 }
396
397 // Compare task complexities
398 println!("\n Task Complexity Comparison:");
399 println!(
400 " - Rotation tasks: {} training samples each",
401 rotation_tasks[0].train_data.len()
402 );
403 println!(
404 " - Sinusoid tasks: {} training samples each",
405 sinusoid_tasks[0].train_data.len()
406 );
407 println!(" - Both use binary classification for simplicity");
408
409 Ok(())
410}

Source§
pub fn generate_rotation_task(&self, num_samples: usize) -> MetaTask
pub fn generate_rotation_task(&self, num_samples: usize) -> MetaTask
Generate classification task with rotated features
Examples found in repository?
examples/quantum_meta_learning.rs (line 81)
48fn maml_demo() -> Result<()> {
49 // Create quantum model
50 let layers = vec![
51 QNNLayerType::EncodingLayer { num_features: 4 },
52 QNNLayerType::VariationalLayer { num_params: 12 },
53 QNNLayerType::EntanglementLayer {
54 connectivity: "circular".to_string(),
55 },
56 QNNLayerType::VariationalLayer { num_params: 12 },
57 QNNLayerType::MeasurementLayer {
58 measurement_basis: "computational".to_string(),
59 },
60 ];
61
62 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64 // Create MAML learner
65 let algorithm = MetaLearningAlgorithm::MAML {
66 inner_steps: 5,
67 inner_lr: 0.01,
68 first_order: true, // Use first-order approximation for efficiency
69 };
70
71 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73 println!(" Created MAML meta-learner:");
74 println!(" - Inner steps: 5");
75 println!(" - Inner learning rate: 0.01");
76 println!(" - Using first-order approximation");
77
78 // Generate tasks
79 let generator = TaskGenerator::new(4, 3);
80 let tasks: Vec<MetaTask> = (0..20)
81 .map(|_| generator.generate_rotation_task(30))
82 .collect();
83
84 // Meta-train
85 println!("\n Meta-training on 20 rotation tasks...");
86 let mut optimizer = Adam::new(0.001);
87 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89 // Test adaptation
90 let test_task = generator.generate_rotation_task(20);
91 println!("\n Testing adaptation to new task...");
92
93 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94 println!(" Successfully adapted to new task");
95 println!(
96 " Parameter adaptation magnitude: {:.4}",
97 (&adapted_params - meta_learner.meta_params())
98 .mapv(f64::abs)
99 .mean()
100 .unwrap()
101 );
102
103 Ok(())
104}
105
106/// Reptile algorithm demonstration
107fn reptile_demo() -> Result<()> {
108 let layers = vec![
109 QNNLayerType::EncodingLayer { num_features: 2 },
110 QNNLayerType::VariationalLayer { num_params: 8 },
111 QNNLayerType::MeasurementLayer {
112 measurement_basis: "Pauli-Z".to_string(),
113 },
114 ];
115
116 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
117
118 let algorithm = MetaLearningAlgorithm::Reptile {
119 inner_steps: 10,
120 inner_lr: 0.1,
121 };
122
123 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
124
125 println!(" Created Reptile meta-learner:");
126 println!(" - Inner steps: 10");
127 println!(" - Inner learning rate: 0.1");
128
129 // Generate sinusoid tasks
130 let generator = TaskGenerator::new(2, 2);
131 let tasks: Vec<MetaTask> = (0..15)
132 .map(|_| generator.generate_sinusoid_task(40))
133 .collect();
134
135 println!("\n Meta-training on 15 sinusoid tasks...");
136 let mut optimizer = Adam::new(0.001);
137 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
138
139 println!(" Reptile training complete");
140
141 // Analyze task similarities
142 println!("\n Task parameter statistics:");
143 for (i, task) in tasks.iter().take(3).enumerate() {
144 if let Some(amplitude) = task.metadata.get("amplitude") {
145 if let Some(phase) = task.metadata.get("phase") {
146 println!(" Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
147 }
148 }
149 }
150
151 Ok(())
152}
153
154/// `ProtoMAML` demonstration
155fn protomaml_demo() -> Result<()> {
156 let layers = vec![
157 QNNLayerType::EncodingLayer { num_features: 8 },
158 QNNLayerType::VariationalLayer { num_params: 16 },
159 QNNLayerType::EntanglementLayer {
160 connectivity: "full".to_string(),
161 },
162 QNNLayerType::MeasurementLayer {
163 measurement_basis: "computational".to_string(),
164 },
165 ];
166
167 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
168
169 let algorithm = MetaLearningAlgorithm::ProtoMAML {
170 inner_steps: 5,
171 inner_lr: 0.01,
172 proto_weight: 0.5, // Weight for prototype regularization
173 };
174
175 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
176
177 println!(" Created ProtoMAML meta-learner:");
178 println!(" - Combines MAML with prototypical networks");
179 println!(" - Prototype weight: 0.5");
180
181 // Generate classification tasks
182 let generator = TaskGenerator::new(8, 4);
183 let tasks: Vec<MetaTask> = (0..10)
184 .map(|_| generator.generate_rotation_task(50))
185 .collect();
186
187 println!("\n Meta-training on 4-way classification tasks...");
188 let mut optimizer = Adam::new(0.001);
189 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
190
191 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
192
193 Ok(())
194}
195
196/// Meta-SGD demonstration
197fn metasgd_demo() -> Result<()> {
198 let layers = vec![
199 QNNLayerType::EncodingLayer { num_features: 4 },
200 QNNLayerType::VariationalLayer { num_params: 12 },
201 QNNLayerType::MeasurementLayer {
202 measurement_basis: "Pauli-XYZ".to_string(),
203 },
204 ];
205
206 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
207
208 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
209
210 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
211
212 println!(" Created Meta-SGD learner:");
213 println!(" - Learns per-parameter learning rates");
214 println!(" - Inner steps: 3");
215
216 // Generate diverse tasks
217 let generator = TaskGenerator::new(4, 3);
218 let mut tasks = Vec::new();
219
220 // Mix different task types
221 for i in 0..12 {
222 if i % 2 == 0 {
223 tasks.push(generator.generate_rotation_task(30));
224 } else {
225 tasks.push(generator.generate_sinusoid_task(30));
226 }
227 }
228
229 println!("\n Meta-training on mixed task distribution...");
230 let mut optimizer = Adam::new(0.0005);
231 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
232
233 if let Some(lr) = meta_learner.per_param_lr() {
234 println!("\n Learned per-parameter learning rates:");
235 println!(
236 " - Min LR: {:.4}",
237 lr.iter().copied().fold(f64::INFINITY, f64::min)
238 );
239 println!(
240 " - Max LR: {:.4}",
241 lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
242 );
243 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
244 }
245
246 Ok(())
247}
248
249/// ANIL demonstration
250fn anil_demo() -> Result<()> {
251 let layers = vec![
252 QNNLayerType::EncodingLayer { num_features: 6 },
253 QNNLayerType::VariationalLayer { num_params: 12 },
254 QNNLayerType::EntanglementLayer {
255 connectivity: "circular".to_string(),
256 },
257 QNNLayerType::VariationalLayer { num_params: 12 },
258 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
259 QNNLayerType::MeasurementLayer {
260 measurement_basis: "computational".to_string(),
261 },
262 ];
263
264 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
265
266 let algorithm = MetaLearningAlgorithm::ANIL {
267 inner_steps: 10,
268 inner_lr: 0.1,
269 };
270
271 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
272
273 println!(" Created ANIL (Almost No Inner Loop) learner:");
274 println!(" - Only adapts final layer during inner loop");
275 println!(" - More parameter efficient than MAML");
276 println!(" - Inner steps: 10");
277
278 // Generate binary classification tasks
279 let generator = TaskGenerator::new(6, 2);
280 let tasks: Vec<MetaTask> = (0..15)
281 .map(|_| generator.generate_rotation_task(40))
282 .collect();
283
284 println!("\n Meta-training on binary classification tasks...");
285 let mut optimizer = Adam::new(0.001);
286 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
287
288 println!(" ANIL reduces computational cost while maintaining performance");
289
290 Ok(())
291}
292
293/// Continual meta-learning demonstration
294fn continual_meta_learning_demo() -> Result<()> {
295 let layers = vec![
296 QNNLayerType::EncodingLayer { num_features: 4 },
297 QNNLayerType::VariationalLayer { num_params: 8 },
298 QNNLayerType::MeasurementLayer {
299 measurement_basis: "computational".to_string(),
300 },
301 ];
302
303 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
304
305 let algorithm = MetaLearningAlgorithm::Reptile {
306 inner_steps: 5,
307 inner_lr: 0.05,
308 };
309
310 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
311 let mut continual_learner = ContinualMetaLearner::new(
312 meta_learner,
313 10, // memory capacity
314 0.3, // replay ratio
315 );
316
317 println!(" Created Continual Meta-Learner:");
318 println!(" - Memory capacity: 10 tasks");
319 println!(" - Replay ratio: 30%");
320
321 // Generate sequence of tasks
322 let generator = TaskGenerator::new(4, 2);
323
324 println!("\n Learning sequence of tasks...");
325 for i in 0..20 {
326 let task = if i < 10 {
327 generator.generate_rotation_task(30)
328 } else {
329 generator.generate_sinusoid_task(30)
330 };
331
332 continual_learner.learn_task(task)?;
333
334 if i % 5 == 4 {
335 println!(
336 " Learned {} tasks, memory contains {} unique tasks",
337 i + 1,
338 continual_learner.memory_buffer_len()
339 );
340 }
341 }
342
343 println!("\n Continual learning prevents catastrophic forgetting");
344
345 Ok(())
346}
347
348/// Task distribution analysis
349fn task_distribution_demo() -> Result<()> {
350 println!(" Analyzing task distributions...\n");
351
352 let generator = TaskGenerator::new(4, 3);
353
354 // Generate multiple tasks and analyze their properties
355 let mut rotation_tasks = Vec::new();
356 let mut sinusoid_tasks = Vec::new();
357
358 for _ in 0..50 {
359 rotation_tasks.push(generator.generate_rotation_task(20));
360 sinusoid_tasks.push(generator.generate_sinusoid_task(20));
361 }
362
363 // Analyze rotation tasks
364 println!(" Rotation Task Distribution:");
365 let angles: Vec<f64> = rotation_tasks
366 .iter()
367 .filter_map(|t| t.metadata.get("rotation_angle").copied())
368 .collect();
369
370 if !angles.is_empty() {
371 let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
372 println!(" - Mean rotation angle: {mean_angle:.2} rad");
373 println!(
374 " - Angle range: [{:.2}, {:.2}] rad",
375 angles.iter().copied().fold(f64::INFINITY, f64::min),
376 angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
377 );
378 }
379
380 // Analyze sinusoid tasks
381 println!("\n Sinusoid Task Distribution:");
382 let amplitudes: Vec<f64> = sinusoid_tasks
383 .iter()
384 .filter_map(|t| t.metadata.get("amplitude").copied())
385 .collect();
386
387 if !amplitudes.is_empty() {
388 let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
389 println!(" - Mean amplitude: {mean_amp:.2}");
390 println!(
391 " - Amplitude range: [{:.2}, {:.2}]",
392 amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
393 amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
394 );
395 }
396
397 // Compare task complexities
398 println!("\n Task Complexity Comparison:");
399 println!(
400 " - Rotation tasks: {} training samples each",
401 rotation_tasks[0].train_data.len()
402 );
403 println!(
404 " - Sinusoid tasks: {} training samples each",
405 sinusoid_tasks[0].train_data.len()
406 );
407 println!(" - Both use binary classification for simplicity");
408
409 Ok(())
410}

Auto Trait Implementations§
impl Freeze for TaskGenerator
impl RefUnwindSafe for TaskGenerator
impl Send for TaskGenerator
impl Sync for TaskGenerator
impl Unpin for TaskGenerator
impl UnwindSafe for TaskGenerator
Blanket Implementations§
Source§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

impl<T> BorrowMut<T> for T
where
    T: ?Sized,

Source§
fn borrow_mut(&mut self) -> &mut T

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§
impl<T> IntoEither for T

impl<T> IntoEither for T

Source§
fn into_either(self, into_left: bool) -> Either<Self, Self>

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

Source§
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

Source§
impl<T> Pointable for T

impl<T> Pointable for T
Source§
impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,

impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,

Source§
fn to_subset(&self) -> Option<SS>

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more

Source§
fn is_in_subset(&self) -> bool

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).

Source§
fn to_subset_unchecked(&self) -> SS

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

Source§
fn from_subset(element: &SS) -> SP

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.