pub struct TaskGenerator { /* private fields */ }

Expand description
Task generator for meta-learning experiments
Implementations§
Source§impl TaskGenerator
impl TaskGenerator
Source§pub fn new(feature_dim: usize, num_classes: usize) -> Self
pub fn new(feature_dim: usize, num_classes: usize) -> Self
Create new task generator
Examples found in repository?
examples/quantum_meta_learning.rs (line 87)
56fn maml_demo() -> Result<()> {
57 // Create quantum model
58 let layers = vec![
59 QNNLayerType::EncodingLayer { num_features: 4 },
60 QNNLayerType::VariationalLayer { num_params: 12 },
61 QNNLayerType::EntanglementLayer {
62 connectivity: "circular".to_string(),
63 },
64 QNNLayerType::VariationalLayer { num_params: 12 },
65 QNNLayerType::MeasurementLayer {
66 measurement_basis: "computational".to_string(),
67 },
68 ];
69
70 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
71
72 // Create MAML learner
73 let algorithm = MetaLearningAlgorithm::MAML {
74 inner_steps: 5,
75 inner_lr: 0.01,
76 first_order: true, // Use first-order approximation for efficiency
77 };
78
79 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
80
81 println!(" Created MAML meta-learner:");
82 println!(" - Inner steps: 5");
83 println!(" - Inner learning rate: 0.01");
84 println!(" - Using first-order approximation");
85
86 // Generate tasks
87 let generator = TaskGenerator::new(4, 3);
88 let tasks: Vec<MetaTask> = (0..20)
89 .map(|_| generator.generate_rotation_task(30))
90 .collect();
91
92 // Meta-train
93 println!("\n Meta-training on 20 rotation tasks...");
94 let mut optimizer = Adam::new(0.001);
95 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
96
97 // Test adaptation
98 let test_task = generator.generate_rotation_task(20);
99 println!("\n Testing adaptation to new task...");
100
101 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
102 println!(" Successfully adapted to new task");
103 println!(
104 " Parameter adaptation magnitude: {:.4}",
105 (&adapted_params - meta_learner.meta_params())
106 .mapv(f64::abs)
107 .mean()
108 .unwrap()
109 );
110
111 Ok(())
112}
113
114/// Reptile algorithm demonstration
115fn reptile_demo() -> Result<()> {
116 let layers = vec![
117 QNNLayerType::EncodingLayer { num_features: 2 },
118 QNNLayerType::VariationalLayer { num_params: 8 },
119 QNNLayerType::MeasurementLayer {
120 measurement_basis: "Pauli-Z".to_string(),
121 },
122 ];
123
124 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
125
126 let algorithm = MetaLearningAlgorithm::Reptile {
127 inner_steps: 10,
128 inner_lr: 0.1,
129 };
130
131 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
132
133 println!(" Created Reptile meta-learner:");
134 println!(" - Inner steps: 10");
135 println!(" - Inner learning rate: 0.1");
136
137 // Generate sinusoid tasks
138 let generator = TaskGenerator::new(2, 2);
139 let tasks: Vec<MetaTask> = (0..15)
140 .map(|_| generator.generate_sinusoid_task(40))
141 .collect();
142
143 println!("\n Meta-training on 15 sinusoid tasks...");
144 let mut optimizer = Adam::new(0.001);
145 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
146
147 println!(" Reptile training complete");
148
149 // Analyze task similarities
150 println!("\n Task parameter statistics:");
151 for (i, task) in tasks.iter().take(3).enumerate() {
152 if let Some(amplitude) = task.metadata.get("amplitude") {
153 if let Some(phase) = task.metadata.get("phase") {
154 println!(" Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
155 }
156 }
157 }
158
159 Ok(())
160}
161
162/// `ProtoMAML` demonstration
163fn protomaml_demo() -> Result<()> {
164 let layers = vec![
165 QNNLayerType::EncodingLayer { num_features: 8 },
166 QNNLayerType::VariationalLayer { num_params: 16 },
167 QNNLayerType::EntanglementLayer {
168 connectivity: "full".to_string(),
169 },
170 QNNLayerType::MeasurementLayer {
171 measurement_basis: "computational".to_string(),
172 },
173 ];
174
175 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
176
177 let algorithm = MetaLearningAlgorithm::ProtoMAML {
178 inner_steps: 5,
179 inner_lr: 0.01,
180 proto_weight: 0.5, // Weight for prototype regularization
181 };
182
183 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
184
185 println!(" Created ProtoMAML meta-learner:");
186 println!(" - Combines MAML with prototypical networks");
187 println!(" - Prototype weight: 0.5");
188
189 // Generate classification tasks
190 let generator = TaskGenerator::new(8, 4);
191 let tasks: Vec<MetaTask> = (0..10)
192 .map(|_| generator.generate_rotation_task(50))
193 .collect();
194
195 println!("\n Meta-training on 4-way classification tasks...");
196 let mut optimizer = Adam::new(0.001);
197 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
198
199 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
200
201 Ok(())
202}
203
204/// Meta-SGD demonstration
205fn metasgd_demo() -> Result<()> {
206 let layers = vec![
207 QNNLayerType::EncodingLayer { num_features: 4 },
208 QNNLayerType::VariationalLayer { num_params: 12 },
209 QNNLayerType::MeasurementLayer {
210 measurement_basis: "Pauli-XYZ".to_string(),
211 },
212 ];
213
214 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
215
216 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
217
218 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
219
220 println!(" Created Meta-SGD learner:");
221 println!(" - Learns per-parameter learning rates");
222 println!(" - Inner steps: 3");
223
224 // Generate diverse tasks
225 let generator = TaskGenerator::new(4, 3);
226 let mut tasks = Vec::new();
227
228 // Mix different task types
229 for i in 0..12 {
230 if i % 2 == 0 {
231 tasks.push(generator.generate_rotation_task(30));
232 } else {
233 tasks.push(generator.generate_sinusoid_task(30));
234 }
235 }
236
237 println!("\n Meta-training on mixed task distribution...");
238 let mut optimizer = Adam::new(0.0005);
239 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
240
241 if let Some(lr) = meta_learner.per_param_lr() {
242 println!("\n Learned per-parameter learning rates:");
243 println!(
244 " - Min LR: {:.4}",
245 lr.iter().copied().fold(f64::INFINITY, f64::min)
246 );
247 println!(
248 " - Max LR: {:.4}",
249 lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
250 );
251 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
252 }
253
254 Ok(())
255}
256
257/// ANIL demonstration
258fn anil_demo() -> Result<()> {
259 let layers = vec![
260 QNNLayerType::EncodingLayer { num_features: 6 },
261 QNNLayerType::VariationalLayer { num_params: 12 },
262 QNNLayerType::EntanglementLayer {
263 connectivity: "circular".to_string(),
264 },
265 QNNLayerType::VariationalLayer { num_params: 12 },
266 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
267 QNNLayerType::MeasurementLayer {
268 measurement_basis: "computational".to_string(),
269 },
270 ];
271
272 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
273
274 let algorithm = MetaLearningAlgorithm::ANIL {
275 inner_steps: 10,
276 inner_lr: 0.1,
277 };
278
279 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
280
281 println!(" Created ANIL (Almost No Inner Loop) learner:");
282 println!(" - Only adapts final layer during inner loop");
283 println!(" - More parameter efficient than MAML");
284 println!(" - Inner steps: 10");
285
286 // Generate binary classification tasks
287 let generator = TaskGenerator::new(6, 2);
288 let tasks: Vec<MetaTask> = (0..15)
289 .map(|_| generator.generate_rotation_task(40))
290 .collect();
291
292 println!("\n Meta-training on binary classification tasks...");
293 let mut optimizer = Adam::new(0.001);
294 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
295
296 println!(" ANIL reduces computational cost while maintaining performance");
297
298 Ok(())
299}
300
301/// Continual meta-learning demonstration
302fn continual_meta_learning_demo() -> Result<()> {
303 let layers = vec![
304 QNNLayerType::EncodingLayer { num_features: 4 },
305 QNNLayerType::VariationalLayer { num_params: 8 },
306 QNNLayerType::MeasurementLayer {
307 measurement_basis: "computational".to_string(),
308 },
309 ];
310
311 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
312
313 let algorithm = MetaLearningAlgorithm::Reptile {
314 inner_steps: 5,
315 inner_lr: 0.05,
316 };
317
318 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
319 let mut continual_learner = ContinualMetaLearner::new(
320 meta_learner,
321 10, // memory capacity
322 0.3, // replay ratio
323 );
324
325 println!(" Created Continual Meta-Learner:");
326 println!(" - Memory capacity: 10 tasks");
327 println!(" - Replay ratio: 30%");
328
329 // Generate sequence of tasks
330 let generator = TaskGenerator::new(4, 2);
331
332 println!("\n Learning sequence of tasks...");
333 for i in 0..20 {
334 let task = if i < 10 {
335 generator.generate_rotation_task(30)
336 } else {
337 generator.generate_sinusoid_task(30)
338 };
339
340 continual_learner.learn_task(task)?;
341
342 if i % 5 == 4 {
343 println!(
344 " Learned {} tasks, memory contains {} unique tasks",
345 i + 1,
346 continual_learner.memory_buffer_len()
347 );
348 }
349 }
350
351 println!("\n Continual learning prevents catastrophic forgetting");
352
353 Ok(())
354}
355
356/// Task distribution analysis
357fn task_distribution_demo() -> Result<()> {
358 println!(" Analyzing task distributions...\n");
359
360 let generator = TaskGenerator::new(4, 3);
361
362 // Generate multiple tasks and analyze their properties
363 let mut rotation_tasks = Vec::new();
364 let mut sinusoid_tasks = Vec::new();
365
366 for _ in 0..50 {
367 rotation_tasks.push(generator.generate_rotation_task(20));
368 sinusoid_tasks.push(generator.generate_sinusoid_task(20));
369 }
370
371 // Analyze rotation tasks
372 println!(" Rotation Task Distribution:");
373 let angles: Vec<f64> = rotation_tasks
374 .iter()
375 .filter_map(|t| t.metadata.get("rotation_angle").copied())
376 .collect();
377
378 if !angles.is_empty() {
379 let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
380 println!(" - Mean rotation angle: {mean_angle:.2} rad");
381 println!(
382 " - Angle range: [{:.2}, {:.2}] rad",
383 angles.iter().copied().fold(f64::INFINITY, f64::min),
384 angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
385 );
386 }
387
388 // Analyze sinusoid tasks
389 println!("\n Sinusoid Task Distribution:");
390 let amplitudes: Vec<f64> = sinusoid_tasks
391 .iter()
392 .filter_map(|t| t.metadata.get("amplitude").copied())
393 .collect();
394
395 if !amplitudes.is_empty() {
396 let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
397 println!(" - Mean amplitude: {mean_amp:.2}");
398 println!(
399 " - Amplitude range: [{:.2}, {:.2}]",
400 amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
401 amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
402 );
403 }
404
405 // Compare task complexities
406 println!("\n Task Complexity Comparison:");
407 println!(
408 " - Rotation tasks: {} training samples each",
409 rotation_tasks[0].train_data.len()
410 );
411 println!(
412 " - Sinusoid tasks: {} training samples each",
413 sinusoid_tasks[0].train_data.len()
414 );
415 println!(" - Both use binary classification for simplicity");
416
417 Ok(())
418}

Source§pub fn generate_sinusoid_task(&self, num_samples: usize) -> MetaTask
pub fn generate_sinusoid_task(&self, num_samples: usize) -> MetaTask
Generate sinusoid regression task
Examples found in repository?
examples/quantum_meta_learning.rs (line 140)
115fn reptile_demo() -> Result<()> {
116 let layers = vec![
117 QNNLayerType::EncodingLayer { num_features: 2 },
118 QNNLayerType::VariationalLayer { num_params: 8 },
119 QNNLayerType::MeasurementLayer {
120 measurement_basis: "Pauli-Z".to_string(),
121 },
122 ];
123
124 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
125
126 let algorithm = MetaLearningAlgorithm::Reptile {
127 inner_steps: 10,
128 inner_lr: 0.1,
129 };
130
131 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
132
133 println!(" Created Reptile meta-learner:");
134 println!(" - Inner steps: 10");
135 println!(" - Inner learning rate: 0.1");
136
137 // Generate sinusoid tasks
138 let generator = TaskGenerator::new(2, 2);
139 let tasks: Vec<MetaTask> = (0..15)
140 .map(|_| generator.generate_sinusoid_task(40))
141 .collect();
142
143 println!("\n Meta-training on 15 sinusoid tasks...");
144 let mut optimizer = Adam::new(0.001);
145 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
146
147 println!(" Reptile training complete");
148
149 // Analyze task similarities
150 println!("\n Task parameter statistics:");
151 for (i, task) in tasks.iter().take(3).enumerate() {
152 if let Some(amplitude) = task.metadata.get("amplitude") {
153 if let Some(phase) = task.metadata.get("phase") {
154 println!(" Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
155 }
156 }
157 }
158
159 Ok(())
160}
161
162/// `ProtoMAML` demonstration
163fn protomaml_demo() -> Result<()> {
164 let layers = vec![
165 QNNLayerType::EncodingLayer { num_features: 8 },
166 QNNLayerType::VariationalLayer { num_params: 16 },
167 QNNLayerType::EntanglementLayer {
168 connectivity: "full".to_string(),
169 },
170 QNNLayerType::MeasurementLayer {
171 measurement_basis: "computational".to_string(),
172 },
173 ];
174
175 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
176
177 let algorithm = MetaLearningAlgorithm::ProtoMAML {
178 inner_steps: 5,
179 inner_lr: 0.01,
180 proto_weight: 0.5, // Weight for prototype regularization
181 };
182
183 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
184
185 println!(" Created ProtoMAML meta-learner:");
186 println!(" - Combines MAML with prototypical networks");
187 println!(" - Prototype weight: 0.5");
188
189 // Generate classification tasks
190 let generator = TaskGenerator::new(8, 4);
191 let tasks: Vec<MetaTask> = (0..10)
192 .map(|_| generator.generate_rotation_task(50))
193 .collect();
194
195 println!("\n Meta-training on 4-way classification tasks...");
196 let mut optimizer = Adam::new(0.001);
197 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
198
199 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
200
201 Ok(())
202}
203
204/// Meta-SGD demonstration
205fn metasgd_demo() -> Result<()> {
206 let layers = vec![
207 QNNLayerType::EncodingLayer { num_features: 4 },
208 QNNLayerType::VariationalLayer { num_params: 12 },
209 QNNLayerType::MeasurementLayer {
210 measurement_basis: "Pauli-XYZ".to_string(),
211 },
212 ];
213
214 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
215
216 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
217
218 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
219
220 println!(" Created Meta-SGD learner:");
221 println!(" - Learns per-parameter learning rates");
222 println!(" - Inner steps: 3");
223
224 // Generate diverse tasks
225 let generator = TaskGenerator::new(4, 3);
226 let mut tasks = Vec::new();
227
228 // Mix different task types
229 for i in 0..12 {
230 if i % 2 == 0 {
231 tasks.push(generator.generate_rotation_task(30));
232 } else {
233 tasks.push(generator.generate_sinusoid_task(30));
234 }
235 }
236
237 println!("\n Meta-training on mixed task distribution...");
238 let mut optimizer = Adam::new(0.0005);
239 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
240
241 if let Some(lr) = meta_learner.per_param_lr() {
242 println!("\n Learned per-parameter learning rates:");
243 println!(
244 " - Min LR: {:.4}",
245 lr.iter().copied().fold(f64::INFINITY, f64::min)
246 );
247 println!(
248 " - Max LR: {:.4}",
249 lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
250 );
251 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
252 }
253
254 Ok(())
255}
256
257/// ANIL demonstration
258fn anil_demo() -> Result<()> {
259 let layers = vec![
260 QNNLayerType::EncodingLayer { num_features: 6 },
261 QNNLayerType::VariationalLayer { num_params: 12 },
262 QNNLayerType::EntanglementLayer {
263 connectivity: "circular".to_string(),
264 },
265 QNNLayerType::VariationalLayer { num_params: 12 },
266 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
267 QNNLayerType::MeasurementLayer {
268 measurement_basis: "computational".to_string(),
269 },
270 ];
271
272 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
273
274 let algorithm = MetaLearningAlgorithm::ANIL {
275 inner_steps: 10,
276 inner_lr: 0.1,
277 };
278
279 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
280
281 println!(" Created ANIL (Almost No Inner Loop) learner:");
282 println!(" - Only adapts final layer during inner loop");
283 println!(" - More parameter efficient than MAML");
284 println!(" - Inner steps: 10");
285
286 // Generate binary classification tasks
287 let generator = TaskGenerator::new(6, 2);
288 let tasks: Vec<MetaTask> = (0..15)
289 .map(|_| generator.generate_rotation_task(40))
290 .collect();
291
292 println!("\n Meta-training on binary classification tasks...");
293 let mut optimizer = Adam::new(0.001);
294 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
295
296 println!(" ANIL reduces computational cost while maintaining performance");
297
298 Ok(())
299}
300
301/// Continual meta-learning demonstration
302fn continual_meta_learning_demo() -> Result<()> {
303 let layers = vec![
304 QNNLayerType::EncodingLayer { num_features: 4 },
305 QNNLayerType::VariationalLayer { num_params: 8 },
306 QNNLayerType::MeasurementLayer {
307 measurement_basis: "computational".to_string(),
308 },
309 ];
310
311 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
312
313 let algorithm = MetaLearningAlgorithm::Reptile {
314 inner_steps: 5,
315 inner_lr: 0.05,
316 };
317
318 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
319 let mut continual_learner = ContinualMetaLearner::new(
320 meta_learner,
321 10, // memory capacity
322 0.3, // replay ratio
323 );
324
325 println!(" Created Continual Meta-Learner:");
326 println!(" - Memory capacity: 10 tasks");
327 println!(" - Replay ratio: 30%");
328
329 // Generate sequence of tasks
330 let generator = TaskGenerator::new(4, 2);
331
332 println!("\n Learning sequence of tasks...");
333 for i in 0..20 {
334 let task = if i < 10 {
335 generator.generate_rotation_task(30)
336 } else {
337 generator.generate_sinusoid_task(30)
338 };
339
340 continual_learner.learn_task(task)?;
341
342 if i % 5 == 4 {
343 println!(
344 " Learned {} tasks, memory contains {} unique tasks",
345 i + 1,
346 continual_learner.memory_buffer_len()
347 );
348 }
349 }
350
351 println!("\n Continual learning prevents catastrophic forgetting");
352
353 Ok(())
354}
355
356/// Task distribution analysis
357fn task_distribution_demo() -> Result<()> {
358 println!(" Analyzing task distributions...\n");
359
360 let generator = TaskGenerator::new(4, 3);
361
362 // Generate multiple tasks and analyze their properties
363 let mut rotation_tasks = Vec::new();
364 let mut sinusoid_tasks = Vec::new();
365
366 for _ in 0..50 {
367 rotation_tasks.push(generator.generate_rotation_task(20));
368 sinusoid_tasks.push(generator.generate_sinusoid_task(20));
369 }
370
371 // Analyze rotation tasks
372 println!(" Rotation Task Distribution:");
373 let angles: Vec<f64> = rotation_tasks
374 .iter()
375 .filter_map(|t| t.metadata.get("rotation_angle").copied())
376 .collect();
377
378 if !angles.is_empty() {
379 let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
380 println!(" - Mean rotation angle: {mean_angle:.2} rad");
381 println!(
382 " - Angle range: [{:.2}, {:.2}] rad",
383 angles.iter().copied().fold(f64::INFINITY, f64::min),
384 angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
385 );
386 }
387
388 // Analyze sinusoid tasks
389 println!("\n Sinusoid Task Distribution:");
390 let amplitudes: Vec<f64> = sinusoid_tasks
391 .iter()
392 .filter_map(|t| t.metadata.get("amplitude").copied())
393 .collect();
394
395 if !amplitudes.is_empty() {
396 let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
397 println!(" - Mean amplitude: {mean_amp:.2}");
398 println!(
399 " - Amplitude range: [{:.2}, {:.2}]",
400 amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
401 amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
402 );
403 }
404
405 // Compare task complexities
406 println!("\n Task Complexity Comparison:");
407 println!(
408 " - Rotation tasks: {} training samples each",
409 rotation_tasks[0].train_data.len()
410 );
411 println!(
412 " - Sinusoid tasks: {} training samples each",
413 sinusoid_tasks[0].train_data.len()
414 );
415 println!(" - Both use binary classification for simplicity");
416
417 Ok(())
418}

Source§pub fn generate_rotation_task(&self, num_samples: usize) -> MetaTask
pub fn generate_rotation_task(&self, num_samples: usize) -> MetaTask
Generate classification task with rotated features
Examples found in repository?
examples/quantum_meta_learning.rs (line 89)
56fn maml_demo() -> Result<()> {
57 // Create quantum model
58 let layers = vec![
59 QNNLayerType::EncodingLayer { num_features: 4 },
60 QNNLayerType::VariationalLayer { num_params: 12 },
61 QNNLayerType::EntanglementLayer {
62 connectivity: "circular".to_string(),
63 },
64 QNNLayerType::VariationalLayer { num_params: 12 },
65 QNNLayerType::MeasurementLayer {
66 measurement_basis: "computational".to_string(),
67 },
68 ];
69
70 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
71
72 // Create MAML learner
73 let algorithm = MetaLearningAlgorithm::MAML {
74 inner_steps: 5,
75 inner_lr: 0.01,
76 first_order: true, // Use first-order approximation for efficiency
77 };
78
79 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
80
81 println!(" Created MAML meta-learner:");
82 println!(" - Inner steps: 5");
83 println!(" - Inner learning rate: 0.01");
84 println!(" - Using first-order approximation");
85
86 // Generate tasks
87 let generator = TaskGenerator::new(4, 3);
88 let tasks: Vec<MetaTask> = (0..20)
89 .map(|_| generator.generate_rotation_task(30))
90 .collect();
91
92 // Meta-train
93 println!("\n Meta-training on 20 rotation tasks...");
94 let mut optimizer = Adam::new(0.001);
95 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
96
97 // Test adaptation
98 let test_task = generator.generate_rotation_task(20);
99 println!("\n Testing adaptation to new task...");
100
101 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
102 println!(" Successfully adapted to new task");
103 println!(
104 " Parameter adaptation magnitude: {:.4}",
105 (&adapted_params - meta_learner.meta_params())
106 .mapv(f64::abs)
107 .mean()
108 .unwrap()
109 );
110
111 Ok(())
112}
113
114/// Reptile algorithm demonstration
115fn reptile_demo() -> Result<()> {
116 let layers = vec![
117 QNNLayerType::EncodingLayer { num_features: 2 },
118 QNNLayerType::VariationalLayer { num_params: 8 },
119 QNNLayerType::MeasurementLayer {
120 measurement_basis: "Pauli-Z".to_string(),
121 },
122 ];
123
124 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
125
126 let algorithm = MetaLearningAlgorithm::Reptile {
127 inner_steps: 10,
128 inner_lr: 0.1,
129 };
130
131 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
132
133 println!(" Created Reptile meta-learner:");
134 println!(" - Inner steps: 10");
135 println!(" - Inner learning rate: 0.1");
136
137 // Generate sinusoid tasks
138 let generator = TaskGenerator::new(2, 2);
139 let tasks: Vec<MetaTask> = (0..15)
140 .map(|_| generator.generate_sinusoid_task(40))
141 .collect();
142
143 println!("\n Meta-training on 15 sinusoid tasks...");
144 let mut optimizer = Adam::new(0.001);
145 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
146
147 println!(" Reptile training complete");
148
149 // Analyze task similarities
150 println!("\n Task parameter statistics:");
151 for (i, task) in tasks.iter().take(3).enumerate() {
152 if let Some(amplitude) = task.metadata.get("amplitude") {
153 if let Some(phase) = task.metadata.get("phase") {
154 println!(" Task {i}: amplitude={amplitude:.2}, phase={phase:.2}");
155 }
156 }
157 }
158
159 Ok(())
160}
161
162/// `ProtoMAML` demonstration
163fn protomaml_demo() -> Result<()> {
164 let layers = vec![
165 QNNLayerType::EncodingLayer { num_features: 8 },
166 QNNLayerType::VariationalLayer { num_params: 16 },
167 QNNLayerType::EntanglementLayer {
168 connectivity: "full".to_string(),
169 },
170 QNNLayerType::MeasurementLayer {
171 measurement_basis: "computational".to_string(),
172 },
173 ];
174
175 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
176
177 let algorithm = MetaLearningAlgorithm::ProtoMAML {
178 inner_steps: 5,
179 inner_lr: 0.01,
180 proto_weight: 0.5, // Weight for prototype regularization
181 };
182
183 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
184
185 println!(" Created ProtoMAML meta-learner:");
186 println!(" - Combines MAML with prototypical networks");
187 println!(" - Prototype weight: 0.5");
188
189 // Generate classification tasks
190 let generator = TaskGenerator::new(8, 4);
191 let tasks: Vec<MetaTask> = (0..10)
192 .map(|_| generator.generate_rotation_task(50))
193 .collect();
194
195 println!("\n Meta-training on 4-way classification tasks...");
196 let mut optimizer = Adam::new(0.001);
197 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
198
199 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
200
201 Ok(())
202}
203
204/// Meta-SGD demonstration
205fn metasgd_demo() -> Result<()> {
206 let layers = vec![
207 QNNLayerType::EncodingLayer { num_features: 4 },
208 QNNLayerType::VariationalLayer { num_params: 12 },
209 QNNLayerType::MeasurementLayer {
210 measurement_basis: "Pauli-XYZ".to_string(),
211 },
212 ];
213
214 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
215
216 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
217
218 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
219
220 println!(" Created Meta-SGD learner:");
221 println!(" - Learns per-parameter learning rates");
222 println!(" - Inner steps: 3");
223
224 // Generate diverse tasks
225 let generator = TaskGenerator::new(4, 3);
226 let mut tasks = Vec::new();
227
228 // Mix different task types
229 for i in 0..12 {
230 if i % 2 == 0 {
231 tasks.push(generator.generate_rotation_task(30));
232 } else {
233 tasks.push(generator.generate_sinusoid_task(30));
234 }
235 }
236
237 println!("\n Meta-training on mixed task distribution...");
238 let mut optimizer = Adam::new(0.0005);
239 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
240
241 if let Some(lr) = meta_learner.per_param_lr() {
242 println!("\n Learned per-parameter learning rates:");
243 println!(
244 " - Min LR: {:.4}",
245 lr.iter().copied().fold(f64::INFINITY, f64::min)
246 );
247 println!(
248 " - Max LR: {:.4}",
249 lr.iter().copied().fold(f64::NEG_INFINITY, f64::max)
250 );
251 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
252 }
253
254 Ok(())
255}
256
257/// ANIL demonstration
258fn anil_demo() -> Result<()> {
259 let layers = vec![
260 QNNLayerType::EncodingLayer { num_features: 6 },
261 QNNLayerType::VariationalLayer { num_params: 12 },
262 QNNLayerType::EntanglementLayer {
263 connectivity: "circular".to_string(),
264 },
265 QNNLayerType::VariationalLayer { num_params: 12 },
266 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
267 QNNLayerType::MeasurementLayer {
268 measurement_basis: "computational".to_string(),
269 },
270 ];
271
272 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
273
274 let algorithm = MetaLearningAlgorithm::ANIL {
275 inner_steps: 10,
276 inner_lr: 0.1,
277 };
278
279 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
280
281 println!(" Created ANIL (Almost No Inner Loop) learner:");
282 println!(" - Only adapts final layer during inner loop");
283 println!(" - More parameter efficient than MAML");
284 println!(" - Inner steps: 10");
285
286 // Generate binary classification tasks
287 let generator = TaskGenerator::new(6, 2);
288 let tasks: Vec<MetaTask> = (0..15)
289 .map(|_| generator.generate_rotation_task(40))
290 .collect();
291
292 println!("\n Meta-training on binary classification tasks...");
293 let mut optimizer = Adam::new(0.001);
294 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
295
296 println!(" ANIL reduces computational cost while maintaining performance");
297
298 Ok(())
299}
300
301/// Continual meta-learning demonstration
302fn continual_meta_learning_demo() -> Result<()> {
303 let layers = vec![
304 QNNLayerType::EncodingLayer { num_features: 4 },
305 QNNLayerType::VariationalLayer { num_params: 8 },
306 QNNLayerType::MeasurementLayer {
307 measurement_basis: "computational".to_string(),
308 },
309 ];
310
311 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
312
313 let algorithm = MetaLearningAlgorithm::Reptile {
314 inner_steps: 5,
315 inner_lr: 0.05,
316 };
317
318 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
319 let mut continual_learner = ContinualMetaLearner::new(
320 meta_learner,
321 10, // memory capacity
322 0.3, // replay ratio
323 );
324
325 println!(" Created Continual Meta-Learner:");
326 println!(" - Memory capacity: 10 tasks");
327 println!(" - Replay ratio: 30%");
328
329 // Generate sequence of tasks
330 let generator = TaskGenerator::new(4, 2);
331
332 println!("\n Learning sequence of tasks...");
333 for i in 0..20 {
334 let task = if i < 10 {
335 generator.generate_rotation_task(30)
336 } else {
337 generator.generate_sinusoid_task(30)
338 };
339
340 continual_learner.learn_task(task)?;
341
342 if i % 5 == 4 {
343 println!(
344 " Learned {} tasks, memory contains {} unique tasks",
345 i + 1,
346 continual_learner.memory_buffer_len()
347 );
348 }
349 }
350
351 println!("\n Continual learning prevents catastrophic forgetting");
352
353 Ok(())
354}
355
356/// Task distribution analysis
357fn task_distribution_demo() -> Result<()> {
358 println!(" Analyzing task distributions...\n");
359
360 let generator = TaskGenerator::new(4, 3);
361
362 // Generate multiple tasks and analyze their properties
363 let mut rotation_tasks = Vec::new();
364 let mut sinusoid_tasks = Vec::new();
365
366 for _ in 0..50 {
367 rotation_tasks.push(generator.generate_rotation_task(20));
368 sinusoid_tasks.push(generator.generate_sinusoid_task(20));
369 }
370
371 // Analyze rotation tasks
372 println!(" Rotation Task Distribution:");
373 let angles: Vec<f64> = rotation_tasks
374 .iter()
375 .filter_map(|t| t.metadata.get("rotation_angle").copied())
376 .collect();
377
378 if !angles.is_empty() {
379 let mean_angle = angles.iter().sum::<f64>() / angles.len() as f64;
380 println!(" - Mean rotation angle: {mean_angle:.2} rad");
381 println!(
382 " - Angle range: [{:.2}, {:.2}] rad",
383 angles.iter().copied().fold(f64::INFINITY, f64::min),
384 angles.iter().copied().fold(f64::NEG_INFINITY, f64::max)
385 );
386 }
387
388 // Analyze sinusoid tasks
389 println!("\n Sinusoid Task Distribution:");
390 let amplitudes: Vec<f64> = sinusoid_tasks
391 .iter()
392 .filter_map(|t| t.metadata.get("amplitude").copied())
393 .collect();
394
395 if !amplitudes.is_empty() {
396 let mean_amp = amplitudes.iter().sum::<f64>() / amplitudes.len() as f64;
397 println!(" - Mean amplitude: {mean_amp:.2}");
398 println!(
399 " - Amplitude range: [{:.2}, {:.2}]",
400 amplitudes.iter().copied().fold(f64::INFINITY, f64::min),
401 amplitudes.iter().copied().fold(f64::NEG_INFINITY, f64::max)
402 );
403 }
404
405 // Compare task complexities
406 println!("\n Task Complexity Comparison:");
407 println!(
408 " - Rotation tasks: {} training samples each",
409 rotation_tasks[0].train_data.len()
410 );
411 println!(
412 " - Sinusoid tasks: {} training samples each",
413 sinusoid_tasks[0].train_data.len()
414 );
415 println!(" - Both use binary classification for simplicity");
416
417 Ok(())
418}

Auto Trait Implementations§
impl Freeze for TaskGenerator
impl RefUnwindSafe for TaskGenerator
impl Send for TaskGenerator
impl Sync for TaskGenerator
impl Unpin for TaskGenerator
impl UnwindSafe for TaskGenerator
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more

Source§impl<T> Pointable for T
impl<T> Pointable for T
Source§impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
Source§fn to_subset(&self) -> Option<SS>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more

Source§fn is_in_subset(&self) -> bool
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).

Source§fn to_subset_unchecked(&self) -> SS
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.

Source§fn from_subset(element: &SS) -> SP
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.