pub struct QuantumMetaLearner { /* private fields */ }
Expand description
Base quantum meta-learner
Implementations§
Source§impl QuantumMetaLearner
impl QuantumMetaLearner
Source§
pub fn new(
algorithm: MetaLearningAlgorithm,
model: QuantumNeuralNetwork,
) -> Self
pub fn new( algorithm: MetaLearningAlgorithm, model: QuantumNeuralNetwork, ) -> Self
Create a new quantum meta-learner
Examples found in repository?
examples/quantum_meta_learning.rs (line 71)
48fn maml_demo() -> Result<()> {
49 // Create quantum model
50 let layers = vec![
51 QNNLayerType::EncodingLayer { num_features: 4 },
52 QNNLayerType::VariationalLayer { num_params: 12 },
53 QNNLayerType::EntanglementLayer {
54 connectivity: "circular".to_string(),
55 },
56 QNNLayerType::VariationalLayer { num_params: 12 },
57 QNNLayerType::MeasurementLayer {
58 measurement_basis: "computational".to_string(),
59 },
60 ];
61
62 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64 // Create MAML learner
65 let algorithm = MetaLearningAlgorithm::MAML {
66 inner_steps: 5,
67 inner_lr: 0.01,
68 first_order: true, // Use first-order approximation for efficiency
69 };
70
71 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73 println!(" Created MAML meta-learner:");
74 println!(" - Inner steps: 5");
75 println!(" - Inner learning rate: 0.01");
76 println!(" - Using first-order approximation");
77
78 // Generate tasks
79 let generator = TaskGenerator::new(4, 3);
80 let tasks: Vec<MetaTask> = (0..20)
81 .map(|_| generator.generate_rotation_task(30))
82 .collect();
83
84 // Meta-train
85 println!("\n Meta-training on 20 rotation tasks...");
86 let mut optimizer = Adam::new(0.001);
87 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89 // Test adaptation
90 let test_task = generator.generate_rotation_task(20);
91 println!("\n Testing adaptation to new task...");
92
93 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94 println!(" Successfully adapted to new task");
95 println!(
96 " Parameter adaptation magnitude: {:.4}",
97 (&adapted_params - meta_learner.meta_params())
98 .mapv(|x| x.abs())
99 .mean()
100 .unwrap()
101 );
102
103 Ok(())
104}
105
106/// Reptile algorithm demonstration
107fn reptile_demo() -> Result<()> {
108 let layers = vec![
109 QNNLayerType::EncodingLayer { num_features: 2 },
110 QNNLayerType::VariationalLayer { num_params: 8 },
111 QNNLayerType::MeasurementLayer {
112 measurement_basis: "Pauli-Z".to_string(),
113 },
114 ];
115
116 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
117
118 let algorithm = MetaLearningAlgorithm::Reptile {
119 inner_steps: 10,
120 inner_lr: 0.1,
121 };
122
123 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
124
125 println!(" Created Reptile meta-learner:");
126 println!(" - Inner steps: 10");
127 println!(" - Inner learning rate: 0.1");
128
129 // Generate sinusoid tasks
130 let generator = TaskGenerator::new(2, 2);
131 let tasks: Vec<MetaTask> = (0..15)
132 .map(|_| generator.generate_sinusoid_task(40))
133 .collect();
134
135 println!("\n Meta-training on 15 sinusoid tasks...");
136 let mut optimizer = Adam::new(0.001);
137 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
138
139 println!(" Reptile training complete");
140
141 // Analyze task similarities
142 println!("\n Task parameter statistics:");
143 for (i, task) in tasks.iter().take(3).enumerate() {
144 if let Some(amplitude) = task.metadata.get("amplitude") {
145 if let Some(phase) = task.metadata.get("phase") {
146 println!(
147 " Task {}: amplitude={:.2}, phase={:.2}",
148 i, amplitude, phase
149 );
150 }
151 }
152 }
153
154 Ok(())
155}
156
157/// ProtoMAML demonstration
158fn protomaml_demo() -> Result<()> {
159 let layers = vec![
160 QNNLayerType::EncodingLayer { num_features: 8 },
161 QNNLayerType::VariationalLayer { num_params: 16 },
162 QNNLayerType::EntanglementLayer {
163 connectivity: "full".to_string(),
164 },
165 QNNLayerType::MeasurementLayer {
166 measurement_basis: "computational".to_string(),
167 },
168 ];
169
170 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
171
172 let algorithm = MetaLearningAlgorithm::ProtoMAML {
173 inner_steps: 5,
174 inner_lr: 0.01,
175 proto_weight: 0.5, // Weight for prototype regularization
176 };
177
178 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
179
180 println!(" Created ProtoMAML meta-learner:");
181 println!(" - Combines MAML with prototypical networks");
182 println!(" - Prototype weight: 0.5");
183
184 // Generate classification tasks
185 let generator = TaskGenerator::new(8, 4);
186 let tasks: Vec<MetaTask> = (0..10)
187 .map(|_| generator.generate_rotation_task(50))
188 .collect();
189
190 println!("\n Meta-training on 4-way classification tasks...");
191 let mut optimizer = Adam::new(0.001);
192 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
193
194 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
195
196 Ok(())
197}
198
199/// Meta-SGD demonstration
200fn metasgd_demo() -> Result<()> {
201 let layers = vec![
202 QNNLayerType::EncodingLayer { num_features: 4 },
203 QNNLayerType::VariationalLayer { num_params: 12 },
204 QNNLayerType::MeasurementLayer {
205 measurement_basis: "Pauli-XYZ".to_string(),
206 },
207 ];
208
209 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
210
211 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
212
213 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
214
215 println!(" Created Meta-SGD learner:");
216 println!(" - Learns per-parameter learning rates");
217 println!(" - Inner steps: 3");
218
219 // Generate diverse tasks
220 let generator = TaskGenerator::new(4, 3);
221 let mut tasks = Vec::new();
222
223 // Mix different task types
224 for i in 0..12 {
225 if i % 2 == 0 {
226 tasks.push(generator.generate_rotation_task(30));
227 } else {
228 tasks.push(generator.generate_sinusoid_task(30));
229 }
230 }
231
232 println!("\n Meta-training on mixed task distribution...");
233 let mut optimizer = Adam::new(0.0005);
234 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
235
236 if let Some(lr) = meta_learner.per_param_lr() {
237 println!("\n Learned per-parameter learning rates:");
238 println!(
239 " - Min LR: {:.4}",
240 lr.iter().cloned().fold(f64::INFINITY, f64::min)
241 );
242 println!(
243 " - Max LR: {:.4}",
244 lr.iter().cloned().fold(f64::NEG_INFINITY, f64::max)
245 );
246 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
247 }
248
249 Ok(())
250}
251
252/// ANIL demonstration
253fn anil_demo() -> Result<()> {
254 let layers = vec![
255 QNNLayerType::EncodingLayer { num_features: 6 },
256 QNNLayerType::VariationalLayer { num_params: 12 },
257 QNNLayerType::EntanglementLayer {
258 connectivity: "circular".to_string(),
259 },
260 QNNLayerType::VariationalLayer { num_params: 12 },
261 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
262 QNNLayerType::MeasurementLayer {
263 measurement_basis: "computational".to_string(),
264 },
265 ];
266
267 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
268
269 let algorithm = MetaLearningAlgorithm::ANIL {
270 inner_steps: 10,
271 inner_lr: 0.1,
272 };
273
274 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
275
276 println!(" Created ANIL (Almost No Inner Loop) learner:");
277 println!(" - Only adapts final layer during inner loop");
278 println!(" - More parameter efficient than MAML");
279 println!(" - Inner steps: 10");
280
281 // Generate binary classification tasks
282 let generator = TaskGenerator::new(6, 2);
283 let tasks: Vec<MetaTask> = (0..15)
284 .map(|_| generator.generate_rotation_task(40))
285 .collect();
286
287 println!("\n Meta-training on binary classification tasks...");
288 let mut optimizer = Adam::new(0.001);
289 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
290
291 println!(" ANIL reduces computational cost while maintaining performance");
292
293 Ok(())
294}
295
296/// Continual meta-learning demonstration
297fn continual_meta_learning_demo() -> Result<()> {
298 let layers = vec![
299 QNNLayerType::EncodingLayer { num_features: 4 },
300 QNNLayerType::VariationalLayer { num_params: 8 },
301 QNNLayerType::MeasurementLayer {
302 measurement_basis: "computational".to_string(),
303 },
304 ];
305
306 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 2)?;
307
308 let algorithm = MetaLearningAlgorithm::Reptile {
309 inner_steps: 5,
310 inner_lr: 0.05,
311 };
312
313 let meta_learner = QuantumMetaLearner::new(algorithm, qnn);
314 let mut continual_learner = ContinualMetaLearner::new(
315 meta_learner,
316 10, // memory capacity
317 0.3, // replay ratio
318 );
319
320 println!(" Created Continual Meta-Learner:");
321 println!(" - Memory capacity: 10 tasks");
322 println!(" - Replay ratio: 30%");
323
324 // Generate sequence of tasks
325 let generator = TaskGenerator::new(4, 2);
326
327 println!("\n Learning sequence of tasks...");
328 for i in 0..20 {
329 let task = if i < 10 {
330 generator.generate_rotation_task(30)
331 } else {
332 generator.generate_sinusoid_task(30)
333 };
334
335 continual_learner.learn_task(task)?;
336
337 if i % 5 == 4 {
338 println!(
339 " Learned {} tasks, memory contains {} unique tasks",
340 i + 1,
341 continual_learner.memory_buffer_len()
342 );
343 }
344 }
345
346 println!("\n Continual learning prevents catastrophic forgetting");
347
348 Ok(())
349}
Source§
pub fn meta_train(
&mut self,
tasks: &[MetaTask],
meta_optimizer: &mut dyn Optimizer,
meta_epochs: usize,
tasks_per_batch: usize,
) -> Result<()>
pub fn meta_train( &mut self, tasks: &[MetaTask], meta_optimizer: &mut dyn Optimizer, meta_epochs: usize, tasks_per_batch: usize, ) -> Result<()>
Meta-train on multiple tasks
Examples found in repository?
examples/quantum_meta_learning.rs (line 87)
48fn maml_demo() -> Result<()> {
49 // Create quantum model
50 let layers = vec![
51 QNNLayerType::EncodingLayer { num_features: 4 },
52 QNNLayerType::VariationalLayer { num_params: 12 },
53 QNNLayerType::EntanglementLayer {
54 connectivity: "circular".to_string(),
55 },
56 QNNLayerType::VariationalLayer { num_params: 12 },
57 QNNLayerType::MeasurementLayer {
58 measurement_basis: "computational".to_string(),
59 },
60 ];
61
62 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64 // Create MAML learner
65 let algorithm = MetaLearningAlgorithm::MAML {
66 inner_steps: 5,
67 inner_lr: 0.01,
68 first_order: true, // Use first-order approximation for efficiency
69 };
70
71 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73 println!(" Created MAML meta-learner:");
74 println!(" - Inner steps: 5");
75 println!(" - Inner learning rate: 0.01");
76 println!(" - Using first-order approximation");
77
78 // Generate tasks
79 let generator = TaskGenerator::new(4, 3);
80 let tasks: Vec<MetaTask> = (0..20)
81 .map(|_| generator.generate_rotation_task(30))
82 .collect();
83
84 // Meta-train
85 println!("\n Meta-training on 20 rotation tasks...");
86 let mut optimizer = Adam::new(0.001);
87 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89 // Test adaptation
90 let test_task = generator.generate_rotation_task(20);
91 println!("\n Testing adaptation to new task...");
92
93 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94 println!(" Successfully adapted to new task");
95 println!(
96 " Parameter adaptation magnitude: {:.4}",
97 (&adapted_params - meta_learner.meta_params())
98 .mapv(|x| x.abs())
99 .mean()
100 .unwrap()
101 );
102
103 Ok(())
104}
105
106/// Reptile algorithm demonstration
107fn reptile_demo() -> Result<()> {
108 let layers = vec![
109 QNNLayerType::EncodingLayer { num_features: 2 },
110 QNNLayerType::VariationalLayer { num_params: 8 },
111 QNNLayerType::MeasurementLayer {
112 measurement_basis: "Pauli-Z".to_string(),
113 },
114 ];
115
116 let qnn = QuantumNeuralNetwork::new(layers, 4, 2, 2)?;
117
118 let algorithm = MetaLearningAlgorithm::Reptile {
119 inner_steps: 10,
120 inner_lr: 0.1,
121 };
122
123 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
124
125 println!(" Created Reptile meta-learner:");
126 println!(" - Inner steps: 10");
127 println!(" - Inner learning rate: 0.1");
128
129 // Generate sinusoid tasks
130 let generator = TaskGenerator::new(2, 2);
131 let tasks: Vec<MetaTask> = (0..15)
132 .map(|_| generator.generate_sinusoid_task(40))
133 .collect();
134
135 println!("\n Meta-training on 15 sinusoid tasks...");
136 let mut optimizer = Adam::new(0.001);
137 meta_learner.meta_train(&tasks, &mut optimizer, 30, 3)?;
138
139 println!(" Reptile training complete");
140
141 // Analyze task similarities
142 println!("\n Task parameter statistics:");
143 for (i, task) in tasks.iter().take(3).enumerate() {
144 if let Some(amplitude) = task.metadata.get("amplitude") {
145 if let Some(phase) = task.metadata.get("phase") {
146 println!(
147 " Task {}: amplitude={:.2}, phase={:.2}",
148 i, amplitude, phase
149 );
150 }
151 }
152 }
153
154 Ok(())
155}
156
157/// ProtoMAML demonstration
158fn protomaml_demo() -> Result<()> {
159 let layers = vec![
160 QNNLayerType::EncodingLayer { num_features: 8 },
161 QNNLayerType::VariationalLayer { num_params: 16 },
162 QNNLayerType::EntanglementLayer {
163 connectivity: "full".to_string(),
164 },
165 QNNLayerType::MeasurementLayer {
166 measurement_basis: "computational".to_string(),
167 },
168 ];
169
170 let qnn = QuantumNeuralNetwork::new(layers, 4, 8, 16)?;
171
172 let algorithm = MetaLearningAlgorithm::ProtoMAML {
173 inner_steps: 5,
174 inner_lr: 0.01,
175 proto_weight: 0.5, // Weight for prototype regularization
176 };
177
178 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
179
180 println!(" Created ProtoMAML meta-learner:");
181 println!(" - Combines MAML with prototypical networks");
182 println!(" - Prototype weight: 0.5");
183
184 // Generate classification tasks
185 let generator = TaskGenerator::new(8, 4);
186 let tasks: Vec<MetaTask> = (0..10)
187 .map(|_| generator.generate_rotation_task(50))
188 .collect();
189
190 println!("\n Meta-training on 4-way classification tasks...");
191 let mut optimizer = Adam::new(0.001);
192 meta_learner.meta_train(&tasks, &mut optimizer, 40, 2)?;
193
194 println!(" ProtoMAML leverages both gradient-based and metric-based learning");
195
196 Ok(())
197}
198
199/// Meta-SGD demonstration
200fn metasgd_demo() -> Result<()> {
201 let layers = vec![
202 QNNLayerType::EncodingLayer { num_features: 4 },
203 QNNLayerType::VariationalLayer { num_params: 12 },
204 QNNLayerType::MeasurementLayer {
205 measurement_basis: "Pauli-XYZ".to_string(),
206 },
207 ];
208
209 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
210
211 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
212
213 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
214
215 println!(" Created Meta-SGD learner:");
216 println!(" - Learns per-parameter learning rates");
217 println!(" - Inner steps: 3");
218
219 // Generate diverse tasks
220 let generator = TaskGenerator::new(4, 3);
221 let mut tasks = Vec::new();
222
223 // Mix different task types
224 for i in 0..12 {
225 if i % 2 == 0 {
226 tasks.push(generator.generate_rotation_task(30));
227 } else {
228 tasks.push(generator.generate_sinusoid_task(30));
229 }
230 }
231
232 println!("\n Meta-training on mixed task distribution...");
233 let mut optimizer = Adam::new(0.0005);
234 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
235
236 if let Some(lr) = meta_learner.per_param_lr() {
237 println!("\n Learned per-parameter learning rates:");
238 println!(
239 " - Min LR: {:.4}",
240 lr.iter().cloned().fold(f64::INFINITY, f64::min)
241 );
242 println!(
243 " - Max LR: {:.4}",
244 lr.iter().cloned().fold(f64::NEG_INFINITY, f64::max)
245 );
246 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
247 }
248
249 Ok(())
250}
251
252/// ANIL demonstration
253fn anil_demo() -> Result<()> {
254 let layers = vec![
255 QNNLayerType::EncodingLayer { num_features: 6 },
256 QNNLayerType::VariationalLayer { num_params: 12 },
257 QNNLayerType::EntanglementLayer {
258 connectivity: "circular".to_string(),
259 },
260 QNNLayerType::VariationalLayer { num_params: 12 },
261 QNNLayerType::VariationalLayer { num_params: 6 }, // Final layer (adapted)
262 QNNLayerType::MeasurementLayer {
263 measurement_basis: "computational".to_string(),
264 },
265 ];
266
267 let qnn = QuantumNeuralNetwork::new(layers, 4, 6, 2)?;
268
269 let algorithm = MetaLearningAlgorithm::ANIL {
270 inner_steps: 10,
271 inner_lr: 0.1,
272 };
273
274 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
275
276 println!(" Created ANIL (Almost No Inner Loop) learner:");
277 println!(" - Only adapts final layer during inner loop");
278 println!(" - More parameter efficient than MAML");
279 println!(" - Inner steps: 10");
280
281 // Generate binary classification tasks
282 let generator = TaskGenerator::new(6, 2);
283 let tasks: Vec<MetaTask> = (0..15)
284 .map(|_| generator.generate_rotation_task(40))
285 .collect();
286
287 println!("\n Meta-training on binary classification tasks...");
288 let mut optimizer = Adam::new(0.001);
289 meta_learner.meta_train(&tasks, &mut optimizer, 40, 5)?;
290
291 println!(" ANIL reduces computational cost while maintaining performance");
292
293 Ok(())
294}
Source§
pub fn adapt_to_task(&mut self, task: &MetaTask) -> Result<Array1<f64>>
pub fn adapt_to_task(&mut self, task: &MetaTask) -> Result<Array1<f64>>
Adapt to new task
Examples found in repository?
examples/quantum_meta_learning.rs (line 93)
48fn maml_demo() -> Result<()> {
49 // Create quantum model
50 let layers = vec![
51 QNNLayerType::EncodingLayer { num_features: 4 },
52 QNNLayerType::VariationalLayer { num_params: 12 },
53 QNNLayerType::EntanglementLayer {
54 connectivity: "circular".to_string(),
55 },
56 QNNLayerType::VariationalLayer { num_params: 12 },
57 QNNLayerType::MeasurementLayer {
58 measurement_basis: "computational".to_string(),
59 },
60 ];
61
62 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64 // Create MAML learner
65 let algorithm = MetaLearningAlgorithm::MAML {
66 inner_steps: 5,
67 inner_lr: 0.01,
68 first_order: true, // Use first-order approximation for efficiency
69 };
70
71 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73 println!(" Created MAML meta-learner:");
74 println!(" - Inner steps: 5");
75 println!(" - Inner learning rate: 0.01");
76 println!(" - Using first-order approximation");
77
78 // Generate tasks
79 let generator = TaskGenerator::new(4, 3);
80 let tasks: Vec<MetaTask> = (0..20)
81 .map(|_| generator.generate_rotation_task(30))
82 .collect();
83
84 // Meta-train
85 println!("\n Meta-training on 20 rotation tasks...");
86 let mut optimizer = Adam::new(0.001);
87 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89 // Test adaptation
90 let test_task = generator.generate_rotation_task(20);
91 println!("\n Testing adaptation to new task...");
92
93 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94 println!(" Successfully adapted to new task");
95 println!(
96 " Parameter adaptation magnitude: {:.4}",
97 (&adapted_params - meta_learner.meta_params())
98 .mapv(|x| x.abs())
99 .mean()
100 .unwrap()
101 );
102
103 Ok(())
104}
Source§
pub fn meta_params(&self) -> &Array1<f64>
pub fn meta_params(&self) -> &Array1<f64>
Get meta parameters
Examples found in repository?
examples/quantum_meta_learning.rs (line 97)
48fn maml_demo() -> Result<()> {
49 // Create quantum model
50 let layers = vec![
51 QNNLayerType::EncodingLayer { num_features: 4 },
52 QNNLayerType::VariationalLayer { num_params: 12 },
53 QNNLayerType::EntanglementLayer {
54 connectivity: "circular".to_string(),
55 },
56 QNNLayerType::VariationalLayer { num_params: 12 },
57 QNNLayerType::MeasurementLayer {
58 measurement_basis: "computational".to_string(),
59 },
60 ];
61
62 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
63
64 // Create MAML learner
65 let algorithm = MetaLearningAlgorithm::MAML {
66 inner_steps: 5,
67 inner_lr: 0.01,
68 first_order: true, // Use first-order approximation for efficiency
69 };
70
71 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
72
73 println!(" Created MAML meta-learner:");
74 println!(" - Inner steps: 5");
75 println!(" - Inner learning rate: 0.01");
76 println!(" - Using first-order approximation");
77
78 // Generate tasks
79 let generator = TaskGenerator::new(4, 3);
80 let tasks: Vec<MetaTask> = (0..20)
81 .map(|_| generator.generate_rotation_task(30))
82 .collect();
83
84 // Meta-train
85 println!("\n Meta-training on 20 rotation tasks...");
86 let mut optimizer = Adam::new(0.001);
87 meta_learner.meta_train(&tasks, &mut optimizer, 50, 5)?;
88
89 // Test adaptation
90 let test_task = generator.generate_rotation_task(20);
91 println!("\n Testing adaptation to new task...");
92
93 let adapted_params = meta_learner.adapt_to_task(&test_task)?;
94 println!(" Successfully adapted to new task");
95 println!(
96 " Parameter adaptation magnitude: {:.4}",
97 (&adapted_params - meta_learner.meta_params())
98 .mapv(|x| x.abs())
99 .mean()
100 .unwrap()
101 );
102
103 Ok(())
104}
Source§
pub fn per_param_lr(&self) -> Option<&Array1<f64>>
pub fn per_param_lr(&self) -> Option<&Array1<f64>>
Get per-parameter learning rates
Examples found in repository?
examples/quantum_meta_learning.rs (line 236)
200fn metasgd_demo() -> Result<()> {
201 let layers = vec![
202 QNNLayerType::EncodingLayer { num_features: 4 },
203 QNNLayerType::VariationalLayer { num_params: 12 },
204 QNNLayerType::MeasurementLayer {
205 measurement_basis: "Pauli-XYZ".to_string(),
206 },
207 ];
208
209 let qnn = QuantumNeuralNetwork::new(layers, 4, 4, 3)?;
210
211 let algorithm = MetaLearningAlgorithm::MetaSGD { inner_steps: 3 };
212
213 let mut meta_learner = QuantumMetaLearner::new(algorithm, qnn);
214
215 println!(" Created Meta-SGD learner:");
216 println!(" - Learns per-parameter learning rates");
217 println!(" - Inner steps: 3");
218
219 // Generate diverse tasks
220 let generator = TaskGenerator::new(4, 3);
221 let mut tasks = Vec::new();
222
223 // Mix different task types
224 for i in 0..12 {
225 if i % 2 == 0 {
226 tasks.push(generator.generate_rotation_task(30));
227 } else {
228 tasks.push(generator.generate_sinusoid_task(30));
229 }
230 }
231
232 println!("\n Meta-training on mixed task distribution...");
233 let mut optimizer = Adam::new(0.0005);
234 meta_learner.meta_train(&tasks, &mut optimizer, 50, 4)?;
235
236 if let Some(lr) = meta_learner.per_param_lr() {
237 println!("\n Learned per-parameter learning rates:");
238 println!(
239 " - Min LR: {:.4}",
240 lr.iter().cloned().fold(f64::INFINITY, f64::min)
241 );
242 println!(
243 " - Max LR: {:.4}",
244 lr.iter().cloned().fold(f64::NEG_INFINITY, f64::max)
245 );
246 println!(" - Mean LR: {:.4}", lr.mean().unwrap());
247 }
248
249 Ok(())
250}
Auto Trait Implementations§
impl Freeze for QuantumMetaLearner
impl RefUnwindSafe for QuantumMetaLearner
impl Send for QuantumMetaLearner
impl Sync for QuantumMetaLearner
impl Unpin for QuantumMetaLearner
impl UnwindSafe for QuantumMetaLearner
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§impl<T> Pointable for T
impl<T> Pointable for T
Source§impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
Source§fn to_subset(&self) -> Option<SS>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more
Source§fn is_in_subset(&self) -> bool
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).
Source§fn to_subset_unchecked(&self) -> SS
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.
Source§fn from_subset(element: &SS) -> SP
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.