quantrs2_sim/quantum_reservoir_computing_enhanced/state.rs

use scirs2_core::ndarray::{Array1, Array2};
use scirs2_core::Complex64;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, VecDeque};

/// Memory-capacity metrics for the quantum reservoir.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct MemoryMetrics {
    /// Linear memory capacity
    pub linear_capacity: f64,
    /// Nonlinear memory capacity
    pub nonlinear_capacity: f64,
    /// Total memory capacity
    pub total_capacity: f64,
    /// Information processing capacity
    pub processing_capacity: f64,
    /// Temporal correlation length
    pub correlation_length: f64,
    /// Memory decay rate
    pub decay_rate: f64,
    /// Memory efficiency
    pub efficiency: f64,
}

/// Dynamic state of the quantum reservoir.
#[derive(Debug, Clone)]
pub struct QuantumReservoirState {
    /// Current quantum state vector
    pub state_vector: Array1<Complex64>,
    /// Recent state history used for memory analysis
    pub state_history: VecDeque<Array1<Complex64>>,
    /// Maximum number of past states retained in `state_history`
    pub memory_capacity: usize,
    /// Measured observables, keyed by name
    pub observables: HashMap<String, f64>,
    /// Pairwise qubit correlation matrix
    pub correlations: Array2<f64>,
    /// Higher-order correlation measures, keyed by name
    pub higher_order_correlations: HashMap<String, f64>,
    /// Entanglement measures, keyed by name
    pub entanglement_measures: HashMap<String, f64>,
    /// Memory-capacity metrics
    pub memory_metrics: MemoryMetrics,
    /// Discrete time index of the most recent update
    pub time_index: usize,
    /// Timestamp of the most recent update
    pub last_update: f64,
    /// Exponentially smoothed activity level
    pub activity_level: f64,
    /// Recent performance scores
    pub performance_history: VecDeque<f64>,
}

impl QuantumReservoirState {
    /// Create a new reservoir state for `num_qubits` qubits, initialized to |0...0⟩,
    /// retaining at most `memory_capacity` past states.
    #[must_use]
    pub fn new(num_qubits: usize, memory_capacity: usize) -> Self {
        let state_size = 1usize << num_qubits;
        let mut state_vector = Array1::zeros(state_size);
        state_vector[0] = Complex64::new(1.0, 0.0);

        Self {
            state_vector,
            state_history: VecDeque::with_capacity(memory_capacity),
            memory_capacity,
            observables: HashMap::new(),
            correlations: Array2::zeros((num_qubits, num_qubits)),
            higher_order_correlations: HashMap::new(),
            entanglement_measures: HashMap::new(),
            memory_metrics: MemoryMetrics::default(),
            time_index: 0,
            last_update: 0.0,
            activity_level: 0.0,
            performance_history: VecDeque::with_capacity(1000),
        }
    }

    /// Push the current state into the history and replace it with `new_state`.
    pub fn update_state(&mut self, new_state: Array1<Complex64>, timestamp: f64) {
        // Archive the outgoing state and trim the history to the configured bound.
        // (`VecDeque::capacity()` reports allocated capacity, which grows on push,
        // so it cannot serve as the bound.)
        self.state_history.push_back(self.state_vector.clone());
        while self.state_history.len() > self.memory_capacity {
            self.state_history.pop_front();
        }

        self.state_vector = new_state;
        self.time_index += 1;
        self.last_update = timestamp;

        self.update_activity_level();
    }

    /// Update the smoothed activity level from the mean squared amplitude of the current state.
    fn update_activity_level(&mut self) {
        let activity = self
            .state_vector
            .iter()
            .map(scirs2_core::Complex::norm_sqr)
            .sum::<f64>()
            / self.state_vector.len() as f64;

        // Exponential moving average with smoothing factor alpha.
        let alpha = 0.1;
        self.activity_level = alpha * activity + (1.0 - alpha) * self.activity_level;
    }

    /// Estimate how quickly memory of past states decays: the average over the history of
    /// fidelity to the current state, exponentially discounted by how far back each state lies.
    #[must_use]
    pub fn calculate_memory_decay(&self) -> f64 {
        if self.state_history.len() < 2 {
            return 0.0;
        }

        let mut total_decay = 0.0;
        let current_state = &self.state_vector;

        for (i, past_state) in self.state_history.iter().enumerate() {
            let fidelity = self.calculate_fidelity(current_state, past_state);
            let time_diff = (self.state_history.len() - i) as f64;
            total_decay += fidelity * (-time_diff * 0.1).exp();
        }

        total_decay / self.state_history.len() as f64
    }

    /// Fidelity between two pure states, |⟨state1|state2⟩|².
    fn calculate_fidelity(&self, state1: &Array1<Complex64>, state2: &Array1<Complex64>) -> f64 {
        let overlap = state1
            .iter()
            .zip(state2.iter())
            .map(|(a, b)| a.conj() * b)
            .sum::<Complex64>();
        overlap.norm_sqr()
    }
}

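// Usage sketch (not from the original file): a minimal test showing how the state-tracking API
// above is expected to be driven. The qubit count, capacity, module name, and test name are
// illustrative choices, assuming this file is compiled as a regular module of the crate.
#[cfg(test)]
mod quantum_reservoir_state_usage_sketch {
    use super::*;

    #[test]
    fn history_stays_bounded_and_decay_is_finite() {
        let num_qubits = 2_usize;
        let memory_capacity = 4_usize;
        let dim = 1_usize << num_qubits;
        let mut state = QuantumReservoirState::new(num_qubits, memory_capacity);

        // Feed more updates than the history can hold; the deque must stay bounded.
        for step in 0_usize..10 {
            let mut next = Array1::<Complex64>::zeros(dim);
            // Alternate population between two basis states so successive states differ.
            next[step % 2] = Complex64::new(1.0, 0.0);
            state.update_state(next, step as f64);
        }

        assert!(state.state_history.len() <= memory_capacity);
        assert_eq!(state.time_index, 10);

        // Memory decay is a finite, non-negative summary of fidelity to past states.
        let decay = state.calculate_memory_decay();
        assert!(decay >= 0.0 && decay.is_finite());
    }
}
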
/// Training data container for reservoir computing.
#[derive(Debug, Clone)]
pub struct ReservoirTrainingData {
    /// Input sequences
    pub inputs: Vec<Array1<f64>>,
    /// Target output sequences
    pub targets: Vec<Array1<f64>>,
    /// Timestamp of each sample
    pub timestamps: Vec<f64>,
    /// Optional precomputed feature vectors
    pub features: Option<Vec<Array1<f64>>>,
    /// Optional class labels
    pub labels: Option<Vec<usize>>,
    /// Optional per-sample sequence lengths
    pub sequence_lengths: Option<Vec<usize>>,
    /// Optional mask marking missing entries
    pub missing_mask: Option<Vec<Array1<bool>>>,
    /// Optional per-sample weights
    pub sample_weights: Option<Vec<f64>>,
    /// Optional per-sample metadata
    pub metadata: Option<Vec<HashMap<String, String>>>,
}

impl ReservoirTrainingData {
    /// Create a training-data set from inputs, targets, and timestamps; optional fields start empty.
    #[must_use]
    pub const fn new(
        inputs: Vec<Array1<f64>>,
        targets: Vec<Array1<f64>>,
        timestamps: Vec<f64>,
    ) -> Self {
        Self {
            inputs,
            targets,
            timestamps,
            features: None,
            labels: None,
            sequence_lengths: None,
            missing_mask: None,
            sample_weights: None,
            metadata: None,
        }
    }

    /// Attach precomputed feature vectors (builder style).
    #[must_use]
    pub fn with_features(mut self, features: Vec<Array1<f64>>) -> Self {
        self.features = Some(features);
        self
    }

    /// Attach class labels (builder style).
    #[must_use]
    pub fn with_labels(mut self, labels: Vec<usize>) -> Self {
        self.labels = Some(labels);
        self
    }

    /// Attach per-sample weights (builder style).
    #[must_use]
    pub fn with_weights(mut self, weights: Vec<f64>) -> Self {
        self.sample_weights = Some(weights);
        self
    }

    /// Number of samples.
    #[must_use]
    pub fn len(&self) -> usize {
        self.inputs.len()
    }

    /// Whether the data set contains no samples.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.inputs.is_empty()
    }

    /// Split into train and test partitions in chronological order; `test_ratio` is the
    /// fraction of samples (expected in `[0, 1]`) placed in the test partition.
    #[must_use]
    pub fn train_test_split(&self, test_ratio: f64) -> (Self, Self) {
        let test_size = (self.len() as f64 * test_ratio) as usize;
        let train_size = self.len() - test_size;

        let train_data = Self {
            inputs: self.inputs[..train_size].to_vec(),
            targets: self.targets[..train_size].to_vec(),
            timestamps: self.timestamps[..train_size].to_vec(),
            features: self.features.as_ref().map(|f| f[..train_size].to_vec()),
            labels: self.labels.as_ref().map(|l| l[..train_size].to_vec()),
            sequence_lengths: self
                .sequence_lengths
                .as_ref()
                .map(|s| s[..train_size].to_vec()),
            missing_mask: self.missing_mask.as_ref().map(|m| m[..train_size].to_vec()),
            sample_weights: self
                .sample_weights
                .as_ref()
                .map(|w| w[..train_size].to_vec()),
            metadata: self.metadata.as_ref().map(|m| m[..train_size].to_vec()),
        };

        let test_data = Self {
            inputs: self.inputs[train_size..].to_vec(),
            targets: self.targets[train_size..].to_vec(),
            timestamps: self.timestamps[train_size..].to_vec(),
            features: self.features.as_ref().map(|f| f[train_size..].to_vec()),
            labels: self.labels.as_ref().map(|l| l[train_size..].to_vec()),
            sequence_lengths: self
                .sequence_lengths
                .as_ref()
                .map(|s| s[train_size..].to_vec()),
            missing_mask: self.missing_mask.as_ref().map(|m| m[train_size..].to_vec()),
            sample_weights: self
                .sample_weights
                .as_ref()
                .map(|w| w[train_size..].to_vec()),
            metadata: self.metadata.as_ref().map(|m| m[train_size..].to_vec()),
        };

        (train_data, test_data)
    }
}

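// Usage sketch (not from the original file): exercises the builder methods and the chronological
// split above. The sample count, the 0.2 ratio, and the module/test names are illustrative choices.
#[cfg(test)]
mod reservoir_training_data_usage_sketch {
    use super::*;

    #[test]
    fn split_preserves_counts_and_order() {
        let n = 10_usize;
        let inputs: Vec<Array1<f64>> = (0..n).map(|i| Array1::from_elem(3, i as f64)).collect();
        let targets: Vec<Array1<f64>> = (0..n).map(|i| Array1::from_elem(1, i as f64)).collect();
        let timestamps: Vec<f64> = (0..n).map(|i| i as f64).collect();

        let data =
            ReservoirTrainingData::new(inputs, targets, timestamps).with_weights(vec![1.0; n]);

        let (train, test) = data.train_test_split(0.2);
        assert_eq!(train.len(), 8);
        assert_eq!(test.len(), 2);
        // Optional fields are split alongside the mandatory ones.
        assert_eq!(train.sample_weights.as_ref().map(|w| w.len()), Some(8));
        assert_eq!(test.sample_weights.as_ref().map(|w| w.len()), Some(2));
        // The split is chronological: the test partition holds the latest samples.
        assert_eq!(test.timestamps, vec![8.0, 9.0]);
    }
}
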
/// A single training example captured during reservoir training.
#[derive(Debug, Clone)]
pub struct TrainingExample {
    /// Raw input vector
    pub input: Array1<f64>,
    /// Reservoir state at the time of the example
    pub reservoir_state: Array1<f64>,
    /// Extracted feature vector
    pub features: Array1<f64>,
    /// Target output
    pub target: Array1<f64>,
    /// Model prediction
    pub prediction: Array1<f64>,
    /// Prediction error
    pub error: f64,
    /// Prediction confidence
    pub confidence: f64,
    /// Timestamp of the example
    pub timestamp: f64,
    /// Additional numeric metadata, keyed by name
    pub metadata: HashMap<String, f64>,
}

/// Aggregate performance and resource metrics for the reservoir.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ReservoirMetrics {
    /// Number of training examples processed
    pub training_examples: usize,
    /// Prediction accuracy
    pub prediction_accuracy: f64,
    /// Memory capacity
    pub memory_capacity: f64,
    /// Nonlinear memory capacity
    pub nonlinear_memory_capacity: f64,
    /// Information processing capacity
    pub processing_capacity: f64,
    /// Generalization error
    pub generalization_error: f64,
    /// Echo state property measure
    pub echo_state_property: f64,
    /// Average processing time per step (ms)
    pub avg_processing_time_ms: f64,
    /// Quantum resource usage
    pub quantum_resource_usage: f64,
    /// Temporal correlation length
    pub temporal_correlation_length: f64,
    /// Reservoir efficiency
    pub reservoir_efficiency: f64,
    /// Adaptation rate
    pub adaptation_rate: f64,
    /// Plasticity level
    pub plasticity_level: f64,
    /// Hardware utilization
    pub hardware_utilization: f64,
    /// Error mitigation overhead
    pub error_mitigation_overhead: f64,
    /// Quantum advantage metric
    pub quantum_advantage: f64,
    /// Computational complexity estimate
    pub computational_complexity: f64,
}

/// Summary of a completed training run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrainingResult {
    /// Training error
    pub training_error: f64,
    /// Test error
    pub test_error: f64,
    /// Training time (ms)
    pub training_time_ms: f64,
    /// Number of training examples
    pub num_examples: usize,
    /// Echo state property measure
    pub echo_state_property: f64,
    /// Memory capacity
    pub memory_capacity: f64,
    /// Nonlinear memory capacity
    pub nonlinear_capacity: f64,
    /// Information processing capacity
    pub processing_capacity: f64,
}
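
// Sketch (not from the original file) showing that the metric containers above are plain data
// with zeroed `Default` values; fields are set directly rather than through builder methods.
// The module and test names are illustrative choices.
#[cfg(test)]
mod metrics_defaults_sketch {
    use super::*;

    #[test]
    fn metrics_default_to_zero() {
        let memory = MemoryMetrics::default();
        assert_eq!(memory.total_capacity, 0.0);

        let mut reservoir = ReservoirMetrics::default();
        assert_eq!(reservoir.training_examples, 0);
        reservoir.prediction_accuracy = 0.95;
        assert!(reservoir.prediction_accuracy > 0.9);
    }
}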