//! Convergence acceleration utilities (Anderson mixing and Aitken's delta-squared)
//! for fixed-point iterations.

use crate::common::IntegrateFloat;
use crate::error::{IntegrateError, IntegrateResult};
use scirs2_core::ndarray::{Array1, Array2, ArrayView1};
use std::collections::VecDeque;

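/// Options controlling the behavior of an [`AndersonAccelerator`].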
#[derive(Debug, Clone)]
pub struct AcceleratorOptions<F: IntegrateFloat> {
    /// Number of previous iterates kept in the sliding history window.
    pub memory_depth: usize,
    /// Tikhonov regularization added to the least-squares normal equations.
    pub regularization: F,
    /// Minimum number of stored iterates before acceleration is attempted.
    pub min_iterates: usize,
    /// Prefer the QR least-squares path over the normal equations.
    pub use_qr: bool,
    /// Damping factor `beta` in `[0, 1]`; `beta = 1` gives the undamped update.
    pub damping: F,
    /// If set, clear the history every `restart_period` iterations.
    pub restart_period: Option<usize>,
}

impl<F: IntegrateFloat> Default for AcceleratorOptions<F> {
    fn default() -> Self {
        Self {
            memory_depth: 5,
            regularization: F::from_f64(1e-12).unwrap_or_else(|| F::from(1e-12).unwrap()),
            min_iterates: 2,
            use_qr: true,
            damping: F::one(),
            restart_period: Some(20),
        }
    }
}

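/// Anderson acceleration for fixed-point iterations `x_{k+1} = g(x_k)`.
///
/// The accelerator keeps a sliding window of recent iterates `x_k`, their
/// images `g(x_k)`, and the residuals `r_k = g(x_k) - x_k`, and extrapolates
/// the next iterate from a combination of the stored history. A minimal
/// usage sketch (mirroring the unit test below; not compiled as a doc test):
///
/// ```ignore
/// let mut acc = AndersonAccelerator::new(1, AcceleratorOptions::default());
/// let mut x = Array1::from_vec(vec![0.0_f64]);
/// for _ in 0..10 {
///     // Contractive map g(x) = 0.5 x + 1 with fixed point x* = 2.
///     let g_x = Array1::from_vec(vec![0.5 * x[0] + 1.0]);
///     if let Some(x_next) = acc.accelerate(x.view(), g_x.view()) {
///         x = x_next;
///     }
/// }
/// ```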
#[derive(Debug)]
pub struct AndersonAccelerator<F: IntegrateFloat> {
    /// Problem dimension (length of the iterate vectors).
    dimension: usize,
    /// Configuration options.
    options: AcceleratorOptions<F>,
    /// History of iterates `x_k`.
    x_history: VecDeque<Array1<F>>,
    /// History of images `g(x_k)`.
    g_history: VecDeque<Array1<F>>,
    /// History of residuals `g(x_k) - x_k`.
    residual_history: VecDeque<Array1<F>>,
    /// Total number of `accelerate` calls since construction.
    iteration_count: usize,
    /// Whether an accelerated update has been produced since the last restart.
    is_active: bool,
}

impl<F: IntegrateFloat> AndersonAccelerator<F> {
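    /// Create a new accelerator for problems of the given dimension.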
    pub fn new(dimension: usize, options: AcceleratorOptions<F>) -> Self {
        Self {
            dimension,
            options,
            x_history: VecDeque::new(),
            g_history: VecDeque::new(),
            residual_history: VecDeque::new(),
            iteration_count: 0,
            is_active: false,
        }
    }

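    /// Create an accelerator with the given memory depth and otherwise
    /// default options.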
    pub fn with_memory_depth(dimension: usize, memory_depth: usize) -> Self {
        let options = AcceleratorOptions {
            memory_depth,
            ..Default::default()
        };
        Self::new(dimension, options)
    }

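    /// Feed the pair `(x_k, g(x_k))` into the accelerator and obtain the
    /// next iterate. Returns `None` if the input dimensions do not match;
    /// falls back to the plain fixed-point step `g(x_k)` while history is
    /// still being collected or after a restart.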
    pub fn accelerate(
        &mut self,
        x_current: ArrayView1<F>,
        g_x_current: ArrayView1<F>,
    ) -> Option<Array1<F>> {
        if x_current.len() != self.dimension || g_x_current.len() != self.dimension {
            return None;
        }

        // Residual of the fixed-point map: r_k = g(x_k) - x_k.
        let residual = &g_x_current.to_owned() - &x_current.to_owned();

        self.x_history.push_back(x_current.to_owned());
        self.g_history.push_back(g_x_current.to_owned());
        self.residual_history.push_back(residual);

        // Keep the history window at most `memory_depth` entries deep.
        while self.x_history.len() > self.options.memory_depth {
            self.x_history.pop_front();
            self.g_history.pop_front();
            self.residual_history.pop_front();
        }

        self.iteration_count += 1;

        // Periodic restart: discard stale history and fall back to the
        // plain fixed-point step.
        if let Some(restart_period) = self.options.restart_period {
            if self.iteration_count.is_multiple_of(restart_period) {
                self.restart();
                return Some(g_x_current.to_owned());
            }
        }

        // Not enough history yet: return the plain fixed-point step.
        if self.residual_history.len() < self.options.min_iterates {
            return Some(g_x_current.to_owned());
        }

        self.is_active = true;
        match self.compute_anderson_update() {
            Ok(x_accelerated) => Some(x_accelerated),
            Err(_) => {
                // Numerical failure: restart and fall back to g(x).
                self.restart();
                Some(g_x_current.to_owned())
            }
        }
    }

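    /// Compute the accelerated iterate from the stored history.
    ///
    /// Solves the least-squares problem `min_alpha || dR * alpha - r_m ||`
    /// over the residual differences `dR[:, j] = r_{j+1} - r_j`, then applies
    /// the Anderson update in difference form with optional damping.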
    fn compute_anderson_update(&self) -> IntegrateResult<Array1<F>> {
        let m = self.residual_history.len() - 1;

        // With a single stored residual there is nothing to extrapolate
        // from; return the plain fixed-point step g(x_m).
        if m == 0 {
            return self
                .g_history
                .back()
                .ok_or_else(|| {
                    IntegrateError::ComputationError(
                        "No history available for Anderson acceleration".to_string(),
                    )
                })
                .cloned();
        }

        // Matrix of residual differences: dR[:, j] = r_{j+1} - r_j.
        let mut delta_r = Array2::zeros((self.dimension, m));
        for j in 0..m {
            let r_diff = &self.residual_history[j + 1] - &self.residual_history[j];
            for i in 0..self.dimension {
                delta_r[[i, j]] = r_diff[i];
            }
        }

        // Mixing coefficients solve min_alpha || dR * alpha - r_m ||.
        let r_m = self.residual_history.back().ok_or_else(|| {
            IntegrateError::ComputationError(
                "No residual history available for Anderson acceleration".to_string(),
            )
        })?;
        let alpha = self.solve_least_squares(&delta_r, r_m.view())?;

        let x_m = self.x_history.back().ok_or_else(|| {
            IntegrateError::ComputationError(
                "No x history available for Anderson acceleration".to_string(),
            )
        })?;
        let g_m = self.g_history.back().ok_or_else(|| {
            IntegrateError::ComputationError(
                "No g history available for Anderson acceleration".to_string(),
            )
        })?;

        // Anderson update in difference form, matching the least-squares
        // problem solved above:
        //   x_acc = x_m - sum_j alpha_j (x_{j+1} - x_j)
        //   g_acc = g_m - sum_j alpha_j (g_{j+1} - g_j)
        let mut x_accelerated = x_m.clone();
        let mut g_accelerated = g_m.clone();
        for (j, alpha_j) in alpha.iter().enumerate() {
            let dx = &self.x_history[j + 1] - &self.x_history[j];
            let dg = &self.g_history[j + 1] - &self.g_history[j];
            x_accelerated = &x_accelerated - &(&dx * *alpha_j);
            g_accelerated = &g_accelerated - &(&dg * *alpha_j);
        }

        // Damped mixing of the x- and g-combinations; beta = 1 recovers the
        // undamped Anderson iterate g_acc.
        let beta = self.options.damping;
        let x_new = &x_accelerated * (F::one() - beta) + &g_accelerated * beta;

        Ok(x_new)
    }

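    /// Solve the least-squares problem `min_x || A x - b ||`.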
    fn solve_least_squares(&self, a: &Array2<F>, b: ArrayView1<F>) -> IntegrateResult<Array1<F>> {
        let (n, m) = a.dim();

        if self.options.use_qr && n >= m {
            self.solve_qr(a, b)
        } else {
            self.solve_normal_equations(a, b)
        }
    }

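    /// QR-based least-squares path (currently delegates to the regularized
    /// normal equations; see the comment in the body).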
    fn solve_qr(&self, a: &Array2<F>, b: ArrayView1<F>) -> IntegrateResult<Array1<F>> {
        // A dedicated QR factorization is not implemented here; fall back to
        // the regularized normal equations, which suffice for the small
        // systems produced by a short history window.
        self.solve_normal_equations(a, b)
    }

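    /// Solve the least-squares problem via the regularized normal equations
    /// `(A^T A + lambda I) x = A^T b`.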
    fn solve_normal_equations(
        &self,
        a: &Array2<F>,
        b: ArrayView1<F>,
    ) -> IntegrateResult<Array1<F>> {
        let (n, m) = a.dim();

        // Form the Gram matrix A^T A.
        let mut ata = Array2::zeros((m, m));
        for i in 0..m {
            for j in 0..m {
                let mut sum = F::zero();
                for k in 0..n {
                    sum += a[[k, i]] * a[[k, j]];
                }
                ata[[i, j]] = sum;
            }
        }

        // Tikhonov regularization keeps the system well-posed when the
        // residual differences are nearly linearly dependent.
        for i in 0..m {
            ata[[i, i]] += self.options.regularization;
        }

        // Form the right-hand side A^T b.
        let mut atb = Array1::zeros(m);
        for i in 0..m {
            let mut sum = F::zero();
            for k in 0..n {
                sum += a[[k, i]] * b[k];
            }
            atb[i] = sum;
        }

        self.solve_linear_system(ata, atb)
    }

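    /// Solve a dense linear system by Gaussian elimination with partial
    /// pivoting, followed by back substitution.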
    fn solve_linear_system(
        &self,
        mut a: Array2<F>,
        mut b: Array1<F>,
    ) -> IntegrateResult<Array1<F>> {
        let n = a.nrows();

        // Forward elimination with partial pivoting.
        for k in 0..n {
            // Select the pivot: the largest-magnitude entry in column k.
            let mut max_val = a[[k, k]].abs();
            let mut max_row = k;
            for i in (k + 1)..n {
                let abs_val = a[[i, k]].abs();
                if abs_val > max_val {
                    max_val = abs_val;
                    max_row = i;
                }
            }

            if max_val < self.options.regularization {
                return Err(IntegrateError::ComputationError(
                    "Singular matrix in Anderson acceleration".to_string(),
                ));
            }

            // Swap the pivot row into position.
            if max_row != k {
                for j in 0..n {
                    a.swap([k, j], [max_row, j]);
                }
                b.swap(k, max_row);
            }

            // Guard against an effectively zero pivot (possible if the user
            // sets `regularization` below machine precision).
            if a[[k, k]].abs() < F::from_f64(1e-14).unwrap_or_else(|| F::from(1e-14).unwrap()) {
                return Err(IntegrateError::ComputationError(
                    "Zero diagonal element in Gaussian elimination".to_string(),
                ));
            }

            // Eliminate the entries below the pivot.
            for i in (k + 1)..n {
                let factor = a[[i, k]] / a[[k, k]];
                for j in (k + 1)..n {
                    a[[i, j]] = a[[i, j]] - factor * a[[k, j]];
                }
                b[i] = b[i] - factor * b[k];
            }
        }

        // Back substitution.
        let mut x = Array1::zeros(n);
        for i in (0..n).rev() {
            let mut sum = F::zero();
            for j in (i + 1)..n {
                sum += a[[i, j]] * x[j];
            }
            if a[[i, i]].abs() < F::from_f64(1e-14).unwrap_or_else(|| F::from(1e-14).unwrap()) {
                return Err(IntegrateError::ComputationError(
                    "Zero diagonal element in back substitution".to_string(),
                ));
            }
            x[i] = (b[i] - sum) / a[[i, i]];
        }

        Ok(x)
    }

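    /// Clear all stored history and deactivate the accelerator.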
    pub fn restart(&mut self) {
        self.x_history.clear();
        self.g_history.clear();
        self.residual_history.clear();
        self.is_active = false;
    }

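    /// Whether an accelerated update has been produced since the last restart.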
    pub fn is_active(&self) -> bool {
        self.is_active
    }

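    /// Number of iterates currently held in the history window.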
    pub fn memory_usage(&self) -> usize {
        self.x_history.len()
    }

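    /// Total number of `accelerate` calls since construction.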
    pub fn iteration_count(&self) -> usize {
        self.iteration_count
    }
}

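/// Aitken's delta-squared acceleration for scalar sequences.
///
/// Given three consecutive values `x0, x1, x2`, the accelerated estimate is
/// `x2 - (x2 - x1)^2 / (x2 - 2*x1 + x0)`, which converges faster than the
/// original sequence when the underlying iteration is linearly convergent.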
pub struct AitkenAccelerator<F: IntegrateFloat> {
    /// Sliding window holding at most the three most recent values.
    history: VecDeque<F>,
}

impl<F: IntegrateFloat> AitkenAccelerator<F> {
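    /// Create an empty accelerator.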
    pub fn new() -> Self {
        Self {
            history: VecDeque::new(),
        }
    }

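    /// Feed the next sequence value and return the current accelerated
    /// estimate (or the value itself until three values are available).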
    pub fn accelerate(&mut self, value: F) -> Option<F> {
        self.history.push_back(value);

        // Keep only the three most recent values.
        while self.history.len() > 3 {
            self.history.pop_front();
        }

        if self.history.len() == 3 {
            let x0 = self.history[0];
            let x1 = self.history[1];
            let x2 = self.history[2];

            // Aitken's formula: x_acc = x2 - (x2 - x1)^2 / (x2 - 2*x1 + x0).
            let numerator = (x2 - x1) * (x2 - x1);
            let two = F::from_f64(2.0).unwrap_or_else(|| F::from(2).unwrap());
            let denominator = x2 - two * x1 + x0;
            let epsilon = F::from_f64(1e-12).unwrap_or_else(|| F::from(1e-12).unwrap());

            // A vanishing denominator means the second difference is zero,
            // so extrapolation is ill-defined; return the value unchanged.
            if denominator.abs() > epsilon {
                Some(x2 - numerator / denominator)
            } else {
                Some(x2)
            }
        } else {
            // Not enough history yet; pass the value through.
            Some(value)
        }
    }

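    /// Clear the stored history.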
    pub fn restart(&mut self) {
        self.history.clear();
    }
}

impl<F: IntegrateFloat> Default for AitkenAccelerator<F> {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_anderson_accelerator() {
        let mut accelerator = AndersonAccelerator::new(1, AcceleratorOptions::default());

        // Fixed-point iteration g(x) = 0.5 x + 1 with fixed point x* = 2.
        let mut x = Array1::from_vec(vec![0.0]);

        for _iter in 0..10 {
            let g_x = Array1::from_vec(vec![0.5 * x[0] + 1.0]);

            if let Some(x_new) = accelerator.accelerate(x.view(), g_x.view()) {
                x = x_new;
            }
        }

        assert!((x[0] - 2.0_f64).abs() < 0.1);
    }

    #[test]
    fn test_aitken_accelerator() {
        let mut accelerator = AitkenAccelerator::new();

        // The sequence x_n = n / (n + 1), which converges to 1.
        let values = vec![0.0, 0.5, 0.666667, 0.75, 0.8];
        let mut result = 0.0;

        for value in values {
            if let Some(accelerated) = accelerator.accelerate(value) {
                result = accelerated;
            }
        }

        // The extrapolated value should move past the last raw term.
        assert!(result > 0.8);
    }

    #[test]
    fn test_anderson_with_regularization() {
        let options = AcceleratorOptions {
            regularization: 1e-8,
            memory_depth: 3,
            ..Default::default()
        };

        let mut accelerator = AndersonAccelerator::new(2, options);

        // Contractive 2-D affine map; the iterates should remain finite.
        let mut x: Array1<f64> = Array1::from_vec(vec![0.0, 0.0]);

        for _iter in 0..5 {
            let g_x = Array1::from_vec(vec![
                0.3 * x[0] + 0.1 * x[1] + 1.0,
                0.1 * x[0] + 0.4 * x[1] + 0.5,
            ]);

            if let Some(x_new) = accelerator.accelerate(x.view(), g_x.view()) {
                x = x_new;
            }
        }

        assert!(x[0].is_finite() && x[1].is_finite());
    }
}