use crate::error::OptimizeError;
use crate::unconstrained::{line_search::backtracking_line_search, OptimizeResult};
use scirs2_core::ndarray::{Array1, ArrayView1};
use scirs2_core::random::rngs::StdRng;
use scirs2_core::random::{Rng, SeedableRng};
use std::collections::VecDeque;

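/// Configuration options shared by the subspace and coordinate-descent
/// optimizers in this module.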
#[derive(Debug, Clone)]
pub struct SubspaceOptions {
    /// Maximum number of outer iterations.
    pub max_iter: usize,
    /// Convergence tolerance on the gradient (or projected gradient).
    pub tol: f64,
    /// Dimension of the search subspace for subspace methods.
    pub subspace_dim: usize,
    /// Number of coordinates per block in block coordinate descent.
    pub block_size: usize,
    /// Maximum inner iterations per coordinate sweep or block.
    pub coord_max_iter: usize,
    /// Maximum number of past gradients retained for adaptive subspaces.
    pub memory_limit: usize,
    /// Optional RNG seed for reproducible runs.
    pub seed: Option<u64>,
    /// Whether to build subspaces adaptively from gradient history.
    pub adaptive_subspace: bool,
    /// How often (in outer iterations) the subspace is regenerated.
    pub subspace_update_freq: usize,
    /// Minimum objective decrease that counts as an improvement.
    pub min_improvement: f64,
}

impl Default for SubspaceOptions {
    fn default() -> Self {
        Self {
            max_iter: 1000,
            tol: 1e-6,
            subspace_dim: 100,
            block_size: 50,
            coord_max_iter: 10,
            memory_limit: 20,
            seed: None,
            adaptive_subspace: true,
            subspace_update_freq: 10,
            min_improvement: 1e-12,
        }
    }
}

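/// Subspace optimization method selector, used by [`minimize_subspace`].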
#[derive(Debug, Clone, Copy)]
pub enum SubspaceMethod {
    /// Update one randomly chosen coordinate at a time.
    RandomCoordinateDescent,
    /// Optimize contiguous blocks of coordinates in turn.
    BlockCoordinateDescent,
    /// Search within randomly sampled sparse subspaces.
    RandomSubspace,
    /// Search within subspaces built from gradient history.
    AdaptiveSubspace,
    /// Sweep over all coordinates in a fixed cyclic order.
    CyclicalCoordinateDescent,
}

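/// Internal state shared by the subspace optimizers: gradient and objective
/// history, the current subspace basis, and the RNG.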
struct SubspaceState {
    /// Recent gradients, newest at the back.
    gradient_history: VecDeque<Array1<f64>>,
    /// Recent objective values, newest at the back.
    function_history: VecDeque<f64>,
    /// Basis vectors of the current search subspace, if one is active.
    current_subspace: Option<Vec<Array1<f64>>>,
    /// RNG used for coordinate and subspace sampling.
    rng: StdRng,
    /// Currently unused.
    #[allow(dead_code)]
    update_counter: usize,
}

impl SubspaceState {
    fn new(seed: Option<u64>) -> Self {
        // A fixed default seed keeps unseeded runs reproducible.
        let rng = match seed {
            Some(s) => StdRng::seed_from_u64(s),
            None => StdRng::seed_from_u64(42),
        };

        Self {
            gradient_history: VecDeque::new(),
            function_history: VecDeque::new(),
            current_subspace: None,
            rng,
            update_counter: 0,
        }
    }

    /// Record a gradient, evicting the oldest once over `memory_limit`.
    fn add_gradient(&mut self, grad: Array1<f64>, memory_limit: usize) {
        self.gradient_history.push_back(grad);
        if self.gradient_history.len() > memory_limit {
            self.gradient_history.pop_front();
        }
    }

    /// Record an objective value, keeping at most the 50 most recent.
    fn add_function_value(&mut self, fval: f64) {
        self.function_history.push_back(fval);
        if self.function_history.len() > 50 {
            self.function_history.pop_front();
        }
    }

    /// Generate up to `subspace_dim` sparse random directions in `R^n`,
    /// each normalized to unit length.
    fn generate_random_subspace(&mut self, n: usize, subspace_dim: usize) -> Vec<Array1<f64>> {
        let mut basis = Vec::new();
        for _ in 0..subspace_dim.min(n) {
            let mut vec = Array1::zeros(n);
            // Sparse directions: roughly n/10 nonzeros, clamped to [1, 20].
            let num_nonzeros = (n / 10).clamp(1, 20);
            for _ in 0..num_nonzeros {
                let idx = self.rng.gen_range(0..n);
                vec[idx] = self.rng.gen_range(-1.0..1.0);
            }
            // Normalize to unit length; skip normalization for near-zero vectors.
            let norm = vec.mapv(|x: f64| x.powi(2)).sum().sqrt();
            if norm > 1e-12 {
                vec /= norm;
            }
            basis.push(vec);
        }
        basis
    }

    /// Build a subspace from gradient history: recent normalized gradients
    /// first, padded with gradient differences, then orthogonalized.
    fn generate_adaptive_subspace(&self, subspace_dim: usize) -> Vec<Array1<f64>> {
        if self.gradient_history.len() < 2 {
            return Vec::new();
        }

        let mut basis = Vec::new();

        // Start from the most recent gradients.
        let recent_grads: Vec<_> = self
            .gradient_history
            .iter()
            .rev()
            .take(subspace_dim)
            .collect();

        for grad in recent_grads {
            let norm = grad.mapv(|x: f64| x.powi(2)).sum().sqrt();
            if norm > 1e-12 {
                basis.push(grad / norm);
            }
            if basis.len() >= subspace_dim {
                break;
            }
        }

        // Pad with normalized gradient differences if more vectors are needed.
        if basis.len() < subspace_dim && self.gradient_history.len() > 1 {
            for i in 1..self.gradient_history.len() {
                if basis.len() >= subspace_dim {
                    break;
                }
                let diff = &self.gradient_history[i] - &self.gradient_history[i - 1];
                let norm = diff.mapv(|x: f64| x.powi(2)).sum().sqrt();
                if norm > 1e-12 {
                    basis.push(diff / norm);
                }
            }
        }

        // Orthogonalize the collected vectors and drop degenerate ones.
        orthogonalize_basis(&mut basis);

        basis
    }
}

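/// Orthonormalize `basis` in place with modified Gram-Schmidt, dropping
/// vectors whose norm collapses below 1e-12.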
#[allow(dead_code)]
fn orthogonalize_basis(basis: &mut Vec<Array1<f64>>) {
    for i in 0..basis.len() {
        // Normalize the current vector; skip it if it is numerically zero.
        let norm = basis[i].mapv(|x: f64| x.powi(2)).sum().sqrt();
        if norm > 1e-12 {
            basis[i] = &basis[i] / norm;
        } else {
            continue;
        }

        // Remove the component along basis[i] from all later vectors.
        for j in i + 1..basis.len() {
            let dot_product = basis[i].dot(&basis[j]);
            basis[j] = &basis[j] - dot_product * &basis[i];
        }
    }

    // Discard vectors that became numerically zero.
    basis.retain(|v| v.mapv(|x: f64| x.powi(2)).sum().sqrt() > 1e-12);
}

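/// Minimize `fun` by random coordinate descent. Each outer iteration tries
/// `coord_max_iter` randomly chosen coordinates, estimating the partial
/// derivative by central differences and line-searching along the coordinate.
/// Terminates early once an outer iteration yields no improvement larger
/// than `min_improvement`.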
#[allow(dead_code)]
pub fn minimize_random_coordinate_descent<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut state = SubspaceState::new(options.seed);
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let mut improved = false;

        for _ in 0..options.coord_max_iter {
            // Pick a random coordinate to update.
            let coord = state.rng.gen_range(0..n);

            // Estimate the partial derivative by central differences.
            let eps = 1e-4;
            let mut x_plus = x.clone();
            let mut x_minus = x.clone();
            x_plus[coord] += eps;
            x_minus[coord] -= eps;

            let f_plus = fun(&x_plus.view());
            let f_minus = fun(&x_minus.view());
            nfev += 2;

            let grad_coord = (f_plus - f_minus) / (2.0 * eps);

            if grad_coord.abs() > options.tol {
                // Move downhill along this coordinate.
                let direction = -grad_coord.signum();
                let step_size = find_step_size(&mut fun, &x, coord, direction, &mut nfev);

                if step_size > 0.0 {
                    x[coord] += direction * step_size;
                    let new_f = fun(&x.view());
                    nfev += 1;

                    if new_f < best_f - options.min_improvement {
                        best_f = new_f;
                        improved = true;
                    }
                }
            }
        }

        // Converged: no coordinate produced a meaningful improvement.
        if !improved {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: None,
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }
    }

    Ok(OptimizeResult {
        x,
        fun: best_f,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: None,
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

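/// Minimize `fun` by block coordinate descent: the coordinates are split
/// into contiguous blocks of `block_size`, and each block is optimized in
/// turn while the remaining coordinates stay fixed.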
#[allow(dead_code)]
pub fn minimize_block_coordinate_descent<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let _state = SubspaceState::new(options.seed);
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let mut improved = false;

        // Partition the coordinates into contiguous blocks.
        let num_blocks = n.div_ceil(options.block_size);

        for block_idx in 0..num_blocks {
            let start_idx = block_idx * options.block_size;
            let end_idx = ((block_idx + 1) * options.block_size).min(n);

            // Optimize this block while the other coordinates stay fixed.
            let block_improved =
                optimize_block(&mut fun, &mut x, start_idx, end_idx, &options, &mut nfev)?;

            if block_improved {
                improved = true;
                let new_f = fun(&x.view());
                nfev += 1;
                if new_f < best_f {
                    best_f = new_f;
                }
            }
        }

        // Converged: no block produced an improvement this sweep.
        if !improved {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: None,
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }
    }

    Ok(OptimizeResult {
        x,
        fun: best_f,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: None,
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

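/// Minimize `fun` by searching in random sparse subspaces. Every
/// `subspace_update_freq` iterations a fresh basis is sampled; the
/// finite-difference gradient is projected onto it, lifted back to the full
/// space, and used as a backtracking line-search direction.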
#[allow(dead_code)]
pub fn minimize_random_subspace<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut state = SubspaceState::new(options.seed);
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        // Periodically regenerate the random subspace.
        if iter % options.subspace_update_freq == 0 || state.current_subspace.is_none() {
            state.current_subspace = Some(state.generate_random_subspace(n, options.subspace_dim));
        }

        let subspace = state.current_subspace.as_ref().unwrap().clone();
        if subspace.is_empty() {
            break;
        }

        // Full-space gradient, recorded for the gradient history.
        let grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);
        state.add_gradient(grad.clone(), options.memory_limit);
        state.add_function_value(best_f);

        // Project the gradient onto the subspace basis.
        let mut subspace_grad = Array1::zeros(subspace.len());
        for (i, basis_vec) in subspace.iter().enumerate() {
            subspace_grad[i] = grad.dot(basis_vec);
        }

        // Converged if the projected gradient is small.
        let grad_norm = subspace_grad.mapv(|x: f64| x.powi(2)).sum().sqrt();
        if grad_norm < options.tol {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: Some(grad),
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }

        // Lift the projected gradient back into the full space.
        let mut search_direction = Array1::zeros(n);
        for (i, &coeff) in subspace_grad.iter().enumerate() {
            search_direction = search_direction + coeff * &subspace[i];
        }

        // Normalize; skip the iteration if the direction degenerates.
        let direction_norm = search_direction.mapv(|x: f64| x.powi(2)).sum().sqrt();
        if direction_norm > 1e-12 {
            search_direction /= direction_norm;
        } else {
            continue;
        }

        let (step_size, _) = backtracking_line_search(
            &mut |x_view| fun(x_view),
            &x.view(),
            best_f,
            &search_direction.view(),
            &(-&grad).view(),
            1.0,
            1e-4,
            0.5,
            None,
        );
        nfev += 1;

        // Take the step and keep it only if it improves enough.
        let x_new = &x - step_size * &search_direction;
        let f_new = fun(&x_new.view());
        nfev += 1;

        if f_new < best_f - options.min_improvement {
            x = x_new;
            best_f = f_new;
        }
    }

    let final_grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);

    Ok(OptimizeResult {
        x,
        fun: best_f,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: Some(final_grad),
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

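/// Minimize `fun` in subspaces built adaptively from gradient history:
/// recent gradients and their differences are orthogonalized into a basis,
/// and the current gradient is projected onto it to form the search
/// direction (falling back to the full gradient when no basis is available).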
#[allow(dead_code)]
pub fn minimize_adaptive_subspace<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut state = SubspaceState::new(options.seed);
    let mut nfev = 0;

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        // Record the current gradient and objective value.
        let grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);
        state.add_gradient(grad.clone(), options.memory_limit);
        state.add_function_value(best_f);

        // Periodically rebuild the subspace from gradient history.
        if iter % options.subspace_update_freq == 0 && state.gradient_history.len() > 1 {
            let new_subspace = state.generate_adaptive_subspace(options.subspace_dim);
            if !new_subspace.is_empty() {
                state.current_subspace = Some(new_subspace);
            }
        }

        // Project the gradient onto the subspace, falling back to the
        // full gradient when no subspace is available.
        let search_direction = if let Some(ref subspace) = state.current_subspace {
            if !subspace.is_empty() {
                let mut projected_grad = Array1::zeros(x.len());
                for basis_vec in subspace {
                    let projection = grad.dot(basis_vec);
                    projected_grad = projected_grad + projection * basis_vec;
                }
                projected_grad
            } else {
                grad.clone()
            }
        } else {
            grad.clone()
        };

        // Converged if the (projected) gradient is small.
        let grad_norm = search_direction.mapv(|x: f64| x.powi(2)).sum().sqrt();
        if grad_norm < options.tol {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: Some(grad),
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }

        let (step_size, _) = backtracking_line_search(
            &mut |x_view| fun(x_view),
            &x.view(),
            best_f,
            &(-&search_direction).view(),
            &(-&grad).view(),
            1.0,
            1e-4,
            0.5,
            None,
        );

        // Accept the step only if it improves the objective enough.
        let x_new = &x - step_size * &search_direction;
        let f_new = fun(&x_new.view());
        nfev += 1;

        if f_new < best_f - options.min_improvement {
            x = x_new;
            best_f = f_new;
        }
    }

    let final_grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);

    Ok(OptimizeResult {
        x,
        fun: best_f,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: Some(final_grad),
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

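/// Minimize `fun` with the selected [`SubspaceMethod`], dispatching to the
/// corresponding solver.
///
/// A minimal usage sketch, marked `ignore` because the public re-export path
/// for these items may differ from the module layout assumed here:
///
/// ```ignore
/// use scirs2_core::ndarray::{array, ArrayView1};
///
/// // Minimize f(x, y) = x^2 + y^2 starting from (1, 1).
/// let fun = |x: &ArrayView1<f64>| x[0].powi(2) + x[1].powi(2);
/// let result = minimize_subspace(
///     fun,
///     array![1.0, 1.0],
///     SubspaceMethod::CyclicalCoordinateDescent,
///     None, // defaults from SubspaceOptions::default()
/// )
/// .unwrap();
/// assert!(result.fun < 1e-2);
/// ```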
#[allow(dead_code)]
pub fn minimize_subspace<F>(
    fun: F,
    x0: Array1<f64>,
    method: SubspaceMethod,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    match method {
        SubspaceMethod::RandomCoordinateDescent => {
            minimize_random_coordinate_descent(fun, x0, options)
        }
        SubspaceMethod::BlockCoordinateDescent => {
            minimize_block_coordinate_descent(fun, x0, options)
        }
        SubspaceMethod::RandomSubspace => minimize_random_subspace(fun, x0, options),
        SubspaceMethod::AdaptiveSubspace => minimize_adaptive_subspace(fun, x0, options),
        SubspaceMethod::CyclicalCoordinateDescent => {
            minimize_cyclical_coordinate_descent(fun, x0, options)
        }
    }
}

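/// Minimize `fun` by cyclical coordinate descent: each outer iteration
/// sweeps over all coordinates in order, updating any coordinate whose
/// central-difference derivative exceeds `tol`.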
#[allow(dead_code)]
pub fn minimize_cyclical_coordinate_descent<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let mut improved = false;

        // Sweep over all coordinates in order.
        for coord in 0..n {
            // Estimate the partial derivative by central differences.
            let eps = 1e-6;
            let mut x_plus = x.clone();
            let mut x_minus = x.clone();
            x_plus[coord] += eps;
            x_minus[coord] -= eps;

            let f_plus = fun(&x_plus.view());
            let f_minus = fun(&x_minus.view());
            nfev += 2;

            let grad_coord = (f_plus - f_minus) / (2.0 * eps);

            if grad_coord.abs() > options.tol {
                // Move downhill along this coordinate.
                let direction = -grad_coord.signum();
                let step_size = find_step_size(&mut fun, &x, coord, direction, &mut nfev);

                if step_size > 0.0 {
                    x[coord] += direction * step_size;
                    let new_f = fun(&x.view());
                    nfev += 1;

                    if new_f < best_f - options.min_improvement {
                        best_f = new_f;
                        improved = true;
                    }
                }
            }
        }

        // Converged: a full sweep produced no meaningful improvement.
        if !improved {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: None,
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }
    }

    Ok(OptimizeResult {
        x,
        fun: best_f,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: None,
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

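/// One-dimensional step-size search along coordinate `coord`: doubles the
/// step while the objective keeps improving, then refines around the best
/// step found. Returns 0.0 if no step improved on the starting point.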
#[allow(dead_code)]
fn find_step_size<F>(
    fun: &mut F,
    x: &Array1<f64>,
    coord: usize,
    direction: f64,
    nfev: &mut usize,
) -> f64
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let f0 = fun(&x.view());
    *nfev += 1;

    let mut step = 1.0;
    let mut best_step = 0.0;
    let mut best_f = f0;

    // Phase 1: double the step while the objective keeps improving.
    for _ in 0..10 {
        let mut x_new = x.clone();
        x_new[coord] += direction * step;
        let f_new = fun(&x_new.view());
        *nfev += 1;

        if f_new < best_f {
            best_f = f_new;
            best_step = step;
        } else {
            break;
        }

        step *= 2.0;
    }

    // Phase 2: refine around the best step found so far.
    if best_step > 0.0 {
        step = best_step * 0.1;
        for _ in 0..5 {
            let mut x_new = x.clone();
            x_new[coord] += direction * step;
            let f_new = fun(&x_new.view());
            *nfev += 1;

            if f_new < best_f {
                best_f = f_new;
                best_step = step;
            }

            step += best_step * 0.1;
            if step > best_step * 2.0 {
                break;
            }
        }
    }

    best_step
}

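/// Optimize the coordinates in `[start_idx, end_idx)` with simple
/// fixed-length gradient steps, reverting any step that fails to improve
/// the objective. Returns whether the block improved at all.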
#[allow(dead_code)]
fn optimize_block<F>(
    fun: &mut F,
    x: &mut Array1<f64>,
    start_idx: usize,
    end_idx: usize,
    options: &SubspaceOptions,
    nfev: &mut usize,
) -> Result<bool, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let mut improved = false;
    let block_size = end_idx - start_idx;

    let mut f_current = fun(&x.view());
    *nfev += 1;

    for _iter in 0..options.coord_max_iter {
        let mut block_improved = false;

        for i in 0..block_size {
            let coord_idx = start_idx + i;

            // Central-difference estimate of the partial derivative.
            let eps = 1e-6;
            let original_val = x[coord_idx];

            x[coord_idx] = original_val + eps;
            let f_plus = fun(&x.view());
            x[coord_idx] = original_val - eps;
            let f_minus = fun(&x.view());
            x[coord_idx] = original_val;
            *nfev += 2;

            let grad_coord = (f_plus - f_minus) / (2.0 * eps);

            if grad_coord.abs() > options.tol {
                // Fixed-length step downhill along this coordinate.
                let step = -0.01 * grad_coord.signum();
                x[coord_idx] += step;

                let f_new = fun(&x.view());
                *nfev += 1;

                if f_new < f_current {
                    f_current = f_new;
                    block_improved = true;
                    improved = true;
                } else {
                    // Revert the step if it did not improve the objective.
                    x[coord_idx] = original_val;
                }
            }
        }

        if !block_improved {
            break;
        }
    }

    Ok(improved)
}

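/// Forward-difference gradient approximation, costing `n + 1` function
/// evaluations for an `n`-dimensional point.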
#[allow(dead_code)]
fn compute_finite_diff_gradient<F>(fun: &mut F, x: &Array1<f64>, nfev: &mut usize) -> Array1<f64>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let n = x.len();
    let mut grad = Array1::zeros(n);
    let eps = 1e-8;

    let f0 = fun(&x.view());
    *nfev += 1;

    // Forward differences: one extra evaluation per coordinate.
    for i in 0..n {
        let mut x_plus = x.clone();
        x_plus[i] += eps;
        let f_plus = fun(&x_plus.view());
        *nfev += 1;

        grad[i] = (f_plus - f0) / eps;
    }

    grad
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_abs_diff_eq;
    use scirs2_core::ndarray::array;

    #[test]
    fn test_random_coordinate_descent() {
        // Simple convex quadratic: f(x) = sum(x_i^2), minimum at the origin.
        let fun = |x: &ArrayView1<f64>| x.iter().map(|&xi| xi.powi(2)).sum::<f64>();

        let x0 = Array1::from_vec(vec![1.0; 10]);
        let options = SubspaceOptions {
            max_iter: 100,
            tol: 1e-6,
            coord_max_iter: 5,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_random_coordinate_descent(fun, x0, Some(options)).unwrap();

        assert!(result.success);
        for &xi in result.x.iter() {
            assert_abs_diff_eq!(xi, 0.0, epsilon = 1e-2);
        }
        assert!(result.fun < 1e-2);
    }

    #[test]
    fn test_block_coordinate_descent() {
        // Separable quadratic with increasing curvature per coordinate.
        let fun = |x: &ArrayView1<f64>| {
            x.iter()
                .enumerate()
                .map(|(i, &xi)| (i + 1) as f64 * xi.powi(2))
                .sum::<f64>()
        };

        let x0 = Array1::from_vec(vec![1.0; 20]);
        let options = SubspaceOptions {
            max_iter: 50,
            block_size: 5,
            tol: 1e-6,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_block_coordinate_descent(fun, x0, Some(options)).unwrap();

        assert!(result.success);
        for &xi in result.x.iter() {
            assert_abs_diff_eq!(xi, 0.0, epsilon = 1e-2);
        }
    }

    #[test]
    fn test_cyclical_coordinate_descent() {
        // Anisotropic quadratic in two variables.
        let fun = |x: &ArrayView1<f64>| x[0].powi(2) + 2.0 * x[1].powi(2);

        let x0 = array![2.0, 2.0];
        let options = SubspaceOptions {
            max_iter: 50,
            tol: 1e-6,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_cyclical_coordinate_descent(fun, x0, Some(options)).unwrap();

        assert!(result.success);
        assert_abs_diff_eq!(result.x[0], 0.0, epsilon = 1e-2);
        assert_abs_diff_eq!(result.x[1], 0.0, epsilon = 1e-2);
        assert!(result.fun < 1e-2);
    }

    #[test]
    fn test_random_subspace() {
        let fun = |x: &ArrayView1<f64>| x.iter().map(|&xi| xi.powi(2)).sum::<f64>();

        // Moderately high-dimensional problem, low-dimensional subspace.
        let x0 = Array1::from_vec(vec![1.0; 50]);
        let options = SubspaceOptions {
            max_iter: 100,
            subspace_dim: 10,
            tol: 1e-3,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_random_subspace(fun, x0, Some(options)).unwrap();

        // Random subspace steps may not reach the optimum, but the final
        // value must not exceed the starting value f(x0) = 50.
        assert!(result.fun <= 50.0);
    }

    #[test]
    fn test_subspace_method_enum() {
        let fun = |x: &ArrayView1<f64>| x[0].powi(2) + x[1].powi(2);
        let x0 = array![1.0, 1.0];
        let options = SubspaceOptions {
            max_iter: 20,
            tol: 1e-6,
            seed: Some(42),
            ..Default::default()
        };

        // Every variant should dispatch without error.
        let methods = [
            SubspaceMethod::RandomCoordinateDescent,
            SubspaceMethod::BlockCoordinateDescent,
            SubspaceMethod::CyclicalCoordinateDescent,
            SubspaceMethod::RandomSubspace,
            SubspaceMethod::AdaptiveSubspace,
        ];

        for method in &methods {
            let result = minimize_subspace(fun, x0.clone(), *method, Some(options.clone()));
            assert!(result.is_ok(), "Method {:?} failed", method);
        }
    }
}