use crate::error::OptimizeError;
use crate::unconstrained::{line_search::backtracking_line_search, OptimizeResult};
use ndarray::{Array1, ArrayView1};
use rand::prelude::*;
use rand::SeedableRng;
use std::collections::VecDeque;

/// Options for subspace and coordinate-descent optimization methods.
#[derive(Debug, Clone)]
pub struct SubspaceOptions {
    /// Maximum number of outer iterations.
    pub max_iter: usize,
    /// Convergence tolerance on gradient estimates.
    pub tol: f64,
    /// Dimension of the search subspace.
    pub subspace_dim: usize,
    /// Number of coordinates per block in block coordinate descent.
    pub block_size: usize,
    /// Maximum inner iterations per coordinate or block sweep.
    pub coord_max_iter: usize,
    /// Maximum number of stored gradients used to build adaptive subspaces.
    pub memory_limit: usize,
    /// Optional RNG seed for reproducible runs.
    pub seed: Option<u64>,
    /// Whether to adapt the subspace from gradient history.
    pub adaptive_subspace: bool,
    /// Regenerate the subspace every this many iterations.
    pub subspace_update_freq: usize,
    /// Minimum decrease in function value that counts as an improvement.
    pub min_improvement: f64,
}

impl Default for SubspaceOptions {
    fn default() -> Self {
        Self {
            max_iter: 1000,
            tol: 1e-6,
            subspace_dim: 100,
            block_size: 50,
            coord_max_iter: 10,
            memory_limit: 20,
            seed: None,
            adaptive_subspace: true,
            subspace_update_freq: 10,
            min_improvement: 1e-12,
        }
    }
}

/// Available subspace optimization methods.
#[derive(Debug, Clone, Copy)]
pub enum SubspaceMethod {
    /// Update one randomly chosen coordinate at a time.
    RandomCoordinateDescent,
    /// Update fixed-size blocks of coordinates in turn.
    BlockCoordinateDescent,
    /// Search within randomly generated sparse subspaces.
    RandomSubspace,
    /// Build search subspaces from recent gradient history.
    AdaptiveSubspace,
    /// Sweep through all coordinates in order.
    CyclicalCoordinateDescent,
}

/// Internal state shared by the subspace methods.
struct SubspaceState {
    /// Recent gradients, bounded by `memory_limit`.
    gradient_history: VecDeque<Array1<f64>>,
    /// Recent function values (bounded at 50 entries).
    function_history: VecDeque<f64>,
    /// Currently active subspace basis, if one has been generated.
    current_subspace: Option<Vec<Array1<f64>>>,
    /// Random number generator.
    rng: StdRng,
    #[allow(dead_code)]
    update_counter: usize,
}

impl SubspaceState {
    fn new(seed: Option<u64>) -> Self {
        // Fall back to a fixed seed so unseeded runs are still reproducible.
        let rng = match seed {
            Some(s) => StdRng::seed_from_u64(s),
            None => StdRng::seed_from_u64(42),
        };

        Self {
            gradient_history: VecDeque::new(),
            function_history: VecDeque::new(),
            current_subspace: None,
            rng,
            update_counter: 0,
        }
    }

    /// Record a gradient, evicting the oldest once `memory_limit` is exceeded.
    fn add_gradient(&mut self, grad: Array1<f64>, memory_limit: usize) {
        self.gradient_history.push_back(grad);
        if self.gradient_history.len() > memory_limit {
            self.gradient_history.pop_front();
        }
    }

    /// Record a function value, keeping at most the 50 most recent.
    fn add_function_value(&mut self, fval: f64) {
        self.function_history.push_back(fval);
        if self.function_history.len() > 50 {
            self.function_history.pop_front();
        }
    }

    /// Generate up to `subspace_dim` sparse random unit vectors in R^n.
    fn generate_random_subspace(&mut self, n: usize, subspace_dim: usize) -> Vec<Array1<f64>> {
        let mut basis = Vec::new();
        for _ in 0..subspace_dim.min(n) {
            let mut vec = Array1::zeros(n);
            // Sparse vector: roughly n/10 nonzero entries, clamped to [1, 20].
            let num_nonzeros = (n / 10).clamp(1, 20);
            for _ in 0..num_nonzeros {
                let idx = self.rng.random_range(0..n);
                vec[idx] = self.rng.random::<f64>() * 2.0 - 1.0;
            }
            // Normalize; skip the (unlikely) degenerate all-zero draw.
            let norm = vec.mapv(|x: f64| x.powi(2)).sum().sqrt();
            if norm > 1e-12 {
                vec /= norm;
                basis.push(vec);
            }
        }
        basis
    }

    /// Build a subspace from recent gradients and gradient differences,
    /// then orthonormalize it.
    fn generate_adaptive_subspace(&self, subspace_dim: usize) -> Vec<Array1<f64>> {
        if self.gradient_history.len() < 2 {
            return Vec::new();
        }

        let mut basis = Vec::new();

        // Start with the most recent normalized gradients.
        let recent_grads: Vec<_> = self
            .gradient_history
            .iter()
            .rev()
            .take(subspace_dim)
            .collect();

        for grad in recent_grads {
            let norm = grad.mapv(|x: f64| x.powi(2)).sum().sqrt();
            if norm > 1e-12 {
                basis.push(grad / norm);
            }
            if basis.len() >= subspace_dim {
                break;
            }
        }

        // Top up with normalized gradient differences (curvature directions).
        if basis.len() < subspace_dim && self.gradient_history.len() > 1 {
            for i in 1..self.gradient_history.len() {
                if basis.len() >= subspace_dim {
                    break;
                }
                let diff = &self.gradient_history[i] - &self.gradient_history[i - 1];
                let norm = diff.mapv(|x: f64| x.powi(2)).sum().sqrt();
                if norm > 1e-12 {
                    basis.push(diff / norm);
                }
            }
        }

        orthogonalize_basis(&mut basis);

        basis
    }
}

/// Orthonormalize `basis` in place via modified Gram-Schmidt, dropping
/// vectors that become numerically zero.
fn orthogonalize_basis(basis: &mut Vec<Array1<f64>>) {
    for i in 0..basis.len() {
        // Normalize the pivot vector; skip it if it is degenerate.
        let norm = basis[i].mapv(|x: f64| x.powi(2)).sum().sqrt();
        if norm > 1e-12 {
            basis[i] = &basis[i] / norm;
        } else {
            continue;
        }

        // Remove the pivot's component from all later vectors.
        for j in i + 1..basis.len() {
            let dot_product = basis[i].dot(&basis[j]);
            basis[j] = &basis[j] - dot_product * &basis[i];
        }
    }

    basis.retain(|v| v.mapv(|x: f64| x.powi(2)).sum().sqrt() > 1e-12);
}

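/// Randomized coordinate descent: repeatedly pick a random coordinate,
/// estimate its partial derivative by central differences, and line-search
/// along it. Only function values are used; no analytic gradient is needed.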
pub fn minimize_random_coordinate_descent<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut state = SubspaceState::new(options.seed);
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let mut improved = false;

        for _ in 0..options.coord_max_iter {
            let coord = state.rng.random_range(0..n);

            // Central-difference estimate of the partial derivative.
            let eps = 1e-4;
            let mut x_plus = x.clone();
            let mut x_minus = x.clone();
            x_plus[coord] += eps;
            x_minus[coord] -= eps;

            let f_plus = fun(&x_plus.view());
            let f_minus = fun(&x_minus.view());
            nfev += 2;

            let grad_coord = (f_plus - f_minus) / (2.0 * eps);

            if grad_coord.abs() > options.tol {
                // Move against the sign of the derivative.
                let direction = -grad_coord.signum();
                let step_size = find_step_size(&mut fun, &x, coord, direction, &mut nfev);

                if step_size > 0.0 {
                    x[coord] += direction * step_size;
                    let new_f = fun(&x.view());
                    nfev += 1;

                    // Count as progress only when the decrease is meaningful,
                    // but always keep best_f in sync with the accepted step.
                    if new_f < best_f - options.min_improvement {
                        improved = true;
                    }
                    if new_f < best_f {
                        best_f = new_f;
                    }
                }
            }
        }

        // No coordinate produced a meaningful decrease: declare convergence.
        if !improved {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                iterations: iter,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: None,
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }
    }

    Ok(OptimizeResult {
        x,
        fun: best_f,
        iterations: options.max_iter,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: None,
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

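/// Block coordinate descent: partition the variables into contiguous blocks
/// of `block_size` and optimize each block in turn with finite-difference
/// gradient steps.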
pub fn minimize_block_coordinate_descent<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let mut improved = false;

        let num_blocks = n.div_ceil(options.block_size);

        for block_idx in 0..num_blocks {
            let start_idx = block_idx * options.block_size;
            let end_idx = ((block_idx + 1) * options.block_size).min(n);

            let block_improved =
                optimize_block(&mut fun, &mut x, start_idx, end_idx, &options, &mut nfev)?;

            if block_improved {
                improved = true;
                let new_f = fun(&x.view());
                nfev += 1;
                if new_f < best_f {
                    best_f = new_f;
                }
            }
        }

        // No block improved: declare convergence.
        if !improved {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                iterations: iter,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: None,
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }
    }

    Ok(OptimizeResult {
        x,
        fun: best_f,
        iterations: options.max_iter,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: None,
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

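/// Random subspace search: project the finite-difference gradient onto a
/// sparse random basis, lift it back into the full space as a search
/// direction, and take a backtracking line-search step along it.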
pub fn minimize_random_subspace<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut state = SubspaceState::new(options.seed);
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        // Periodically regenerate the random subspace.
        if iter % options.subspace_update_freq == 0 || state.current_subspace.is_none() {
            state.current_subspace = Some(state.generate_random_subspace(n, options.subspace_dim));
        }

        let subspace = state.current_subspace.as_ref().unwrap().clone();
        if subspace.is_empty() {
            break;
        }

        let grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);
        state.add_gradient(grad.clone(), options.memory_limit);
        state.add_function_value(best_f);

        // Project the gradient onto the subspace basis.
        let mut subspace_grad = Array1::zeros(subspace.len());
        for (i, basis_vec) in subspace.iter().enumerate() {
            subspace_grad[i] = grad.dot(basis_vec);
        }

        // Converged when the projected gradient is small.
        let grad_norm = subspace_grad.mapv(|x: f64| x.powi(2)).sum().sqrt();
        if grad_norm < options.tol {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                iterations: iter,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: Some(grad),
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }

        // Lift the projected gradient back into the full space.
        let mut search_direction = Array1::zeros(n);
        for (i, &coeff) in subspace_grad.iter().enumerate() {
            search_direction = search_direction + coeff * &subspace[i];
        }

        let direction_norm = search_direction.mapv(|x: f64| x.powi(2)).sum().sqrt();
        if direction_norm > 1e-12 {
            search_direction /= direction_norm;
        } else {
            continue;
        }

        // Line-search along the descent direction -search_direction so the
        // probed ray matches the descent update below, as in the adaptive
        // variant.
        let (step_size, _) = backtracking_line_search(
            &mut |x_view| fun(x_view),
            &x.view(),
            best_f,
            &(-&search_direction).view(),
            &(-&grad).view(),
            1.0,
            1e-4,
            0.5,
            None,
        );
        nfev += 1;

        let x_new = &x - step_size * &search_direction;
        let f_new = fun(&x_new.view());
        nfev += 1;

        if f_new < best_f - options.min_improvement {
            x = x_new;
            best_f = f_new;
        }
    }

    let final_grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);

    Ok(OptimizeResult {
        x,
        fun: best_f,
        iterations: options.max_iter,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: Some(final_grad),
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

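/// Adaptive subspace method: maintain a history of gradients, build an
/// orthonormal basis from recent gradients and their differences, and step
/// along the gradient's projection onto that basis.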
pub fn minimize_adaptive_subspace<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut state = SubspaceState::new(options.seed);
    let mut nfev = 0;

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);
        state.add_gradient(grad.clone(), options.memory_limit);
        state.add_function_value(best_f);

        // Periodically rebuild the subspace from gradient history.
        if iter % options.subspace_update_freq == 0 && state.gradient_history.len() > 1 {
            let new_subspace = state.generate_adaptive_subspace(options.subspace_dim);
            if !new_subspace.is_empty() {
                state.current_subspace = Some(new_subspace);
            }
        }

        // Project the gradient onto the subspace; fall back to the full
        // gradient when no subspace is available yet.
        let search_direction = if let Some(ref subspace) = state.current_subspace {
            if !subspace.is_empty() {
                let mut projected_grad = Array1::zeros(x.len());
                for basis_vec in subspace {
                    let projection = grad.dot(basis_vec);
                    projected_grad = projected_grad + projection * basis_vec;
                }
                projected_grad
            } else {
                grad.clone()
            }
        } else {
            grad.clone()
        };

        let grad_norm = search_direction.mapv(|x: f64| x.powi(2)).sum().sqrt();
        if grad_norm < options.tol {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                iterations: iter,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: Some(grad),
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }

        let (step_size, _) = backtracking_line_search(
            &mut |x_view| fun(x_view),
            &x.view(),
            best_f,
            &(-&search_direction).view(),
            &(-&grad).view(),
            1.0,
            1e-4,
            0.5,
            None,
        );

        let x_new = &x - step_size * &search_direction;
        let f_new = fun(&x_new.view());
        nfev += 1;

        if f_new < best_f - options.min_improvement {
            x = x_new;
            best_f = f_new;
        }
    }

    let final_grad = compute_finite_diff_gradient(&mut fun, &x, &mut nfev);

    Ok(OptimizeResult {
        x,
        fun: best_f,
        iterations: options.max_iter,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: Some(final_grad),
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

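/// Unified entry point that dispatches to the chosen [`SubspaceMethod`].
///
/// A minimal usage sketch (the `use` paths are illustrative assumptions;
/// adjust them to wherever this module is exported in your crate):
///
/// ```ignore
/// use ndarray::{array, ArrayView1};
/// // Assumed re-export location; adapt to your crate layout.
/// use your_crate::{minimize_subspace, SubspaceMethod, SubspaceOptions};
///
/// let fun = |x: &ArrayView1<f64>| x[0].powi(2) + x[1].powi(2);
/// let x0 = array![1.0, 1.0];
/// let result =
///     minimize_subspace(fun, x0, SubspaceMethod::CyclicalCoordinateDescent, None).unwrap();
/// assert!(result.fun < 1e-2);
/// ```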
pub fn minimize_subspace<F>(
    fun: F,
    x0: Array1<f64>,
    method: SubspaceMethod,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    match method {
        SubspaceMethod::RandomCoordinateDescent => {
            minimize_random_coordinate_descent(fun, x0, options)
        }
        SubspaceMethod::BlockCoordinateDescent => {
            minimize_block_coordinate_descent(fun, x0, options)
        }
        SubspaceMethod::RandomSubspace => minimize_random_subspace(fun, x0, options),
        SubspaceMethod::AdaptiveSubspace => minimize_adaptive_subspace(fun, x0, options),
        SubspaceMethod::CyclicalCoordinateDescent => {
            minimize_cyclical_coordinate_descent(fun, x0, options)
        }
    }
}

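/// Cyclical coordinate descent: sweep all coordinates in order, estimating
/// each partial derivative by central differences and line-searching along
/// any coordinate whose derivative exceeds the tolerance.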
pub fn minimize_cyclical_coordinate_descent<F>(
    mut fun: F,
    x0: Array1<f64>,
    options: Option<SubspaceOptions>,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let options = options.unwrap_or_default();
    let mut x = x0.clone();
    let mut nfev = 0;
    let n = x.len();

    let mut best_f = fun(&x.view());
    nfev += 1;

    for iter in 0..options.max_iter {
        let mut improved = false;

        for coord in 0..n {
            // Central-difference estimate of the partial derivative.
            let eps = 1e-6;
            let mut x_plus = x.clone();
            let mut x_minus = x.clone();
            x_plus[coord] += eps;
            x_minus[coord] -= eps;

            let f_plus = fun(&x_plus.view());
            let f_minus = fun(&x_minus.view());
            nfev += 2;

            let grad_coord = (f_plus - f_minus) / (2.0 * eps);

            if grad_coord.abs() > options.tol {
                // Move against the sign of the derivative.
                let direction = -grad_coord.signum();
                let step_size = find_step_size(&mut fun, &x, coord, direction, &mut nfev);

                if step_size > 0.0 {
                    x[coord] += direction * step_size;
                    let new_f = fun(&x.view());
                    nfev += 1;

                    if new_f < best_f - options.min_improvement {
                        improved = true;
                    }
                    if new_f < best_f {
                        best_f = new_f;
                    }
                }
            }
        }

        // A full sweep without meaningful progress: declare convergence.
        if !improved {
            return Ok(OptimizeResult {
                x,
                fun: best_f,
                iterations: iter,
                nit: iter,
                func_evals: nfev,
                nfev,
                jacobian: None,
                hessian: None,
                success: true,
                message: "Optimization terminated successfully.".to_string(),
            });
        }
    }

    Ok(OptimizeResult {
        x,
        fun: best_f,
        iterations: options.max_iter,
        nit: options.max_iter,
        func_evals: nfev,
        nfev,
        jacobian: None,
        hessian: None,
        success: false,
        message: "Maximum iterations reached.".to_string(),
    })
}

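/// One-dimensional step-size search along a single coordinate: first double
/// the step until the function stops improving, then refine around the best
/// step found in increments of a tenth of that step.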
fn find_step_size<F>(
    fun: &mut F,
    x: &Array1<f64>,
    coord: usize,
    direction: f64,
    nfev: &mut usize,
) -> f64
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let f0 = fun(&x.view());
    *nfev += 1;

    let mut step = 1.0;
    let mut best_step = 0.0;
    let mut best_f = f0;

    // Phase 1: double the step until the function stops improving.
    for _ in 0..10 {
        let mut x_new = x.clone();
        x_new[coord] += direction * step;
        let f_new = fun(&x_new.view());
        *nfev += 1;

        if f_new < best_f {
            best_f = f_new;
            best_step = step;
        } else {
            break;
        }

        step *= 2.0;
    }

    // Phase 2: refine around the best step in fractional increments.
    if best_step > 0.0 {
        step = best_step * 0.1;
        for _ in 0..5 {
            let mut x_new = x.clone();
            x_new[coord] += direction * step;
            let f_new = fun(&x_new.view());
            *nfev += 1;

            if f_new < best_f {
                best_f = f_new;
                best_step = step;
            }

            step += best_step * 0.1;
            if step > best_step * 2.0 {
                break;
            }
        }
    }

    best_step
}

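/// Optimize a contiguous block of coordinates [start_idx, end_idx) with
/// fixed-size finite-difference gradient steps, reverting any step that does
/// not improve on the best function value seen so far.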
fn optimize_block<F>(
    fun: &mut F,
    x: &mut Array1<f64>,
    start_idx: usize,
    end_idx: usize,
    options: &SubspaceOptions,
    nfev: &mut usize,
) -> Result<bool, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let mut improved = false;
    let block_size = end_idx - start_idx;

    // Track the best value seen so far so later steps cannot regress.
    let mut f_best = fun(&x.view());
    *nfev += 1;

    for _iter in 0..options.coord_max_iter {
        let mut block_improved = false;

        for i in 0..block_size {
            let coord_idx = start_idx + i;

            // Central-difference estimate of the partial derivative.
            let eps = 1e-6;
            let original_val = x[coord_idx];

            x[coord_idx] = original_val + eps;
            let f_plus = fun(&x.view());
            x[coord_idx] = original_val - eps;
            let f_minus = fun(&x.view());
            x[coord_idx] = original_val;
            *nfev += 2;

            let grad_coord = (f_plus - f_minus) / (2.0 * eps);

            if grad_coord.abs() > options.tol {
                // Fixed-size step against the sign of the derivative.
                let step = -0.01 * grad_coord.signum();
                x[coord_idx] += step;

                let f_new = fun(&x.view());
                *nfev += 1;

                if f_new < f_best {
                    f_best = f_new;
                    block_improved = true;
                    improved = true;
                } else {
                    // Revert a non-improving step.
                    x[coord_idx] = original_val;
                }
            }
        }

        if !block_improved {
            break;
        }
    }

    Ok(improved)
}

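/// Forward-difference gradient estimate: n + 1 function evaluations with a
/// fixed perturbation of 1e-8 (cheaper but less accurate than the central
/// differences used in the coordinate methods).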
fn compute_finite_diff_gradient<F>(fun: &mut F, x: &Array1<f64>, nfev: &mut usize) -> Array1<f64>
where
    F: FnMut(&ArrayView1<f64>) -> f64,
{
    let n = x.len();
    let mut grad = Array1::zeros(n);
    let eps = 1e-8;

    let f0 = fun(&x.view());
    *nfev += 1;

    for i in 0..n {
        let mut x_plus = x.clone();
        x_plus[i] += eps;
        let f_plus = fun(&x_plus.view());
        *nfev += 1;

        grad[i] = (f_plus - f0) / eps;
    }

    grad
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_abs_diff_eq;
    use ndarray::array;

    #[test]
    fn test_random_coordinate_descent() {
        let fun = |x: &ArrayView1<f64>| x.iter().map(|&xi| xi.powi(2)).sum::<f64>();

        let x0 = Array1::from_vec(vec![1.0; 10]);
        let options = SubspaceOptions {
            max_iter: 100,
            tol: 1e-6,
            coord_max_iter: 5,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_random_coordinate_descent(fun, x0, Some(options)).unwrap();

        assert!(result.success);
        for &xi in result.x.iter() {
            assert_abs_diff_eq!(xi, 0.0, epsilon = 1e-2);
        }
        assert!(result.fun < 1e-2);
    }

    #[test]
    fn test_block_coordinate_descent() {
        let fun = |x: &ArrayView1<f64>| {
            x.iter()
                .enumerate()
                .map(|(i, &xi)| (i + 1) as f64 * xi.powi(2))
                .sum::<f64>()
        };

        let x0 = Array1::from_vec(vec![1.0; 20]);
        let options = SubspaceOptions {
            max_iter: 50,
            block_size: 5,
            tol: 1e-6,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_block_coordinate_descent(fun, x0, Some(options)).unwrap();

        assert!(result.success);
        for &xi in result.x.iter() {
            assert_abs_diff_eq!(xi, 0.0, epsilon = 1e-2);
        }
    }

    #[test]
    fn test_cyclical_coordinate_descent() {
        let fun = |x: &ArrayView1<f64>| x[0].powi(2) + 2.0 * x[1].powi(2);

        let x0 = array![2.0, 2.0];
        let options = SubspaceOptions {
            max_iter: 50,
            tol: 1e-6,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_cyclical_coordinate_descent(fun, x0, Some(options)).unwrap();

        assert!(result.success);
        assert_abs_diff_eq!(result.x[0], 0.0, epsilon = 1e-2);
        assert_abs_diff_eq!(result.x[1], 0.0, epsilon = 1e-2);
        assert!(result.fun < 1e-2);
    }

    #[test]
    fn test_random_subspace() {
        let fun = |x: &ArrayView1<f64>| x.iter().map(|&xi| xi.powi(2)).sum::<f64>();

        let x0 = Array1::from_vec(vec![1.0; 50]);
        let options = SubspaceOptions {
            max_iter: 100,
            subspace_dim: 10,
            tol: 1e-3,
            seed: Some(42),
            ..Default::default()
        };

        let result = minimize_random_subspace(fun, x0, Some(options)).unwrap();

        // Weak sanity check: the method should at least not increase f from
        // its initial value of 50.
        assert!(result.fun <= 50.0);
    }

    #[test]
    fn test_subspace_method_enum() {
        let fun = |x: &ArrayView1<f64>| x[0].powi(2) + x[1].powi(2);
        let x0 = array![1.0, 1.0];
        let options = SubspaceOptions {
            max_iter: 20,
            tol: 1e-6,
            seed: Some(42),
            ..Default::default()
        };

        let methods = [
            SubspaceMethod::RandomCoordinateDescent,
            SubspaceMethod::BlockCoordinateDescent,
            SubspaceMethod::CyclicalCoordinateDescent,
            SubspaceMethod::RandomSubspace,
            SubspaceMethod::AdaptiveSubspace,
        ];

        for method in &methods {
            let result = minimize_subspace(fun, x0.clone(), *method, Some(options.clone()));
            assert!(result.is_ok(), "Method {:?} failed", method);
        }
    }
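
    // Hypothetical extra check, not part of the original suite: after
    // Gram-Schmidt, surviving basis vectors should be unit length and
    // mutually orthogonal.
    #[test]
    fn test_orthogonalize_basis() {
        let mut basis = vec![
            array![1.0, 0.0, 0.0],
            array![1.0, 1.0, 0.0],
            array![1.0, 1.0, 1.0],
        ];
        orthogonalize_basis(&mut basis);
        assert_eq!(basis.len(), 3);
        for i in 0..basis.len() {
            assert_abs_diff_eq!(basis[i].dot(&basis[i]).sqrt(), 1.0, epsilon = 1e-10);
            for j in (i + 1)..basis.len() {
                assert_abs_diff_eq!(basis[i].dot(&basis[j]), 0.0, epsilon = 1e-10);
            }
        }
    }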
}