use crate::error::OptimizeResult;
use crate::result::OptimizeResults;
use scirs2_core::ndarray::{Array1, Array2, ArrayBase, Data, Ix1};
use std::fmt;

/// Least-squares solver to use.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Method {
    /// Trust Region Reflective algorithm.
    TrustRegionReflective,
    /// Levenberg-Marquardt algorithm.
    LevenbergMarquardt,
    /// Dogleg algorithm with rectangular trust regions ("dogbox").
    Dogbox,
}

impl fmt::Display for Method {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Method::TrustRegionReflective => write!(f, "trf"),
            Method::LevenbergMarquardt => write!(f, "lm"),
            Method::Dogbox => write!(f, "dogbox"),
        }
    }
}

/// Options for the least-squares solvers.
#[derive(Debug, Clone)]
pub struct Options {
    /// Maximum number of function evaluations (defaults to `100 * n` when `None`).
    pub max_nfev: Option<usize>,
    /// Tolerance for termination by the change of the independent variables.
    pub xtol: Option<f64>,
    /// Tolerance for termination by the change of the cost function.
    pub ftol: Option<f64>,
    /// Tolerance for termination by the norm of the gradient.
    pub gtol: Option<f64>,
    /// Verbosity level.
    pub verbose: usize,
    /// Step size used for the finite-difference Jacobian approximation.
    pub diff_step: Option<f64>,
    /// Use a finite-difference approximation of the Jacobian.
    pub use_finite_diff: bool,
}

impl Default for Options {
    fn default() -> Self {
        Options {
            max_nfev: None,
            xtol: Some(1e-8),
            ftol: Some(1e-8),
            gtol: Some(1e-8),
            verbose: 0,
            diff_step: None,
            use_finite_diff: false,
        }
    }
}
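
// A minimal construction sketch: callers usually start from `Options::default()`
// and override individual fields with struct update syntax, e.g.
//
//     let opts = Options {
//         max_nfev: Some(200),
//         xtol: Some(1e-10),
//         ..Options::default()
//     };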

/// Solve a nonlinear least-squares problem.
///
/// Minimizes `0.5 * sum(residuals(x, data)^2)` starting from `x0` with the
/// selected `method`. When `jacobian` is `None`, the Jacobian is approximated
/// by forward finite differences.
#[allow(dead_code)]
pub fn least_squares<F, J, D, S1, S2>(
    residuals: F,
    x0: &ArrayBase<S1, Ix1>,
    method: Method,
    jacobian: Option<J>,
    data: &ArrayBase<S2, Ix1>,
    options: Option<Options>,
) -> OptimizeResult<OptimizeResults<f64>>
where
    F: Fn(&[f64], &[D]) -> Array1<f64>,
    J: Fn(&[f64], &[D]) -> Array2<f64>,
    D: Clone,
    S1: Data<Elem = f64>,
    S2: Data<Elem = D>,
{
    let options = options.unwrap_or_default();

    match method {
        Method::LevenbergMarquardt => least_squares_lm(residuals, x0, jacobian, data, &options),
        Method::TrustRegionReflective => least_squares_trf(residuals, x0, jacobian, data, &options),
        Method::Dogbox => least_squares_dogbox(residuals, x0, jacobian, data, &options),
    }
}
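
// A minimal calling sketch (illustrative only; the closure and the `[t, y]` data
// layout below are assumptions, not part of this module's API): fit `y ~ a + b * t`
// with data packed as `[t1, y1, t2, y2, ...]`.
//
//     let residual = |p: &[f64], d: &[f64]| -> Array1<f64> {
//         Array1::from_iter(d.chunks(2).map(|ty| p[0] + p[1] * ty[0] - ty[1]))
//     };
//     let data = array![0.0, 1.0, 1.0, 3.0, 2.0, 5.0];
//     let result = least_squares(
//         residual,
//         &array![0.0, 0.0].view(),
//         Method::LevenbergMarquardt,
//         None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//         &data.view(),
//         None,
//     )?;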

/// Levenberg-Marquardt implementation used for `Method::LevenbergMarquardt`.
#[allow(dead_code)]
fn least_squares_lm<F, J, D, S1, S2>(
    residuals: F,
    x0: &ArrayBase<S1, Ix1>,
    jacobian: Option<J>,
    data: &ArrayBase<S2, Ix1>,
    options: &Options,
) -> OptimizeResult<OptimizeResults<f64>>
where
    F: Fn(&[f64], &[D]) -> Array1<f64>,
    J: Fn(&[f64], &[D]) -> Array2<f64>,
    D: Clone,
    S1: Data<Elem = f64>,
    S2: Data<Elem = D>,
{
    // Tolerances and evaluation budget.
    let ftol = options.ftol.unwrap_or(1e-8);
    let xtol = options.xtol.unwrap_or(1e-8);
    let gtol = options.gtol.unwrap_or(1e-8);
    let max_nfev = options.max_nfev.unwrap_or(100 * x0.len());
    let eps = options.diff_step.unwrap_or(1e-8);

    // Initial point and residuals.
    let m = x0.len();
    let mut x = x0.to_owned();
    let mut res = residuals(
        x.as_slice().expect("Operation failed"),
        data.as_slice().expect("Operation failed"),
    );
    let n = res.len();

    // Cost: half the sum of squared residuals.
    let mut f = res.iter().map(|&r| r.powi(2)).sum::<f64>() / 2.0;

    // Evaluation and iteration counters.
    let mut nfev = 1;
    let mut njev = 0;
    let mut iter = 0;

    // Forward-difference approximation of the Jacobian.
    let compute_jac = |x_params: &[f64], curr_res_vals: &Array1<f64>| -> (Array2<f64>, usize) {
        let mut jac = Array2::zeros((n, m));
        let mut count = 0;

        for j in 0..m {
            // Perturb the j-th parameter and re-evaluate the residuals.
            let mut x_h = Vec::from(x_params);
            x_h[j] += eps;
            let res_h = residuals(&x_h, data.as_slice().expect("Operation failed"));
            count += 1;

            for i in 0..n {
                jac[[i, j]] = (res_h[i] - curr_res_vals[i]) / eps;
            }
        }

        (jac, count)
    };
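
    // Forward-difference sketch for the closure above: column `j` of the Jacobian is
    //     J[.., j] ~= (r(x + eps * e_j, data) - r(x, data)) / eps,
    // so each Jacobian approximation costs one residual evaluation per parameter.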

    // Initial Jacobian: analytic if provided, finite differences otherwise.
    let mut jac = match &jacobian {
        Some(jac_fn) => {
            let j = jac_fn(
                x.as_slice().expect("Operation failed"),
                data.as_slice().expect("Operation failed"),
            );
            njev += 1;
            j
        }
        None => {
            let (j, count) = compute_jac(x.as_slice().expect("Operation failed"), &res);
            nfev += count;
            j
        }
    };

    // Gradient of the cost: g = J^T r.
    let mut g = jac.t().dot(&res);

    // Damping parameter and its update factor.
    let mut lambda = 1e-3;
    let lambda_factor = 10.0;

    while iter < max_nfev {
        // Gradient convergence test.
        if g.iter().all(|&gi| gi.abs() < gtol) {
            break;
        }

        // Damped normal equations: (J^T J + lambda * diag(J^T J)) * step = -g.
        let mut jt_j = jac.t().dot(&jac);

        for i in 0..m {
            jt_j[[i, i]] += lambda * jt_j[[i, i]].max(1e-10);
        }

        let neg_g = -&g;

        let step = match solve(&jt_j, &neg_g) {
            Some(s) => s,
            None => {
                // Singular system: increase the damping and retry.
                lambda *= lambda_factor;
                continue;
            }
        };

        // Candidate point and its cost.
        let mut x_new = Array1::zeros(m);
        for i in 0..m {
            x_new[i] = x[i] + step[i];
        }

        let res_new = residuals(
            x_new.as_slice().expect("Operation failed"),
            data.as_slice().expect("Operation failed"),
        );
        nfev += 1;
        let f_new = res_new.iter().map(|&r| r.powi(2)).sum::<f64>() / 2.0;

        let actual_reduction = f - f_new;

        // Reduction predicted by the linearized model (kept for reference).
        let p1 = res.dot(&res);
        let res_pred = res.clone() + jac.dot(&step);
        let p2 = res_pred.dot(&res_pred);
        let _predicted_reduction = 0.5 * (p1 - p2);

        if actual_reduction > 0.0 {
            // Successful step: relax the damping and accept the new point.
            lambda /= lambda_factor;

            x = x_new;
            res = res_new;
            f = f_new;

            // Cost-reduction convergence test.
            if actual_reduction < ftol * f.abs() {
                break;
            }

            // Step-size convergence test.
            if step
                .iter()
                .all(|&s| s.abs() < xtol * (1.0 + x.iter().map(|&xi| xi.abs()).sum::<f64>()))
            {
                break;
            }

            // Recompute the Jacobian at the accepted point.
            let new_jac = match &jacobian {
                Some(jac_fn) => {
                    let j = jac_fn(
                        x.as_slice().expect("Operation failed"),
                        data.as_slice().expect("Operation failed"),
                    );
                    njev += 1;
                    j
                }
                None => {
                    let (j, count) = compute_jac(x.as_slice().expect("Operation failed"), &res);
                    nfev += count;
                    j
                }
            };

            jac = new_jac;

            g = jac.t().dot(&res);
        } else {
            // Unsuccessful step: increase the damping.
            lambda *= lambda_factor;
        }

        iter += 1;
    }

    let mut result = OptimizeResults::default();
    result.x = x.clone();
    result.fun = f;
    result.jac = if let Some(jac_fn) = &jacobian {
        let jac_array = jac_fn(
            x.as_slice().expect("Operation failed"),
            data.as_slice().expect("Operation failed"),
        );
        njev += 1;
        let (vec, _) = jac_array.into_raw_vec_and_offset();
        Some(vec)
    } else {
        let (vec, _) = jac.into_raw_vec_and_offset();
        Some(vec)
    };
    result.nfev = nfev;
    result.njev = njev;
    result.nit = iter;
    result.success = iter < max_nfev;

    if result.success {
        result.message = "Optimization terminated successfully.".to_string();
    } else {
        result.message = "Maximum number of evaluations reached.".to_string();
    }

    Ok(result)
}

/// Solve the linear system `a * x = b`, returning `None` if the solve fails.
#[allow(dead_code)]
fn solve(a: &Array2<f64>, b: &Array1<f64>) -> Option<Array1<f64>> {
    use scirs2_linalg::solve;

    solve(&a.view(), &b.view(), None).ok()
}

/// Trust-region implementation used for `Method::TrustRegionReflective`.
#[allow(dead_code)]
fn least_squares_trf<F, J, D, S1, S2>(
    residuals: F,
    x0: &ArrayBase<S1, Ix1>,
    jacobian: Option<J>,
    data: &ArrayBase<S2, Ix1>,
    options: &Options,
) -> OptimizeResult<OptimizeResults<f64>>
where
    F: Fn(&[f64], &[D]) -> Array1<f64>,
    J: Fn(&[f64], &[D]) -> Array2<f64>,
    D: Clone,
    S1: Data<Elem = f64>,
    S2: Data<Elem = D>,
{
    // Tolerances and evaluation budget.
    let ftol = options.ftol.unwrap_or(1e-8);
    let xtol = options.xtol.unwrap_or(1e-8);
    let gtol = options.gtol.unwrap_or(1e-8);
    let max_nfev = options.max_nfev.unwrap_or(100 * x0.len());
    let eps = options.diff_step.unwrap_or(1e-8);

    // Initial point and residuals.
    let m = x0.len();
    let mut x = x0.to_owned();
    let mut res = residuals(
        x.as_slice().expect("Operation failed"),
        data.as_slice().expect("Operation failed"),
    );
    let n = res.len();

    // Cost: half the sum of squared residuals.
    let mut f = res.iter().map(|&r| r.powi(2)).sum::<f64>() / 2.0;

    // Evaluation and iteration counters.
    let mut nfev = 1;
    let mut njev = 0;
    let mut iter = 0;

    let compute_jac = |x_params: &[f64], curr_res_vals: &Array1<f64>| -> (Array2<f64>, usize) {
        let mut jac = Array2::zeros((n, m));
        let mut count = 0;

        for j in 0..m {
            let mut x_h = Vec::from(x_params);
            x_h[j] += eps;
            let res_h = residuals(&x_h, data.as_slice().expect("Operation failed"));
            count += 1;

            for i in 0..n {
                jac[[i, j]] = (res_h[i] - curr_res_vals[i]) / eps;
            }
        }

        (jac, count)
    };

    // Initial Jacobian: analytic if provided, finite differences otherwise.
    let mut jac = match &jacobian {
        Some(jac_fn) => {
            let j = jac_fn(
                x.as_slice().expect("Operation failed"),
                data.as_slice().expect("Operation failed"),
            );
            njev += 1;
            j
        }
        None => {
            let (j, count) = compute_jac(x.as_slice().expect("Operation failed"), &res);
            nfev += count;
            j
        }
    };

    // Gradient of the cost: g = J^T r.
    let mut g = jac.t().dot(&res);

    // Initial trust-region radius.
    let mut delta = 100.0 * (1.0 + x.iter().map(|&xi| xi.abs()).sum::<f64>());

    while iter < max_nfev {
        // Gradient convergence test.
        if g.iter().all(|&gi| gi.abs() < gtol) {
            break;
        }

        let jt_j = jac.t().dot(&jac);

        // Dogleg step within the current trust region and its predicted reduction.
        let (step, predicted_reduction) = compute_trust_region_step(&jt_j, &g, delta);

        // Step-size convergence test.
        if step
            .iter()
            .all(|&s| s.abs() < xtol * (1.0 + x.iter().map(|&xi| xi.abs()).sum::<f64>()))
        {
            break;
        }

        let x_new = &x + &step;

        let res_new = residuals(
            x_new.as_slice().expect("Operation failed"),
            data.as_slice().expect("Operation failed"),
        );
        nfev += 1;
        let f_new = res_new.iter().map(|&r| r.powi(2)).sum::<f64>() / 2.0;

        let actual_reduction = f - f_new;

        // Agreement between the actual and predicted reductions.
        let rho = if predicted_reduction > 0.0 {
            actual_reduction / predicted_reduction
        } else {
            0.0
        };

        // Shrink or grow the trust region based on the agreement.
        if rho < 0.25 {
            delta *= 0.5;
        } else if rho > 0.75 && step.iter().map(|&s| s * s).sum::<f64>().sqrt() >= 0.9 * delta {
            delta *= 2.0;
        }

        // Accept the step only if the model agreement is good enough.
        if rho > 0.1 {
            x = x_new;
            res = res_new;
            f = f_new;

            // Cost-reduction convergence test.
            if actual_reduction < ftol * f.abs() {
                break;
            }

            // Recompute the Jacobian at the accepted point.
            let new_jac = match &jacobian {
                Some(jac_fn) => {
                    let j = jac_fn(
                        x.as_slice().expect("Operation failed"),
                        data.as_slice().expect("Operation failed"),
                    );
                    njev += 1;
                    j
                }
                None => {
                    let (j, count) = compute_jac(x.as_slice().expect("Operation failed"), &res);
                    nfev += count;
                    j
                }
            };

            jac = new_jac;

            g = jac.t().dot(&res);
        }

        iter += 1;
    }

    let mut result = OptimizeResults::default();
    result.x = x.clone();
    result.fun = f;
    result.jac = if let Some(jac_fn) = &jacobian {
        let jac_array = jac_fn(
            x.as_slice().expect("Operation failed"),
            data.as_slice().expect("Operation failed"),
        );
        njev += 1;
        let (vec, _) = jac_array.into_raw_vec_and_offset();
        Some(vec)
    } else {
        let (vec, _) = jac.into_raw_vec_and_offset();
        Some(vec)
    };
    result.nfev = nfev;
    result.njev = njev;
    result.nit = iter;
    result.success = iter < max_nfev;

    if result.success {
        result.message = "Optimization terminated successfully.".to_string();
    } else {
        result.message = "Maximum number of evaluations reached.".to_string();
    }

    Ok(result)
}

/// Compute a dogleg-style trust-region step and its predicted cost reduction.
#[allow(dead_code)]
fn compute_trust_region_step(
    jt_j: &Array2<f64>,
    g: &Array1<f64>,
    delta: f64,
) -> (Array1<f64>, f64) {
    let n = g.len();

    // Steepest-descent (negative gradient) direction.
    let sd_dir = -g;
    let sd_norm_sq = g.iter().map(|&gi| gi * gi).sum::<f64>();

    // Zero gradient: no step to take.
    if sd_norm_sq < 1e-10 {
        return (Array1::zeros(n), 0.0);
    }

    let sd_norm = sd_norm_sq.sqrt();

    // Steepest-descent step scaled to the trust-region boundary.
    let sd_step = &sd_dir * (delta / sd_norm);

    // Gauss-Newton step; fall back to steepest descent if J^T J is singular.
    let gn_step = match solve(jt_j, &sd_dir) {
        Some(step) => step,
        None => {
            let pred_red = -0.5 * g.dot(&sd_step);
            return (sd_step, pred_red);
        }
    };

    let gn_norm_sq = gn_step.iter().map(|&s| s * s).sum::<f64>();

    // Gauss-Newton step already inside the trust region: take it directly.
    if gn_norm_sq <= delta * delta {
        let predicted_reduction = -0.5 * g.dot(&gn_step);
        return (gn_step, predicted_reduction);
    }

    let gn_norm = gn_norm_sq.sqrt();

    // Quadratic in tau used to pick the interpolation point on the boundary.
    let sd_gn_dot = sd_dir.dot(&gn_step);
    let a = sd_norm_sq;
    let b = 3.0 * sd_gn_dot;
    let c = gn_norm_sq - delta * delta;

    if a < 1e-10 {
        let step = &gn_step * (delta / gn_norm);
        let predicted_reduction = -0.5 * g.dot(&step);
        return (step, predicted_reduction);
    }

    let tau = if b * b - 4.0 * a * c > 0.0 {
        (-b + (b * b - 4.0 * a * c).sqrt()) / (2.0 * a)
    } else {
        delta / sd_norm
    };

    let tau = tau.clamp(0.0, 1.0);

    // Blend the scaled steepest-descent and Gauss-Newton steps.
    let step = if tau < 1.0 {
        &sd_step * tau + &gn_step * (1.0 - tau)
    } else {
        &gn_step * (delta / gn_norm)
    };

    let predicted_reduction = -0.5 * g.dot(&step);

    (step, predicted_reduction)
}
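
// Model-reduction note: for the quadratic model m(p) = f + g'p + 0.5 * p'(J^T J)p the
// predicted decrease is -(g'p + 0.5 * p'(J^T J)p); for the exact Gauss-Newton step,
// where (J^T J)p = -g, this collapses to -0.5 * g'p, the shortcut that
// `compute_trust_region_step` applies to every step it returns.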

/// Dogbox implementation used for `Method::Dogbox`.
#[allow(dead_code)]
fn least_squares_dogbox<F, J, D, S1, S2>(
    residuals: F,
    x0: &ArrayBase<S1, Ix1>,
    jacobian: Option<J>,
    data: &ArrayBase<S2, Ix1>,
    options: &Options,
) -> OptimizeResult<OptimizeResults<f64>>
where
    F: Fn(&[f64], &[D]) -> Array1<f64>,
    J: Fn(&[f64], &[D]) -> Array2<f64>,
    D: Clone,
    S1: Data<Elem = f64>,
    S2: Data<Elem = D>,
{
    // Tolerances and evaluation budget.
    let ftol = options.ftol.unwrap_or(1e-8);
    let xtol = options.xtol.unwrap_or(1e-8);
    let gtol = options.gtol.unwrap_or(1e-8);
    let max_nfev = options.max_nfev.unwrap_or(100 * x0.len());
    let eps = options.diff_step.unwrap_or(1e-8);

    // Initial point and residuals.
    let m = x0.len();
    let mut x = x0.to_owned();
    let mut res = residuals(
        x.as_slice().expect("Operation failed"),
        data.as_slice().expect("Operation failed"),
    );
    let n = res.len();

    // Cost: half the sum of squared residuals.
    let mut f = res.iter().map(|&r| r.powi(2)).sum::<f64>() / 2.0;

    // Evaluation and iteration counters.
    let mut nfev = 1;
    let mut njev = 0;
    let mut iter = 0;

    // Box constraints (currently unbounded in both directions).
    let lb = Array1::from_elem(m, f64::NEG_INFINITY);
    let ub = Array1::from_elem(m, f64::INFINITY);

    // Trust-region radius and its limits.
    let mut delta = 1.0;
    let max_delta = 1e3;
    let min_delta = 1e-12;

    // Clip a point back into the bounds.
    let project_bounds = |x_vals: &mut Array1<f64>| {
        for i in 0..m {
            x_vals[i] = x_vals[i].max(lb[i]).min(ub[i]);
        }
    };

    // A variable is active when it sits on a bound and the descent direction points
    // out of the feasible box.
    let compute_active_set = |x_vals: &Array1<f64>, g_vals: &Array1<f64>| -> Vec<bool> {
        let mut active = vec![false; m];
        let boundary_tol = 1e-10;

        for i in 0..m {
            let at_lower = (x_vals[i] - lb[i]).abs() < boundary_tol && g_vals[i] > 0.0;
            let at_upper = (ub[i] - x_vals[i]).abs() < boundary_tol && g_vals[i] < 0.0;
            active[i] = at_lower || at_upper;
        }
        active
    };

    let compute_jac = |x_params: &[f64], curr_res_vals: &Array1<f64>| -> (Array2<f64>, usize) {
        let mut jac = Array2::zeros((n, m));
        let mut count = 0;

        for j in 0..m {
            let mut x_h = Vec::from(x_params);
            x_h[j] += eps;
            let res_h = residuals(&x_h, data.as_slice().expect("Operation failed"));
            count += 1;

            for i in 0..n {
                jac[[i, j]] = (res_h[i] - curr_res_vals[i]) / eps;
            }
        }

        (jac, count)
    };

    let mut jac = match &jacobian {
        Some(jac_fn) => {
            let j = jac_fn(
                x.as_slice().expect("Operation failed"),
                data.as_slice().expect("Operation failed"),
            );
            njev += 1;
            j
        }
        None => {
            let (j, count) = compute_jac(x.as_slice().expect("Operation failed"), &res);
            nfev += count;
            j
        }
    };

    let mut g = jac.t().dot(&res);

    // Early exit if the starting point already satisfies the gradient test.
    if g.iter().map(|&gi| gi.abs()).fold(0.0, f64::max) < gtol {
        let mut result = OptimizeResults::default();
        result.x = x.clone();
        result.fun = f;
        result.nfev = nfev;
        result.njev = njev;
        result.nit = iter;
        result.success = true;
        result.message = "Initial point satisfies convergence criteria.".to_string();
        return Ok(result);
    }

    while iter < max_nfev {
        let jt_j = jac.t().dot(&jac);

        // Variables pinned at a bound are excluded from the step computation.
        let active = compute_active_set(&x, &g);

        // Dogleg step restricted to the free variables and the trust region.
        let step = compute_dogbox_step(&jt_j, &g, &active, &lb, &ub, &x, delta);

        let mut x_new = &x + &step;
        project_bounds(&mut x_new);

        let res_new = residuals(
            x_new.as_slice().expect("Operation failed"),
            data.as_slice().expect("Operation failed"),
        );
        nfev += 1;

        let f_new = res_new.iter().map(|&r| r.powi(2)).sum::<f64>() / 2.0;

        // Reduction predicted by the quadratic model.
        let predicted_reduction = -g.dot(&step) - 0.5 * step.dot(&jt_j.dot(&step));

        let actual_reduction = f - f_new;

        // Agreement between the actual and predicted reductions.
        let rho = if predicted_reduction > 0.0 {
            actual_reduction / predicted_reduction
        } else {
            0.0
        };

        // Shrink or grow the trust region based on the agreement.
        if rho < 0.25 {
            delta *= 0.5;
        } else if rho > 0.75 && step.iter().map(|&s| s * s).sum::<f64>().sqrt() >= 0.9 * delta {
            delta = (2.0 * delta).min(max_delta);
        }

        // Accept the step only if the model agreement is good enough.
        if rho > 0.1 {
            x = x_new;
            res = res_new;
            f = f_new;

            // Cost-reduction convergence test.
            if actual_reduction < ftol * f.abs() {
                break;
            }

            // Step-size convergence test.
            if step.iter().map(|&s| s * s).sum::<f64>().sqrt() < xtol {
                break;
            }

            // Recompute the Jacobian at the accepted point.
            let new_jac = match &jacobian {
                Some(jac_fn) => {
                    let j = jac_fn(
                        x.as_slice().expect("Operation failed"),
                        data.as_slice().expect("Operation failed"),
                    );
                    njev += 1;
                    j
                }
                None => {
                    let (j, count) = compute_jac(x.as_slice().expect("Operation failed"), &res);
                    nfev += count;
                    j
                }
            };

            jac = new_jac;

            g = jac.t().dot(&res);

            // Gradient convergence test.
            if g.iter().map(|&gi| gi.abs()).fold(0.0, f64::max) < gtol {
                break;
            }
        }

        // Give up once the trust region has collapsed.
        if delta < min_delta {
            break;
        }

        iter += 1;
    }

    let mut result = OptimizeResults::default();
    result.x = x.clone();
    result.fun = f;
    result.jac = if let Some(jac_fn) = &jacobian {
        let jac_array = jac_fn(
            x.as_slice().expect("Operation failed"),
            data.as_slice().expect("Operation failed"),
        );
        njev += 1;
        let (vec, _) = jac_array.into_raw_vec_and_offset();
        Some(vec)
    } else {
        let (vec, _) = jac.into_raw_vec_and_offset();
        Some(vec)
    };
    result.nfev = nfev;
    result.njev = njev;
    result.nit = iter;
    result.success = iter < max_nfev && delta >= min_delta;

    if result.success {
        result.message = "Optimization terminated successfully.".to_string();
    } else if iter >= max_nfev {
        result.message = "Maximum number of evaluations reached.".to_string();
    } else {
        result.message = "Trust region became too small.".to_string();
    }

    Ok(result)
}
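
// Note: `least_squares_dogbox` above fixes `lb`/`ub` at negative/positive infinity, so
// the active set stays empty and the search currently behaves like an unconstrained
// dogleg; finite box constraints would enter through those arrays and the helpers below.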

/// Compute a dogleg step over the free (non-active) variables, respecting the bounds.
#[allow(clippy::too_many_arguments)]
#[allow(dead_code)]
fn compute_dogbox_step(
    jt_j: &Array2<f64>,
    g: &Array1<f64>,
    active: &[bool],
    lb: &Array1<f64>,
    ub: &Array1<f64>,
    x: &Array1<f64>,
    delta: f64,
) -> Array1<f64> {
    let n = g.len();

    // Indices of the variables that are free to move.
    let free_vars: Vec<usize> = (0..n).filter(|&i| !active[i]).collect();

    // Every variable is pinned at a bound: no step.
    if free_vars.is_empty() {
        return Array1::zeros(n);
    }

    // Restrict the gradient and J^T J to the free variables.
    let g_free = Array1::from_vec(free_vars.iter().map(|&i| g[i]).collect());
    let mut jt_j_free = Array2::zeros((free_vars.len(), free_vars.len()));

    for (i, &fi) in free_vars.iter().enumerate() {
        for (j, &fj) in free_vars.iter().enumerate() {
            jt_j_free[[i, j]] = jt_j[[fi, fj]];
        }
    }

    // Steepest-descent direction on the free variables.
    let sd_dir_free = -&g_free;
    let sd_norm_sq = g_free.iter().map(|&gi| gi * gi).sum::<f64>();

    if sd_norm_sq < 1e-15 {
        return Array1::zeros(n);
    }

    let sd_norm = sd_norm_sq.sqrt();

    // Gauss-Newton step; fall back to scaled steepest descent if J^T J is singular.
    let gn_step_free = match solve(&jt_j_free, &sd_dir_free) {
        Some(step) => step,
        None => {
            let step_free = &sd_dir_free * (delta / sd_norm);
            let mut step = Array1::zeros(n);
            for (i, &fi) in free_vars.iter().enumerate() {
                step[fi] = step_free[i];
            }
            return bound_step(&step, x, lb, ub, delta);
        }
    };

    let gn_norm_sq = gn_step_free.iter().map(|&s| s * s).sum::<f64>();

    // Gauss-Newton step already fits inside the trust region: take it directly.
    if gn_norm_sq <= delta * delta {
        let mut step = Array1::zeros(n);
        for (i, &fi) in free_vars.iter().enumerate() {
            step[fi] = gn_step_free[i];
        }
        return bound_step(&step, x, lb, ub, delta);
    }

    let gn_norm = gn_norm_sq.sqrt();
    let sd_step_free = &sd_dir_free * (delta / sd_norm);

    // Quadratic in tau used to pick the interpolation point on the boundary.
    let sd_gn_dot = sd_dir_free.dot(&gn_step_free);
    let a = sd_norm_sq;
    let b = 3.0 * sd_gn_dot;
    let c = gn_norm_sq - delta * delta;

    let tau = if a > 1e-15 && b * b - 4.0 * a * c > 0.0 {
        (-b + (b * b - 4.0 * a * c).sqrt()) / (2.0 * a)
    } else {
        delta / sd_norm
    };

    let tau = tau.clamp(0.0, 1.0);

    // Blend the scaled steepest-descent and Gauss-Newton steps.
    let step_free = if tau < 1.0 {
        &sd_step_free * tau + &gn_step_free * (1.0 - tau)
    } else {
        &gn_step_free * (delta / gn_norm)
    };

    // Scatter the free-variable step back to the full dimension.
    let mut step = Array1::zeros(n);
    for (i, &fi) in free_vars.iter().enumerate() {
        step[fi] = step_free[i];
    }

    bound_step(&step, x, lb, ub, delta)
}

/// Clip a step so the new point stays inside the bounds and the trust region.
#[allow(dead_code)]
fn bound_step(
    step: &Array1<f64>,
    x: &Array1<f64>,
    lb: &Array1<f64>,
    ub: &Array1<f64>,
    delta: f64,
) -> Array1<f64> {
    let mut bounded_step = step.clone();

    // Clip each coordinate so that x + step stays within [lb, ub].
    for i in 0..step.len() {
        let x_new = x[i] + bounded_step[i];
        if x_new < lb[i] {
            bounded_step[i] = lb[i] - x[i];
        } else if x_new > ub[i] {
            bounded_step[i] = ub[i] - x[i];
        }
    }

    // Rescale if the clipped step still exceeds the trust-region radius.
    let step_norm = bounded_step.iter().map(|&s| s * s).sum::<f64>().sqrt();
    if step_norm > delta {
        bounded_step *= delta / step_norm;
    }

    bounded_step
}

#[cfg(test)]
mod tests {
    use super::*;
    use scirs2_core::ndarray::array;

    // Residuals of the linear system x0 + 2*x1 = 2, x0 + x1 = 1 (solution x = [0, 1]).
    fn residual(x: &[f64], _: &[f64]) -> Array1<f64> {
        array![x[0] + 2.0 * x[1] - 2.0, x[0] + x[1] - 1.0]
    }

    fn jacobian(_x: &[f64], _: &[f64]) -> Array2<f64> {
        array![[1.0, 2.0], [1.0, 1.0]]
    }

    #[test]
    fn test_least_squares_placeholder() {
        let x0 = array![0.0, 0.0];
        let data = array![];

        // Allow only a single function evaluation so the solver cannot converge.
        let options = Options {
            max_nfev: Some(1),
            ..Options::default()
        };

        let result = least_squares(
            residual,
            &x0.view(),
            Method::LevenbergMarquardt,
            Some(jacobian),
            &data.view(),
            Some(options),
        )
        .expect("Operation failed");

        // With the evaluation budget exhausted, the run must report failure.
        assert!(!result.success);

        // The starting point is not a solution, so the cost is strictly positive.
        assert!(result.fun > 0.0);

        // The Jacobian is always returned.
        assert!(result.jac.is_some());
    }

    #[test]
    fn test_levenberg_marquardt() {
        // Start away from the solution of the linear system (x = [0, 1]).
        let x0 = array![-1.0, -1.0];
        let data = array![];

        let options = Options {
            max_nfev: Some(100),
            xtol: Some(1e-6),
            ftol: Some(1e-6),
            ..Options::default()
        };

        let result = least_squares(
            residual,
            &x0.view(),
            Method::LevenbergMarquardt,
            Some(jacobian),
            &data.view(),
            Some(options),
        )
        .expect("Operation failed");

        assert!(result.success);

        // The solver should recover the exact solution of the linear system.
        assert!((result.x[0] - 0.0).abs() < 1e-4);
        assert!((result.x[1] - 1.0).abs() < 1e-4);

        // The residual cost at the solution is essentially zero.
        assert!(result.fun < 1e-8);

        println!(
            "LM result: x = {:?}, f = {}, iterations = {}",
            result.x, result.fun, result.nit
        );
    }

    #[test]
    fn test_trust_region_reflective() {
        let x0 = array![0.0, 0.0];
        let data = array![];

        let options = Options {
            max_nfev: Some(1000),
            xtol: Some(1e-5),
            ftol: Some(1e-5),
            ..Options::default()
        };

        let result = least_squares(
            residual,
            &x0.view(),
            Method::TrustRegionReflective,
            Some(jacobian),
            &data.view(),
            Some(options),
        )
        .expect("Operation failed");

        // Compare against the cost at the starting point.
        let initial_resid = residual(&[0.0, 0.0], &[]);
        let initial_value = 0.5 * initial_resid.iter().map(|&r| r * r).sum::<f64>();

        println!(
            "TRF result: x = {:?}, f = {}, initial = {}, iterations = {}",
            result.x, result.fun, initial_value, result.nit
        );

        // TRF should at least not increase the cost (or report success).
        assert!(result.fun <= initial_value || result.success);
    }

    #[test]
    fn test_rosenbrock_least_squares() {
        // Rosenbrock in least-squares form; the minimum is at x = [1, 1].
        fn rosenbrock_residual(x: &[f64], _: &[f64]) -> Array1<f64> {
            array![10.0 * (x[1] - x[0].powi(2)), 1.0 - x[0]]
        }

        fn rosenbrock_jacobian(x: &[f64], _: &[f64]) -> Array2<f64> {
            array![[-20.0 * x[0], 10.0], [-1.0, 0.0]]
        }

        // Use different starting points for the two methods.
        let x0_lm = array![-1.2, 1.0];
        let x0_trf = array![0.5, 0.5];
        let data = array![];

        let options_common = Options {
            xtol: Some(1e-6),
            ftol: Some(1e-6),
            ..Options::default()
        };

        let options_trf = Options {
            max_nfev: Some(300),
            ..options_common.clone()
        };

        let options_lm = Options {
            max_nfev: Some(1000),
            ..options_common
        };

        let result_trf = least_squares(
            rosenbrock_residual,
            &x0_trf.view(),
            Method::TrustRegionReflective,
            Some(rosenbrock_jacobian),
            &data.view(),
            Some(options_trf),
        )
        .expect("Operation failed");

        let result_lm = least_squares(
            rosenbrock_residual,
            &x0_lm.view(),
            Method::LevenbergMarquardt,
            Some(rosenbrock_jacobian),
            &data.view(),
            Some(options_lm),
        )
        .expect("Operation failed");

        println!(
            "TRF Rosenbrock: x = {:?}, f = {}, iterations = {}",
            result_trf.x, result_trf.fun, result_trf.nit
        );
        println!(
            "LM Rosenbrock: x = {:?}, f = {}, iterations = {}",
            result_lm.x, result_lm.fun, result_lm.nit
        );

        let initial_resid_trf = rosenbrock_residual(&[0.5, 0.5], &[]);
        let initial_value_trf = 0.5 * initial_resid_trf.iter().map(|&r| r * r).sum::<f64>();
        println!("TRF initial value: {}", initial_value_trf);

        // TRF only needs to make reasonable progress; LM should converge to [1, 1].
        assert!(result_trf.fun < 100.0);
        assert!(result_lm.success);
        assert!((result_lm.x[0] - 1.0).abs() < 1e-2);
        assert!((result_lm.x[1] - 1.0).abs() < 1e-2);
    }
}