ceres_solver/curve_fit.rs
1//! Wrapping [NllsProblem](crate::nlls_problem::NllsProblem) for 1-D curve fit problems.
2//!
3//! [CurveFitProblem1D] wraps [NllsProblem](crate::nlls_problem::NllsProblem) to give a simpler
4//! interface for the problem of 1-D curve fitting: optimizing parameters of a function boxed into
5//! [CurveFunctionType] for given `x`, `y` and optionally inverse y error values. This approach
6//! also simplifies parameter usage, assuming that the function depends on a single parameter
7//! only.
8
9use crate::cost::CostFunctionType;
10use crate::error::CurveFitProblemBuildError;
11use crate::loss::LossFunction;
12use crate::nlls_problem::{NllsProblem, NllsProblemSolution};
13use crate::parameter_block::ParameterBlock;
14use crate::solver::{SolverOptions, SolverSummary};
15use crate::types::Either;
16
/// Type of a boxed 1-D curve model function: `f(x, parameters, &mut y, jacobians) -> bool`.
///
/// See [CurveFitProblem1D::new] for the detailed description of the arguments. The function must
/// return [false] if it cannot calculate the value or the Jacobian, and [true] otherwise.
pub type CurveFunctionType = Box<dyn Fn(f64, &[f64], &mut f64, Option<&mut [Option<f64>]>) -> bool>;
18
/// A wrapper for [NllsProblem] providing easier interface to solve an 1-D multiparameter curve fit
/// problem. Use it in two steps: create a new instance with [CurveFitProblem1D::new] or
/// [CurveFitProblem1D::builder] and then call a destructive method [CurveFitProblem1D::solve]
/// to get a solution.
pub struct CurveFitProblem1D<'cost>(NllsProblem<'cost>);
24
25impl<'cost> CurveFitProblem1D<'cost> {
26 /// Creates a new instance of the `CurveFitProblem1D`. If you need more control over the problem
27 /// use [CurveFitProblem1D::builder] instead.
28 ///
29 /// # Arguments
30 /// - func - a function describing a curve. It must return [false] if it cannot calculate
31 /// Jacobian, or [true] otherwise. It accepts the following parameters:
32 /// - x - an independent coordinate.
33 /// - parameters - a slice for the current value of the problem parameters. Note, that unlike
34 /// [NllsProblem] it is a 1-D slice.
35 /// - y - a mutable reference to output the function value.
36 /// - jacobians - an output Jacobian matrix, it (or any of its component) can be [None], which
37 /// means that the solver doesn't need it. Otherwise it has a 2-D shape, the top index
38 /// corresponds to a parameter component, the bottom index corresponds to a data point.
39 /// So the top-level slice inside [Some] has length of `parameters.len()`, while inner
40 /// slices have the same length as `x` and `y`.
41 /// - x - independent coordinate values of data poitns.
42 /// - y - values of data points.
43 /// - parameters - a vector of the initial parameters. Note that unlike [NllsProblem] it is a
44 /// 1-D vector of [f64].
45 /// - loss - optional loss function.
46 ///
47 /// # Panics
48 /// Panics if `x` and `y` have different sizes.
49 pub fn new(
50 func: impl Into<CurveFunctionType>,
51 x: &'cost [f64],
52 y: &'cost [f64],
53 parameters: &[f64],
54 ) -> Self {
55 assert_eq!(x.len(), y.len());
56 let nlls_parameters: Vec<_> = parameters.iter().map(|&x| vec![x]).collect();
57 let (problem, _block_id) = NllsProblem::new()
58 .residual_block_builder()
59 .set_cost(Self::cost_function(x, y, None, func.into()), x.len())
60 .set_parameters(nlls_parameters)
61 .build_into_problem()
62 .unwrap();
63 Self(problem)
64 }
65
66 /// Create a [CurveFitProblem1DBuilder] instance, see its docs for the details.
67 pub fn builder<'param>() -> CurveFitProblem1DBuilder<'cost, 'param> {
68 CurveFitProblem1DBuilder::new()
69 }
70
71 fn cost_function(
72 x: &'cost [f64],
73 y: &'cost [f64],
74 inv_err: Option<&'cost [f64]>,
75 curve_func: CurveFunctionType,
76 ) -> CostFunctionType<'cost> {
77 let n_obs = x.len();
78 Box::new(move |parameters, residuals, mut jacobians| {
79 let mut result = true;
80 let mut f = 0.0;
81 let mut jac: Option<Vec<Option<f64>>> = jacobians.as_ref().map(|jacobians| {
82 jacobians
83 .iter()
84 .map(|der| der.as_ref().map(|_| 0.0))
85 .collect()
86 });
87 let parameters: Vec<_> = parameters.iter().map(|x| x[0]).collect();
88 for ((((i, &x), &y), &inv_err), residual) in (0..n_obs)
89 .zip(x.iter())
90 .zip(y.iter())
91 .zip(match inv_err {
92 Some(inv_err) => Either::Left(inv_err.iter()),
93 None => Either::Right(std::iter::repeat(&1.0)),
94 })
95 .zip(residuals.iter_mut())
96 {
97 result = curve_func(x, ¶meters, &mut f, jac.as_mut().map(|d| &mut d[..]));
98 *residual = inv_err * (y - f);
99 if let Some(jacobians) = jacobians.as_mut() {
100 for (d_in, d_out) in jac.as_ref().unwrap().iter().zip(jacobians.iter_mut()) {
101 if let Some(d_out) = d_out.as_mut() {
102 d_out[i][0] = -inv_err * d_in.unwrap();
103 }
104 }
105 }
106 }
107 result
108 })
109 }
110
111 /// Solves the problem and returns a solution for the parameters.
112 pub fn solve(self, options: &SolverOptions) -> CurveFitProblemSolution {
113 // We know that we have well-defined problem, so we can unwrap
114 let NllsProblemSolution {
115 parameters: nlls_parameters,
116 summary,
117 } = self.0.solve(options).unwrap();
118 // All parameters are 1D - compress to a single vector
119 let parameters = nlls_parameters.into_iter().map(|x| x[0]).collect();
120 CurveFitProblemSolution {
121 parameters,
122 summary,
123 }
124 }
125}
126
/// A solution for [CurveFitProblem1D], returned by [CurveFitProblem1D::solve].
pub struct CurveFitProblemSolution {
    /// A vector of the solution parameters, in the same order as the initial guess.
    pub parameters: Vec<f64>,
    /// Solver summary.
    pub summary: SolverSummary,
}
134
/// Builder for [CurveFitProblem1D].
///
/// # Examples
///
/// ## Loss function and data point errors
///
/// Fit linear function `y = a * x + b` to the data points:
///
/// ```rust
/// use ceres_solver::curve_fit::{CurveFitProblem1D, CurveFunctionType};
/// use ceres_solver::loss::LossFunction;
/// use ceres_solver::solver::SolverOptions;
///
/// // Linear model
/// fn model(
///     x: f64,
///     parameters: &[f64],
///     y: &mut f64,
///     jacobians: Option<&mut [Option<f64>]>,
/// ) -> bool {
///     let &[a, b]: &[f64; 2] = parameters.try_into().unwrap();
///     *y = a * x + b;
///     if let Some(jacobians) = jacobians {
///         let [d_da, d_db]: &mut [Option<f64>; 2] = jacobians.try_into().unwrap();
///         if let Some(d_da) = d_da {
///             *d_da = x;
///         }
///         if let Some(d_db) = d_db {
///             *d_db = 1.0;
///         }
///     }
///     true
/// }
///
/// let a = 3.0;
/// let b = -2.0;
/// let x: Vec<_> = (0..100).map(|i| i as f64).collect();
/// let y: Vec<_> = x.iter().map(|&x| a * x + b).collect();
/// // optional data points inverse errors, assumed to be positive
/// let inverse_error: Vec<_> = x.iter().map(|&x| (x + 1.0) / 100.0).collect();
///
/// let func: CurveFunctionType = Box::new(model);
/// let problem = CurveFitProblem1D::builder()
///     // Model function
///     .func(func)
///     // Initial parameter guess
///     .parameters(&[1.0, 0.0])
///     // Data points, inverse errors are optional, if not given unity errors are assumed.
///     .x(&x)
///     .y(&y)
///     .inverse_error(&inverse_error)
///     // Loss function is optional, if not given trivial loss is assumed.
///     .loss(LossFunction::cauchy(1.0))
///     .build()
///     .unwrap();
/// let solution = problem.solve(&SolverOptions::default());
///
/// println!("{}", solution.summary.full_report());
///
/// assert!(f64::abs(a - solution.parameters[0]) < 1e-8);
/// assert!(f64::abs(b - solution.parameters[1]) < 1e-8);
/// ```
///
/// ## Constant parameters
///
/// Sometimes it is useful to fix some parameters and optimize only the rest. Let's consider the
/// curve `y = a * x^k + b` and compare two cases: when `k` is unity and when it is optimized.
///
/// ```rust
/// use ceres_solver::curve_fit::{CurveFitProblem1D, CurveFunctionType};
/// use ceres_solver::SolverOptions;
///
/// fn model(
///     x: f64,
///     parameters: &[f64],
///     y: &mut f64,
///     jacobians: Option<&mut [Option<f64>]>,
/// ) -> bool {
///     let &[k, a, b]: &[f64; 3] = parameters.try_into().unwrap();
///     *y = a * x.powf(k) + b;
///     if let Some(jacobians) = jacobians {
///         let [d_dk, d_da, d_db]: &mut [Option<f64>; 3] = jacobians.try_into().unwrap();
///         if let Some(d_dk) = d_dk {
///             *d_dk = a * x.powf(k) * x.ln();
///         }
///         if let Some(d_da) = d_da {
///             *d_da = x.powf(k);
///         }
///         if let Some(d_db) = d_db {
///             *d_db = 1.0;
///         }
///     }
///     true
/// }
///
/// let true_a = 3.0;
/// let true_b = -2.0;
/// let true_k = 2.0;
/// let fixed_k = 1.0;
/// assert_ne!(true_k, fixed_k);
///
/// // Generate data
/// let x: Vec<_> = (1..101).map(|i| i as f64 / 100.0).collect();
/// let y: Vec<_> = x
///     .iter()
///     .map(|&x| {
///         let mut y = 0.0;
///         model(x, &[true_k, true_a, true_b], &mut y, None);
///         y
///     })
///     .collect();
///
/// let func: CurveFunctionType = Box::new(model);
/// let solution_variable_k = CurveFitProblem1D::builder()
///     .func(func)
///     .parameters(&[1.0, 1.0, 1.0])
///     .x(&x)
///     .y(&y)
///     .build()
///     .unwrap()
///     .solve(&SolverOptions::default());
/// assert!((true_k - solution_variable_k.parameters[0]).abs() < 1e-8);
/// assert!((true_a - solution_variable_k.parameters[1]).abs() < 1e-8);
/// assert!((true_b - solution_variable_k.parameters[2]).abs() < 1e-8);
///
/// let func: CurveFunctionType = Box::new(model);
/// let solution_fixed_k_1 = CurveFitProblem1D::builder()
///     .func(func)
///     .parameters(&[fixed_k, 1.0, 1.0])
///     .constant(&[0]) // indexes of the fixed parameters
///     .x(&x)
///     .y(&y)
///     .build()
///     .unwrap()
///     .solve(&SolverOptions::default());
/// assert!((fixed_k - solution_fixed_k_1.parameters[0]).abs() < 1e-8);
///
/// assert!(solution_variable_k.summary.final_cost() < solution_fixed_k_1.summary.final_cost());
/// ```
pub struct CurveFitProblem1DBuilder<'cost, 'param> {
    /// Model function
    pub func: Option<CurveFunctionType>,
    /// Independent coordinates for data
    pub x: Option<&'cost [f64]>,
    /// Values for data
    pub y: Option<&'cost [f64]>,
    /// Optional inverse errors - square root of the weight
    pub inverse_error: Option<&'cost [f64]>,
    /// Initial parameters' guess
    pub parameters: Option<&'param [f64]>,
    /// Optional lower bounds for parameters
    pub lower_bounds: Option<&'param [Option<f64>]>,
    /// Optional upper bounds for parameters
    pub upper_bounds: Option<&'param [Option<f64>]>,
    /// Constant parameters, they will not be optimized.
    pub constant_parameters: Option<&'param [usize]>,
    /// Optional loss function
    pub loss: Option<LossFunction>,
}
295
296impl<'cost, 'param> CurveFitProblem1DBuilder<'cost, 'param> {
297 pub fn new() -> Self {
298 Self {
299 func: None,
300 x: None,
301 y: None,
302 inverse_error: None,
303 parameters: None,
304 lower_bounds: None,
305 upper_bounds: None,
306 constant_parameters: None,
307 loss: None,
308 }
309 }
310
311 /// Add model function.
312 pub fn func(mut self, func: impl Into<CurveFunctionType>) -> Self {
313 self.func = Some(func.into());
314 self
315 }
316
317 /// Add independent parameter values for the data points.
318 pub fn x(mut self, x: &'cost [f64]) -> Self {
319 self.x = Some(x);
320 self
321 }
322
323 /// Add values for the data points.
324 pub fn y(mut self, y: &'cost [f64]) -> Self {
325 self.y = Some(y);
326 self
327 }
328
329 /// Add optional inverse errors for the data points. They must to be positive: think about them
330 /// as the inverse y's uncertainties, or square root of the data point weight. The residual
331 /// would be `(y - model(x)) * inverse_error`. If not given, unity valueas are assumed.
332 pub fn inverse_error(mut self, inv_err: &'cost [f64]) -> Self {
333 self.inverse_error = Some(inv_err);
334 self
335 }
336
337 /// Add initial parameter guess slice, it is borrowed until [CurveFitProblem1DBuilder::build()]
338 /// call only, there it will be copied to the [CurveFitProblem1D] instance.
339 pub fn parameters(mut self, parameters: &'param [f64]) -> Self {
340 self.parameters = Some(parameters);
341 self
342 }
343
344 /// Add optional lower bounds for parameters, in the same order as parameters themselves. If not
345 /// given, no lower bounds are assumed. If some parameter has no lower bound, use [None].
346 pub fn lower_bounds(mut self, lower_bounds: &'param [Option<f64>]) -> Self {
347 self.lower_bounds = Some(lower_bounds);
348 self
349 }
350
351 /// Add optional upper bounds for parameters, in the same order as parameters themselves. If not
352 /// given, no upper bounds are assumed. If some parameter has no upper bound, use [None].
353 pub fn upper_bounds(mut self, upper_bounds: &'param [Option<f64>]) -> Self {
354 self.upper_bounds = Some(upper_bounds);
355 self
356 }
357
358 /// Make parameters constant, i.e. they will not be fitted.
359 pub fn constant(mut self, indexes: &'param [usize]) -> Self {
360 self.constant_parameters = Some(indexes);
361 self
362 }
363
364 /// Add optional loss function, if not given the trivial loss is assumed.
365 pub fn loss(mut self, loss: LossFunction) -> Self {
366 self.loss = Some(loss);
367 self
368 }
369
370 /// Build the [CurveFitProblem1D] instance. Returns [Err] if one of the mandatory fields is
371 /// missed or data slices have inconsistent lengths.
372 pub fn build(self) -> Result<CurveFitProblem1D<'cost>, CurveFitProblemBuildError> {
373 let func = self.func.ok_or(CurveFitProblemBuildError::FuncMissed)?;
374 let x = self.x.ok_or(CurveFitProblemBuildError::XMissed)?;
375 let y = self.y.ok_or(CurveFitProblemBuildError::YMissed)?;
376 let n_obs = x.len();
377 if n_obs != y.len() {
378 return Err(CurveFitProblemBuildError::DataSizesDontMatch);
379 }
380 if let Some(inverse_error) = self.inverse_error {
381 if inverse_error.len() != n_obs {
382 return Err(CurveFitProblemBuildError::DataSizesDontMatch);
383 }
384 }
385 let mut nlls_parameters: Vec<ParameterBlock> = self
386 .parameters
387 .ok_or(CurveFitProblemBuildError::ParametersMissed)?
388 .iter()
389 .map(|&p| vec![p].into())
390 .collect();
391 if let Some(lower_bounds) = self.lower_bounds {
392 if lower_bounds.len() != nlls_parameters.len() {
393 return Err(CurveFitProblemBuildError::LowerBoundarySizeMismatch);
394 }
395 for (i, &lb) in lower_bounds.iter().enumerate() {
396 if let Some(lb) = lb {
397 nlls_parameters[i].set_lower_bounds(vec![Some(lb)]);
398 }
399 }
400 }
401 // TODO: upper bounds
402 let mut residual_block = NllsProblem::new().residual_block_builder().set_cost(
403 CurveFitProblem1D::cost_function(x, y, self.inverse_error, func),
404 n_obs,
405 );
406 if let Some(loss) = self.loss {
407 residual_block = residual_block.set_loss(loss);
408 }
409 let (mut problem, _block_id) = residual_block
410 .set_parameters(nlls_parameters)
411 .build_into_problem()
412 .unwrap();
413 if let Some(indexes) = self.constant_parameters {
414 for &i_param in indexes {
415 problem.set_parameter_block_constant(i_param)?;
416 }
417 }
418 Ok(CurveFitProblem1D(problem))
419 }
420}
421
422impl Default for CurveFitProblem1DBuilder<'_, '_> {
423 fn default() -> Self {
424 Self::new()
425 }
426}
427
#[cfg(test)]
mod tests {
    use super::*;

    use crate::LossFunctionType;

    use approx::assert_abs_diff_eq;
    use rand::{Rng, SeedableRng};

    /// Fits `y = exp(m * x + c)` to the classical Ceres curve-fitting data set and checks the
    /// solution against the known reference values (m ~ 0.3, c ~ 0.1).
    fn curve_fit_problem_1d(loss: Option<LossFunction>) -> Vec<f64> {
        // Interleaved (x, y) pairs.
        let (x, y): (Vec<_>, Vec<_>) = [
            0.000000e+00,
            1.133898e+00,
            7.500000e-02,
            1.334902e+00,
            1.500000e-01,
            1.213546e+00,
            2.250000e-01,
            1.252016e+00,
            3.000000e-01,
            1.392265e+00,
            3.750000e-01,
            1.314458e+00,
            4.500000e-01,
            1.472541e+00,
            5.250000e-01,
            1.536218e+00,
            6.000000e-01,
            1.355679e+00,
            6.750000e-01,
            1.463566e+00,
            7.500000e-01,
            1.490201e+00,
            8.250000e-01,
            1.658699e+00,
            9.000000e-01,
            1.067574e+00,
            9.750000e-01,
            1.464629e+00,
            1.050000e+00,
            1.402653e+00,
            1.125000e+00,
            1.713141e+00,
            1.200000e+00,
            1.527021e+00,
            1.275000e+00,
            1.702632e+00,
            1.350000e+00,
            1.423899e+00,
            1.425000e+00,
            1.543078e+00,
            1.500000e+00,
            1.664015e+00,
            1.575000e+00,
            1.732484e+00,
            1.650000e+00,
            1.543296e+00,
            1.725000e+00,
            1.959523e+00,
            1.800000e+00,
            1.685132e+00,
            1.875000e+00,
            1.951791e+00,
            1.950000e+00,
            2.095346e+00,
            2.025000e+00,
            2.361460e+00,
            2.100000e+00,
            2.169119e+00,
            2.175000e+00,
            2.061745e+00,
            2.250000e+00,
            2.178641e+00,
            2.325000e+00,
            2.104346e+00,
            2.400000e+00,
            2.584470e+00,
            2.475000e+00,
            1.914158e+00,
            2.550000e+00,
            2.368375e+00,
            2.625000e+00,
            2.686125e+00,
            2.700000e+00,
            2.712395e+00,
            2.775000e+00,
            2.499511e+00,
            2.850000e+00,
            2.558897e+00,
            2.925000e+00,
            2.309154e+00,
            3.000000e+00,
            2.869503e+00,
            3.075000e+00,
            3.116645e+00,
            3.150000e+00,
            3.094907e+00,
            3.225000e+00,
            2.471759e+00,
            3.300000e+00,
            3.017131e+00,
            3.375000e+00,
            3.232381e+00,
            3.450000e+00,
            2.944596e+00,
            3.525000e+00,
            3.385343e+00,
            3.600000e+00,
            3.199826e+00,
            3.675000e+00,
            3.423039e+00,
            3.750000e+00,
            3.621552e+00,
            3.825000e+00,
            3.559255e+00,
            3.900000e+00,
            3.530713e+00,
            3.975000e+00,
            3.561766e+00,
            4.050000e+00,
            3.544574e+00,
            4.125000e+00,
            3.867945e+00,
            4.200000e+00,
            4.049776e+00,
            4.275000e+00,
            3.885601e+00,
            4.350000e+00,
            4.110505e+00,
            4.425000e+00,
            4.345320e+00,
            4.500000e+00,
            4.161241e+00,
            4.575000e+00,
            4.363407e+00,
            4.650000e+00,
            4.161576e+00,
            4.725000e+00,
            4.619728e+00,
            4.800000e+00,
            4.737410e+00,
            4.875000e+00,
            4.727863e+00,
            4.950000e+00,
            4.669206e+00,
        ]
        .chunks_exact(2)
        .map(|chunk| (chunk[0], chunk[1]))
        .unzip();

        let func: CurveFunctionType = Box::new(
            |x: f64, parameters: &[f64], value: &mut f64, jac: Option<&mut [Option<f64>]>| {
                let m = parameters[0];
                let c = parameters[1];
                *value = f64::exp(m * x + c);
                if let Some(jac) = jac {
                    if let Some(d_dm) = jac[0].as_mut() {
                        *d_dm = x * f64::exp(m * x + c);
                    }
                    if let Some(d_dc) = jac[1].as_mut() {
                        *d_dc = f64::exp(m * x + c);
                    }
                }
                true
            },
        );
        // Exercise both construction paths: the builder (with optional loss) and ::new.
        let problem = if let Some(loss) = loss {
            CurveFitProblem1D::builder()
                .func(func)
                .x(&x)
                .y(&y)
                .parameters(&[0.0, 0.0])
                .loss(loss)
                .build()
                .unwrap()
        } else {
            CurveFitProblem1D::new(func, &x, &y, &[0.0, 0.0])
        };
        let CurveFitProblemSolution {
            parameters: solution,
            summary,
        } = problem.solve(&SolverOptions::default());

        assert!(summary.is_solution_usable());

        assert_abs_diff_eq!(0.3, solution[0], epsilon = 0.02);
        assert_abs_diff_eq!(0.1, solution[1], epsilon = 0.04);

        solution
    }

    #[test]
    fn test_curve_fit_problem_1d_trivial_loss() {
        curve_fit_problem_1d(None);
    }

    #[test]
    fn test_curve_fit_problem_1d_custom_arctan_loss() {
        let loss: LossFunctionType = Box::new(|squared_norm, out| {
            out[0] = f64::atan(squared_norm);
            out[1] = 1.0 / (squared_norm.powi(2) + 1.0);
            out[2] = -2.0 * squared_norm * out[1].powi(2);
        });
        let loss = LossFunction::custom(loss);
        curve_fit_problem_1d(Some(loss));
    }

    // Renamed from test_curve_fit_problem_2d_stock_arctan_loss: this is the 1-D problem.
    #[test]
    fn test_curve_fit_problem_1d_stock_arctan_loss() {
        let loss = LossFunction::arctan(1.0);
        curve_fit_problem_1d(Some(loss));
    }

    /// y = a * sin(b * x) + c
    fn model(
        x: f64,
        parameters: &[f64],
        y: &mut f64,
        jacobians: Option<&mut [Option<f64>]>,
    ) -> bool {
        let &[a, b, c]: &[f64; 3] = parameters.try_into().unwrap();
        *y = a * f64::sin(b * x) + c;
        if let Some(jacobians) = jacobians {
            let [d_da, d_db, d_dc]: &mut [Option<f64>; 3] = jacobians.try_into().unwrap();
            if let Some(d_da) = d_da {
                *d_da = f64::sin(b * x);
            }
            if let Some(d_db) = d_db {
                // Bug fix: d/db [a * sin(b * x)] = a * x * cos(b * x); the previous code
                // multiplied by `b` instead of `x`, i.e. the Jacobian was wrong.
                *d_db = a * x * f64::cos(b * x);
            }
            if let Some(d_dc) = d_dc {
                *d_dc = 1.0;
            }
        }
        true
    }

    #[test]
    fn compare_new_with_build() {
        const N: usize = 1000;

        const TRUE_PARAM: [f64; 3] = [1.5, std::f64::consts::PI, -1.0];

        let x: Vec<_> = (0..N).map(|i| i as f64 / N as f64).collect();
        let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(0);
        let noise_level: f64 = 0.1;
        let y: Vec<_> = x
            .iter()
            .map(|&x| {
                let mut y = 0.0;
                model(x, &TRUE_PARAM, &mut y, None);
                let noise = noise_level * rng.sample::<f64, _>(rand_distr::StandardNormal);
                y + noise
            })
            .collect();
        // Uniform inverse errors rescale the cost but must not change the solution.
        let w = vec![noise_level.powi(-1); x.len()];

        let initial_guess = [0.0, 1.0, 0.0];
        let options = SolverOptions::default();

        let func: CurveFunctionType = Box::new(model);
        let CurveFitProblemSolution {
            parameters: solution_new,
            summary: summary_new,
        } = CurveFitProblem1D::new(func, &x, &y, &initial_guess).solve(&options);
        assert!(summary_new.is_solution_usable());

        let func: CurveFunctionType = Box::new(model);
        let CurveFitProblemSolution {
            parameters: solution_build,
            summary: summary_build,
        } = CurveFitProblem1D::builder()
            .func(func)
            .x(&x)
            .y(&y)
            .inverse_error(&w)
            .parameters(&initial_guess)
            .build()
            .unwrap()
            .solve(&options);
        assert!(summary_build.is_solution_usable());

        assert_abs_diff_eq!(&solution_new[..], &solution_build[..], epsilon = 1e-10);
        assert_abs_diff_eq!(&TRUE_PARAM[..], &solution_new[..], epsilon = 0.02);
    }
}
713}