FuncEval

Struct FuncEval 

pub struct FuncEval<T, H> { /* private fields */ }
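FuncEval bundles what an oracle reports at a single point: the objective value f, the gradient g, and an optional Hessian. Below is a minimal sketch of constructing and querying an evaluation by hand; it assumes that FuncEvalMultivariate (used throughout the repository examples) is the FuncEval<DVector<f64>, DMatrix<f64>> instantiation, that Floating is f64, and that the crate's items are already in scope.

use nalgebra::{DMatrix, DVector};

// Hypothetical standalone usage: the evaluation of f(x) = x1^2 + x2^2 at x = (1, 2).
let f = 5.0;
let g = DVector::from_vec(vec![2.0, 4.0]);
let hessian = DMatrix::from_vec(2, 2, vec![2.0, 0.0, 0.0, 2.0]);

let eval = FuncEvalMultivariate::new(f, g).with_hessian(hessian);
assert_eq!(eval.f(), &5.0);
assert_eq!(eval.g(), &DVector::from_vec(vec![2.0, 4.0]));
assert!(eval.hessian().is_some());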

Implementations


impl<T, H> FuncEval<T, H>

Auto-generated by derive_getters::Getters.


pub fn f(&self) -> &Floating

Get field f from instance of FuncEval.

Examples found in repository
examples/quadratic.rs (line 41)
4fn main() {
5    // Setting up log verbosity and _
6    std::env::set_var("RUST_LOG", "debug");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Setting up the oracle
10    let matrix = DMatrix::from_vec(2, 2, vec![1., 0., 0., 1.]);
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let f = x.dot(&(&matrix * x));
13        let g = 2. * &matrix * x;
14        FuncEvalMultivariate::new(f, g)
15    };
16
17    // Setting up the line search
18    let mut ls = MoreThuente::default();
19    // Setting up the main solver, with its parameters and the initial guess
20    let tol = 1e-6;
21    let x0 = DVector::from_vec(vec![1., 1.]);
22    let mut solver = BFGS::new(tol, x0);
23
24    // Running the solver
25    let max_iter_solver = 100;
26    let max_iter_line_search = 10;
27    let callback = None;
28    solver
29        .minimize(
30            &mut ls,
31            f_and_g,
32            max_iter_solver,
33            max_iter_line_search,
34            callback,
35        )
36        .unwrap();
37    // Printing the result
38    let x = solver.x();
39    let eval = f_and_g(x);
40    println!("x: {:?}", x);
41    println!("f(x): {}", eval.f());
42    println!("g(x): {:?}", eval.g());
43    assert_eq!(eval.f(), &0.0);
44}
More examples
examples/quadratic_with_plots.rs (line 47)
6fn main() {
7    // Setting up log verbosity and _.
8    std::env::set_var("RUST_LOG", "debug");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10    // Setting up the oracle
11    let matrix = DMatrix::from_vec(2, 2, vec![100., 0., 0., 100.]);
12    let mut f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
13        let f = x.dot(&(&matrix * x));
14        let g = 2. * &matrix * x;
15        FuncEvalMultivariate::new(f, g)
16    };
17    // Setting up the line search
18    let armijo_factr = 1e-4;
19    let beta = 0.5; // (beta in (0, 1), notice that beta = 0.5 corresponds to bisection)
20    let mut ls = BackTracking::new(armijo_factr, beta);
21    // Setting up the main solver, with its parameters and the initial guess
22    let tol = 1e-6;
23    let x0 = DVector::from_vec(vec![10., 10.]);
24    let mut solver = GradientDescent::new(tol, x0);
25    // We define a callback to store iterates and function evaluations
26    let mut iterates = vec![];
27    let mut solver_callback = |s: &GradientDescent| {
28        iterates.push(s.x().clone());
29    };
30    // Running the solver
31    let max_iter_solver = 100;
32    let max_iter_line_search = 10;
33
34    solver
35        .minimize(
36            &mut ls,
37            f_and_g,
38            max_iter_solver,
39            max_iter_line_search,
40            Some(&mut solver_callback),
41        )
42        .unwrap();
43    // Printing the result
44    let x = solver.x();
45    let eval = f_and_g(x);
46    println!("x: {:?}", x);
47    println!("f(x): {}", eval.f());
48    println!("g(x): {:?}", eval.g());
49
50    // Plotting the iterates
51    let n = 50;
52    let start = -5.0;
53    let end = 5.0;
54    let plotter = Plotter3d::new(start, end, start, end, n)
55        .append_plot(&mut f_and_g, "Objective function", 0.5)
56        .append_scatter_points(&mut f_and_g, &iterates, "Iterates")
57        .set_layout_size(1600, 1000);
58    plotter.build("quadratic.html");
59}
examples/gradient_descent_example.rs (line 61)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Convex quadratic function: f(x,y) = x^2 + 2y^2
12    // Global minimum at (0, 0) with f(0,0) = 0
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16
17        // Function value
18        let f = x1.powi(2) + 2.0 * x2.powi(2);
19
20        // Gradient
21        let g1 = 2.0 * x1;
22        let g2 = 4.0 * x2;
23        let g = DVector::from_vec(vec![g1, g2]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (backtracking with Armijo condition)
29    let armijo_factor = 1e-4;
30    let beta = 0.5;
31    let mut ls = BackTracking::new(armijo_factor, beta);
32
33    // Setting up the solver
34    let tol = 1e-6;
35    let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
36    let mut solver = GradientDescent::new(tol, x0.clone());
37
38    // Running the solver
39    let max_iter_solver = 100;
40    let max_iter_line_search = 20;
41
42    println!("=== Gradient Descent Example ===");
43    println!("Objective: f(x,y) = x^2 + 2y^2 (convex quadratic)");
44    println!("Global minimum: (0, 0) with f(0,0) = 0");
45    println!("Starting point: {:?}", x0);
46    println!("Tolerance: {}", tol);
47    println!();
48
49    match solver.minimize(
50        &mut ls,
51        f_and_g,
52        max_iter_solver,
53        max_iter_line_search,
54        None,
55    ) {
56        Ok(()) => {
57            let x = solver.x();
58            let eval = f_and_g(x);
59            println!("✅ Optimization completed successfully!");
60            println!("Final iterate: {:?}", x);
61            println!("Function value: {:.6}", eval.f());
62            println!("Gradient norm: {:.6}", eval.g().norm());
63            println!("Iterations: {}", solver.k());
64
65            // Check if we're close to the known minimum
66            let true_min = DVector::from_vec(vec![0.0, 0.0]);
67            let distance_to_min = (x - true_min).norm();
68            println!("Distance to true minimum: {:.6}", distance_to_min);
69            println!("Expected function value: 0.0");
70        }
71        Err(e) => {
72            println!("❌ Optimization failed: {:?}", e);
73        }
74    }
75}
examples/bfgs_example.rs (line 58)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex quadratic function: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz
10    // This function has a unique minimum that we can verify
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14        let x3 = x[2];
15
16        // Function value
17        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2) + x1 * x2 + x2 * x3;
18
19        // Gradient
20        let g1 = 2.0 * x1 + x2;
21        let g2 = 4.0 * x2 + x1 + x3;
22        let g3 = 6.0 * x3 + x2;
23        let g = DVector::from_vec(vec![g1, g2, g3]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (More-Thuente line search)
29    let mut ls = MoreThuente::default();
30
31    // Setting up the solver
32    let tol = 1e-8;
33    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
34    let mut solver = BFGS::new(tol, x0.clone());
35
36    // Running the solver
37    let max_iter_solver = 50;
38    let max_iter_line_search = 20;
39
40    println!("=== BFGS Quasi-Newton Example ===");
41    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz (convex quadratic)");
42    println!("Starting point: {:?}", x0);
43    println!("Tolerance: {}", tol);
44    println!();
45
46    match solver.minimize(
47        &mut ls,
48        f_and_g,
49        max_iter_solver,
50        max_iter_line_search,
51        None,
52    ) {
53        Ok(()) => {
54            let x = solver.x();
55            let eval = f_and_g(x);
56            println!("✅ Optimization completed successfully!");
57            println!("Final iterate: {:?}", x);
58            println!("Function value: {:.8}", eval.f());
59            println!("Gradient norm: {:.8}", eval.g().norm());
60            println!("Iterations: {}", solver.k());
61
62            // Verify optimality conditions
63            let gradient_at_solution = eval.g();
64            println!("Gradient at solution: {:?}", gradient_at_solution);
65            println!(
66                "Gradient norm should be close to 0: {}",
67                gradient_at_solution.norm()
68            );
69
70            // For this convex quadratic function, the minimum should be at the solution of the linear system
71            // ∇f(x) = 0, which gives us a system of linear equations
72            println!("Expected minimum: solution of ∇f(x) = 0");
73        }
74        Err(e) => {
75            println!("❌ Optimization failed: {:?}", e);
76        }
77    }
78}
examples/coordinate_descent_example.rs (line 63)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Separable convex function: f(x,y,z) = x^2 + 2y^2 + 3z^2
12    // This function is separable and has a minimum at (0, 0, 0)
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16        let x3 = x[2];
17
18        // Function value
19        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2);
20
21        // Gradient
22        let g1 = 2.0 * x1;
23        let g2 = 4.0 * x2;
24        let g3 = 6.0 * x3;
25        let g = DVector::from_vec(vec![g1, g2, g3]);
26
27        FuncEvalMultivariate::new(f, g)
28    };
29
30    // Setting up the line search (backtracking)
31    let armijo_factor = 1e-4;
32    let beta = 0.5;
33    let mut ls = BackTracking::new(armijo_factor, beta);
34
35    // Setting up the solver
36    let tol = 1e-6;
37    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
38    let mut solver = CoordinateDescent::new(tol, x0.clone());
39
40    // Running the solver
41    let max_iter_solver = 100;
42    let max_iter_line_search = 10;
43
44    println!("=== Coordinate Descent Example ===");
45    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 (separable convex)");
46    println!("Global minimum: (0, 0, 0) with f(0,0,0) = 0");
47    println!("Starting point: {:?}", x0);
48    println!("Tolerance: {}", tol);
49    println!();
50
51    match solver.minimize(
52        &mut ls,
53        f_and_g,
54        max_iter_solver,
55        max_iter_line_search,
56        None,
57    ) {
58        Ok(()) => {
59            let x = solver.x();
60            let eval = f_and_g(x);
61            println!("✅ Optimization completed successfully!");
62            println!("Final iterate: {:?}", x);
63            println!("Function value: {:.6}", eval.f());
64            println!("Gradient norm: {:.6}", eval.g().norm());
65            println!("Iterations: {}", solver.k());
66
67            // Check if we're close to the known minimum
68            let true_min = DVector::from_vec(vec![0.0, 0.0, 0.0]);
69            let distance_to_min = (x - true_min).norm();
70            println!("Distance to true minimum: {:.6}", distance_to_min);
71            println!("Expected function value: 0.0");
72
73            // Verify optimality conditions
74            let gradient_at_solution = eval.g();
75            println!("Gradient at solution: {:?}", gradient_at_solution);
76            println!(
77                "Gradient norm should be close to 0: {}",
78                gradient_at_solution.norm()
79            );
80        }
81        Err(e) => {
82            println!("❌ Optimization failed: {:?}", e);
83        }
84    }
85}
examples/dfp_example.rs (line 56)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex function: f(x,y) = x^2 + 5y^2 + xy
10    // This function is convex and has a unique minimum
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14
15        // Function value
16        let f = x1.powi(2) + 5.0 * x2.powi(2) + x1 * x2;
17
18        // Gradient
19        let g1 = 2.0 * x1 + x2;
20        let g2 = 10.0 * x2 + x1;
21        let g = DVector::from_vec(vec![g1, g2]);
22
23        FuncEvalMultivariate::new(f, g)
24    };
25
26    // Setting up the line search (More-Thuente line search)
27    let mut ls = MoreThuente::default();
28
29    // Setting up the solver
30    let tol = 1e-6;
31    let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
32    let mut solver = DFP::new(tol, x0.clone());
33
34    // Running the solver
35    let max_iter_solver = 100;
36    let max_iter_line_search = 20;
37
38    println!("=== DFP (Davidon-Fletcher-Powell) Quasi-Newton Example ===");
39    println!("Objective: f(x,y) = x^2 + 5y^2 + xy (convex quadratic)");
40    println!("Starting point: {:?}", x0);
41    println!("Tolerance: {}", tol);
42    println!();
43
44    match solver.minimize(
45        &mut ls,
46        f_and_g,
47        max_iter_solver,
48        max_iter_line_search,
49        None,
50    ) {
51        Ok(()) => {
52            let x = solver.x();
53            let eval = f_and_g(x);
54            println!("✅ Optimization completed successfully!");
55            println!("Final iterate: {:?}", x);
56            println!("Function value: {:.6}", eval.f());
57            println!("Gradient norm: {:.6}", eval.g().norm());
58            println!("Iterations: {}", solver.k());
59
60            // Verify optimality conditions
61            let gradient_at_solution = eval.g();
62            println!("Gradient at solution: {:?}", gradient_at_solution);
63            println!(
64                "Gradient norm should be close to 0: {}",
65                gradient_at_solution.norm()
66            );
67
68            // For this convex quadratic function, the minimum should be at the solution of the linear system
69            // ∇f(x) = 0, which gives us: 2x + y = 0, x + 10y = 0
70            // Solving: x = 0, y = 0
71            let expected_min = DVector::from_vec(vec![0.0, 0.0]);
72            let distance_to_expected = (x - expected_min).norm();
73            println!(
74                "Distance to expected minimum (0,0): {:.6}",
75                distance_to_expected
76            );
77            println!("Expected function value at (0,0): 0.0");
78        }
79        Err(e) => {
80            println!("❌ Optimization failed: {:?}", e);
81        }
82    }
83}

pub fn g(&self) -> &T

Get field g from instance of FuncEval.

Examples found in repository
examples/quadratic.rs (line 42)
4fn main() {
5    // Setting up log verbosity and _
6    std::env::set_var("RUST_LOG", "debug");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Setting up the oracle
10    let matrix = DMatrix::from_vec(2, 2, vec![1., 0., 0., 1.]);
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let f = x.dot(&(&matrix * x));
13        let g = 2. * &matrix * x;
14        FuncEvalMultivariate::new(f, g)
15    };
16
17    // Setting up the line search
18    let mut ls = MoreThuente::default();
19    // Setting up the main solver, with its parameters and the initial guess
20    let tol = 1e-6;
21    let x0 = DVector::from_vec(vec![1., 1.]);
22    let mut solver = BFGS::new(tol, x0);
23
24    // Running the solver
25    let max_iter_solver = 100;
26    let max_iter_line_search = 10;
27    let callback = None;
28    solver
29        .minimize(
30            &mut ls,
31            f_and_g,
32            max_iter_solver,
33            max_iter_line_search,
34            callback,
35        )
36        .unwrap();
37    // Printing the result
38    let x = solver.x();
39    let eval = f_and_g(x);
40    println!("x: {:?}", x);
41    println!("f(x): {}", eval.f());
42    println!("g(x): {:?}", eval.g());
43    assert_eq!(eval.f(), &0.0);
44}
More examples
examples/quadratic_with_plots.rs (line 48)
6fn main() {
7    // Setting up log verbosity and _.
8    std::env::set_var("RUST_LOG", "debug");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10    // Setting up the oracle
11    let matrix = DMatrix::from_vec(2, 2, vec![100., 0., 0., 100.]);
12    let mut f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
13        let f = x.dot(&(&matrix * x));
14        let g = 2. * &matrix * x;
15        FuncEvalMultivariate::new(f, g)
16    };
17    // Setting up the line search
18    let armijo_factr = 1e-4;
19    let beta = 0.5; // (beta in (0, 1), notice that beta = 0.5 corresponds to bisection)
20    let mut ls = BackTracking::new(armijo_factr, beta);
21    // Setting up the main solver, with its parameters and the initial guess
22    let tol = 1e-6;
23    let x0 = DVector::from_vec(vec![10., 10.]);
24    let mut solver = GradientDescent::new(tol, x0);
25    // We define a callback to store iterates and function evaluations
26    let mut iterates = vec![];
27    let mut solver_callback = |s: &GradientDescent| {
28        iterates.push(s.x().clone());
29    };
30    // Running the solver
31    let max_iter_solver = 100;
32    let max_iter_line_search = 10;
33
34    solver
35        .minimize(
36            &mut ls,
37            f_and_g,
38            max_iter_solver,
39            max_iter_line_search,
40            Some(&mut solver_callback),
41        )
42        .unwrap();
43    // Printing the result
44    let x = solver.x();
45    let eval = f_and_g(x);
46    println!("x: {:?}", x);
47    println!("f(x): {}", eval.f());
48    println!("g(x): {:?}", eval.g());
49
50    // Plotting the iterates
51    let n = 50;
52    let start = -5.0;
53    let end = 5.0;
54    let plotter = Plotter3d::new(start, end, start, end, n)
55        .append_plot(&mut f_and_g, "Objective function", 0.5)
56        .append_scatter_points(&mut f_and_g, &iterates, "Iterates")
57        .set_layout_size(1600, 1000);
58    plotter.build("quadratic.html");
59}
examples/gradient_descent_example.rs (line 62)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Convex quadratic function: f(x,y) = x^2 + 2y^2
12    // Global minimum at (0, 0) with f(0,0) = 0
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16
17        // Function value
18        let f = x1.powi(2) + 2.0 * x2.powi(2);
19
20        // Gradient
21        let g1 = 2.0 * x1;
22        let g2 = 4.0 * x2;
23        let g = DVector::from_vec(vec![g1, g2]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (backtracking with Armijo condition)
29    let armijo_factor = 1e-4;
30    let beta = 0.5;
31    let mut ls = BackTracking::new(armijo_factor, beta);
32
33    // Setting up the solver
34    let tol = 1e-6;
35    let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
36    let mut solver = GradientDescent::new(tol, x0.clone());
37
38    // Running the solver
39    let max_iter_solver = 100;
40    let max_iter_line_search = 20;
41
42    println!("=== Gradient Descent Example ===");
43    println!("Objective: f(x,y) = x^2 + 2y^2 (convex quadratic)");
44    println!("Global minimum: (0, 0) with f(0,0) = 0");
45    println!("Starting point: {:?}", x0);
46    println!("Tolerance: {}", tol);
47    println!();
48
49    match solver.minimize(
50        &mut ls,
51        f_and_g,
52        max_iter_solver,
53        max_iter_line_search,
54        None,
55    ) {
56        Ok(()) => {
57            let x = solver.x();
58            let eval = f_and_g(x);
59            println!("✅ Optimization completed successfully!");
60            println!("Final iterate: {:?}", x);
61            println!("Function value: {:.6}", eval.f());
62            println!("Gradient norm: {:.6}", eval.g().norm());
63            println!("Iterations: {}", solver.k());
64
65            // Check if we're close to the known minimum
66            let true_min = DVector::from_vec(vec![0.0, 0.0]);
67            let distance_to_min = (x - true_min).norm();
68            println!("Distance to true minimum: {:.6}", distance_to_min);
69            println!("Expected function value: 0.0");
70        }
71        Err(e) => {
72            println!("❌ Optimization failed: {:?}", e);
73        }
74    }
75}
examples/bfgs_example.rs (line 59)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex quadratic function: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz
10    // This function has a unique minimum that we can verify
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14        let x3 = x[2];
15
16        // Function value
17        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2) + x1 * x2 + x2 * x3;
18
19        // Gradient
20        let g1 = 2.0 * x1 + x2;
21        let g2 = 4.0 * x2 + x1 + x3;
22        let g3 = 6.0 * x3 + x2;
23        let g = DVector::from_vec(vec![g1, g2, g3]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (More-Thuente line search)
29    let mut ls = MoreThuente::default();
30
31    // Setting up the solver
32    let tol = 1e-8;
33    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
34    let mut solver = BFGS::new(tol, x0.clone());
35
36    // Running the solver
37    let max_iter_solver = 50;
38    let max_iter_line_search = 20;
39
40    println!("=== BFGS Quasi-Newton Example ===");
41    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz (convex quadratic)");
42    println!("Starting point: {:?}", x0);
43    println!("Tolerance: {}", tol);
44    println!();
45
46    match solver.minimize(
47        &mut ls,
48        f_and_g,
49        max_iter_solver,
50        max_iter_line_search,
51        None,
52    ) {
53        Ok(()) => {
54            let x = solver.x();
55            let eval = f_and_g(x);
56            println!("✅ Optimization completed successfully!");
57            println!("Final iterate: {:?}", x);
58            println!("Function value: {:.8}", eval.f());
59            println!("Gradient norm: {:.8}", eval.g().norm());
60            println!("Iterations: {}", solver.k());
61
62            // Verify optimality conditions
63            let gradient_at_solution = eval.g();
64            println!("Gradient at solution: {:?}", gradient_at_solution);
65            println!(
66                "Gradient norm should be close to 0: {}",
67                gradient_at_solution.norm()
68            );
69
70            // For this convex quadratic function, the minimum should be at the solution of the linear system
71            // ∇f(x) = 0, which gives us a system of linear equations
72            println!("Expected minimum: solution of ∇f(x) = 0");
73        }
74        Err(e) => {
75            println!("❌ Optimization failed: {:?}", e);
76        }
77    }
78}
examples/coordinate_descent_example.rs (line 64)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Separable convex function: f(x,y,z) = x^2 + 2y^2 + 3z^2
12    // This function is separable and has a minimum at (0, 0, 0)
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16        let x3 = x[2];
17
18        // Function value
19        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2);
20
21        // Gradient
22        let g1 = 2.0 * x1;
23        let g2 = 4.0 * x2;
24        let g3 = 6.0 * x3;
25        let g = DVector::from_vec(vec![g1, g2, g3]);
26
27        FuncEvalMultivariate::new(f, g)
28    };
29
30    // Setting up the line search (backtracking)
31    let armijo_factor = 1e-4;
32    let beta = 0.5;
33    let mut ls = BackTracking::new(armijo_factor, beta);
34
35    // Setting up the solver
36    let tol = 1e-6;
37    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
38    let mut solver = CoordinateDescent::new(tol, x0.clone());
39
40    // Running the solver
41    let max_iter_solver = 100;
42    let max_iter_line_search = 10;
43
44    println!("=== Coordinate Descent Example ===");
45    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 (separable convex)");
46    println!("Global minimum: (0, 0, 0) with f(0,0,0) = 0");
47    println!("Starting point: {:?}", x0);
48    println!("Tolerance: {}", tol);
49    println!();
50
51    match solver.minimize(
52        &mut ls,
53        f_and_g,
54        max_iter_solver,
55        max_iter_line_search,
56        None,
57    ) {
58        Ok(()) => {
59            let x = solver.x();
60            let eval = f_and_g(x);
61            println!("✅ Optimization completed successfully!");
62            println!("Final iterate: {:?}", x);
63            println!("Function value: {:.6}", eval.f());
64            println!("Gradient norm: {:.6}", eval.g().norm());
65            println!("Iterations: {}", solver.k());
66
67            // Check if we're close to the known minimum
68            let true_min = DVector::from_vec(vec![0.0, 0.0, 0.0]);
69            let distance_to_min = (x - true_min).norm();
70            println!("Distance to true minimum: {:.6}", distance_to_min);
71            println!("Expected function value: 0.0");
72
73            // Verify optimality conditions
74            let gradient_at_solution = eval.g();
75            println!("Gradient at solution: {:?}", gradient_at_solution);
76            println!(
77                "Gradient norm should be close to 0: {}",
78                gradient_at_solution.norm()
79            );
80        }
81        Err(e) => {
82            println!("❌ Optimization failed: {:?}", e);
83        }
84    }
85}
examples/dfp_example.rs (line 57)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex function: f(x,y) = x^2 + 5y^2 + xy
10    // This function is convex and has a unique minimum
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14
15        // Function value
16        let f = x1.powi(2) + 5.0 * x2.powi(2) + x1 * x2;
17
18        // Gradient
19        let g1 = 2.0 * x1 + x2;
20        let g2 = 10.0 * x2 + x1;
21        let g = DVector::from_vec(vec![g1, g2]);
22
23        FuncEvalMultivariate::new(f, g)
24    };
25
26    // Setting up the line search (More-Thuente line search)
27    let mut ls = MoreThuente::default();
28
29    // Setting up the solver
30    let tol = 1e-6;
31    let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
32    let mut solver = DFP::new(tol, x0.clone());
33
34    // Running the solver
35    let max_iter_solver = 100;
36    let max_iter_line_search = 20;
37
38    println!("=== DFP (Davidon-Fletcher-Powell) Quasi-Newton Example ===");
39    println!("Objective: f(x,y) = x^2 + 5y^2 + xy (convex quadratic)");
40    println!("Starting point: {:?}", x0);
41    println!("Tolerance: {}", tol);
42    println!();
43
44    match solver.minimize(
45        &mut ls,
46        f_and_g,
47        max_iter_solver,
48        max_iter_line_search,
49        None,
50    ) {
51        Ok(()) => {
52            let x = solver.x();
53            let eval = f_and_g(x);
54            println!("✅ Optimization completed successfully!");
55            println!("Final iterate: {:?}", x);
56            println!("Function value: {:.6}", eval.f());
57            println!("Gradient norm: {:.6}", eval.g().norm());
58            println!("Iterations: {}", solver.k());
59
60            // Verify optimality conditions
61            let gradient_at_solution = eval.g();
62            println!("Gradient at solution: {:?}", gradient_at_solution);
63            println!(
64                "Gradient norm should be close to 0: {}",
65                gradient_at_solution.norm()
66            );
67
68            // For this convex quadratic function, the minimum should be at the solution of the linear system
69            // ∇f(x) = 0, which gives us: 2x + y = 0, x + 10y = 0
70            // Solving: x = 0, y = 0
71            let expected_min = DVector::from_vec(vec![0.0, 0.0]);
72            let distance_to_expected = (x - expected_min).norm();
73            println!(
74                "Distance to expected minimum (0,0): {:.6}",
75                distance_to_expected
76            );
77            println!("Expected function value at (0,0): 0.0");
78        }
79        Err(e) => {
80            println!("❌ Optimization failed: {:?}", e);
81        }
82    }
83}

pub fn hessian(&self) -> &Option<H>

Get field hessian from instance of FuncEval.
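The repository examples never read this field directly; the following is a minimal sketch under the same assumptions as above (FuncEvalMultivariate alias, Floating = f64, crate items in scope).

use nalgebra::{DMatrix, DVector};

// new() takes only f and g, so the Hessian is presumably left unset here.
let eval = FuncEvalMultivariate::new(1.0, DVector::from_vec(vec![2.0, 0.0]));
assert!(eval.hessian().is_none());

// After with_hessian the field is populated and can be borrowed.
let eval = eval.with_hessian(DMatrix::identity(2, 2));
if let Some(h) = eval.hessian() {
    println!("Hessian: {}", h);
}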


impl<T, H> FuncEval<T, H>


pub fn new(f: Floating, g: T) -> Self

Examples found in repository
examples/quadratic.rs (line 14)
4fn main() {
5    // Setting up log verbosity and _
6    std::env::set_var("RUST_LOG", "debug");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Setting up the oracle
10    let matrix = DMatrix::from_vec(2, 2, vec![1., 0., 0., 1.]);
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let f = x.dot(&(&matrix * x));
13        let g = 2. * &matrix * x;
14        FuncEvalMultivariate::new(f, g)
15    };
16
17    // Setting up the line search
18    let mut ls = MoreThuente::default();
19    // Setting up the main solver, with its parameters and the initial guess
20    let tol = 1e-6;
21    let x0 = DVector::from_vec(vec![1., 1.]);
22    let mut solver = BFGS::new(tol, x0);
23
24    // Running the solver
25    let max_iter_solver = 100;
26    let max_iter_line_search = 10;
27    let callback = None;
28    solver
29        .minimize(
30            &mut ls,
31            f_and_g,
32            max_iter_solver,
33            max_iter_line_search,
34            callback,
35        )
36        .unwrap();
37    // Printing the result
38    let x = solver.x();
39    let eval = f_and_g(x);
40    println!("x: {:?}", x);
41    println!("f(x): {}", eval.f());
42    println!("g(x): {:?}", eval.g());
43    assert_eq!(eval.f(), &0.0);
44}
More examples
examples/quadratic_with_plots.rs (line 15)
6fn main() {
7    // Setting up log verbosity and _.
8    std::env::set_var("RUST_LOG", "debug");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10    // Setting up the oracle
11    let matrix = DMatrix::from_vec(2, 2, vec![100., 0., 0., 100.]);
12    let mut f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
13        let f = x.dot(&(&matrix * x));
14        let g = 2. * &matrix * x;
15        FuncEvalMultivariate::new(f, g)
16    };
17    // Setting up the line search
18    let armijo_factr = 1e-4;
19    let beta = 0.5; // (beta in (0, 1), notice that beta = 0.5 corresponds to bisection)
20    let mut ls = BackTracking::new(armijo_factr, beta);
21    // Setting up the main solver, with its parameters and the initial guess
22    let tol = 1e-6;
23    let x0 = DVector::from_vec(vec![10., 10.]);
24    let mut solver = GradientDescent::new(tol, x0);
25    // We define a callback to store iterates and function evaluations
26    let mut iterates = vec![];
27    let mut solver_callback = |s: &GradientDescent| {
28        iterates.push(s.x().clone());
29    };
30    // Running the solver
31    let max_iter_solver = 100;
32    let max_iter_line_search = 10;
33
34    solver
35        .minimize(
36            &mut ls,
37            f_and_g,
38            max_iter_solver,
39            max_iter_line_search,
40            Some(&mut solver_callback),
41        )
42        .unwrap();
43    // Printing the result
44    let x = solver.x();
45    let eval = f_and_g(x);
46    println!("x: {:?}", x);
47    println!("f(x): {}", eval.f());
48    println!("g(x): {:?}", eval.g());
49
50    // Plotting the iterates
51    let n = 50;
52    let start = -5.0;
53    let end = 5.0;
54    let plotter = Plotter3d::new(start, end, start, end, n)
55        .append_plot(&mut f_and_g, "Objective function", 0.5)
56        .append_scatter_points(&mut f_and_g, &iterates, "Iterates")
57        .set_layout_size(1600, 1000);
58    plotter.build("quadratic.html");
59}
examples/gradient_descent_example.rs (line 25)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Convex quadratic function: f(x,y) = x^2 + 2y^2
12    // Global minimum at (0, 0) with f(0,0) = 0
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16
17        // Function value
18        let f = x1.powi(2) + 2.0 * x2.powi(2);
19
20        // Gradient
21        let g1 = 2.0 * x1;
22        let g2 = 4.0 * x2;
23        let g = DVector::from_vec(vec![g1, g2]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (backtracking with Armijo condition)
29    let armijo_factor = 1e-4;
30    let beta = 0.5;
31    let mut ls = BackTracking::new(armijo_factor, beta);
32
33    // Setting up the solver
34    let tol = 1e-6;
35    let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
36    let mut solver = GradientDescent::new(tol, x0.clone());
37
38    // Running the solver
39    let max_iter_solver = 100;
40    let max_iter_line_search = 20;
41
42    println!("=== Gradient Descent Example ===");
43    println!("Objective: f(x,y) = x^2 + 2y^2 (convex quadratic)");
44    println!("Global minimum: (0, 0) with f(0,0) = 0");
45    println!("Starting point: {:?}", x0);
46    println!("Tolerance: {}", tol);
47    println!();
48
49    match solver.minimize(
50        &mut ls,
51        f_and_g,
52        max_iter_solver,
53        max_iter_line_search,
54        None,
55    ) {
56        Ok(()) => {
57            let x = solver.x();
58            let eval = f_and_g(x);
59            println!("✅ Optimization completed successfully!");
60            println!("Final iterate: {:?}", x);
61            println!("Function value: {:.6}", eval.f());
62            println!("Gradient norm: {:.6}", eval.g().norm());
63            println!("Iterations: {}", solver.k());
64
65            // Check if we're close to the known minimum
66            let true_min = DVector::from_vec(vec![0.0, 0.0]);
67            let distance_to_min = (x - true_min).norm();
68            println!("Distance to true minimum: {:.6}", distance_to_min);
69            println!("Expected function value: 0.0");
70        }
71        Err(e) => {
72            println!("❌ Optimization failed: {:?}", e);
73        }
74    }
75}
examples/bfgs_example.rs (line 25)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex quadratic function: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz
10    // This function has a unique minimum that we can verify
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14        let x3 = x[2];
15
16        // Function value
17        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2) + x1 * x2 + x2 * x3;
18
19        // Gradient
20        let g1 = 2.0 * x1 + x2;
21        let g2 = 4.0 * x2 + x1 + x3;
22        let g3 = 6.0 * x3 + x2;
23        let g = DVector::from_vec(vec![g1, g2, g3]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (More-Thuente line search)
29    let mut ls = MoreThuente::default();
30
31    // Setting up the solver
32    let tol = 1e-8;
33    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
34    let mut solver = BFGS::new(tol, x0.clone());
35
36    // Running the solver
37    let max_iter_solver = 50;
38    let max_iter_line_search = 20;
39
40    println!("=== BFGS Quasi-Newton Example ===");
41    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz (convex quadratic)");
42    println!("Starting point: {:?}", x0);
43    println!("Tolerance: {}", tol);
44    println!();
45
46    match solver.minimize(
47        &mut ls,
48        f_and_g,
49        max_iter_solver,
50        max_iter_line_search,
51        None,
52    ) {
53        Ok(()) => {
54            let x = solver.x();
55            let eval = f_and_g(x);
56            println!("✅ Optimization completed successfully!");
57            println!("Final iterate: {:?}", x);
58            println!("Function value: {:.8}", eval.f());
59            println!("Gradient norm: {:.8}", eval.g().norm());
60            println!("Iterations: {}", solver.k());
61
62            // Verify optimality conditions
63            let gradient_at_solution = eval.g();
64            println!("Gradient at solution: {:?}", gradient_at_solution);
65            println!(
66                "Gradient norm should be close to 0: {}",
67                gradient_at_solution.norm()
68            );
69
70            // For this convex quadratic function, the minimum should be at the solution of the linear system
71            // ∇f(x) = 0, which gives us a system of linear equations
72            println!("Expected minimum: solution of ∇f(x) = 0");
73        }
74        Err(e) => {
75            println!("❌ Optimization failed: {:?}", e);
76        }
77    }
78}
examples/coordinate_descent_example.rs (line 27)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Separable convex function: f(x,y,z) = x^2 + 2y^2 + 3z^2
12    // This function is separable and has a minimum at (0, 0, 0)
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16        let x3 = x[2];
17
18        // Function value
19        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2);
20
21        // Gradient
22        let g1 = 2.0 * x1;
23        let g2 = 4.0 * x2;
24        let g3 = 6.0 * x3;
25        let g = DVector::from_vec(vec![g1, g2, g3]);
26
27        FuncEvalMultivariate::new(f, g)
28    };
29
30    // Setting up the line search (backtracking)
31    let armijo_factor = 1e-4;
32    let beta = 0.5;
33    let mut ls = BackTracking::new(armijo_factor, beta);
34
35    // Setting up the solver
36    let tol = 1e-6;
37    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
38    let mut solver = CoordinateDescent::new(tol, x0.clone());
39
40    // Running the solver
41    let max_iter_solver = 100;
42    let max_iter_line_search = 10;
43
44    println!("=== Coordinate Descent Example ===");
45    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 (separable convex)");
46    println!("Global minimum: (0, 0, 0) with f(0,0,0) = 0");
47    println!("Starting point: {:?}", x0);
48    println!("Tolerance: {}", tol);
49    println!();
50
51    match solver.minimize(
52        &mut ls,
53        f_and_g,
54        max_iter_solver,
55        max_iter_line_search,
56        None,
57    ) {
58        Ok(()) => {
59            let x = solver.x();
60            let eval = f_and_g(x);
61            println!("✅ Optimization completed successfully!");
62            println!("Final iterate: {:?}", x);
63            println!("Function value: {:.6}", eval.f());
64            println!("Gradient norm: {:.6}", eval.g().norm());
65            println!("Iterations: {}", solver.k());
66
67            // Check if we're close to the known minimum
68            let true_min = DVector::from_vec(vec![0.0, 0.0, 0.0]);
69            let distance_to_min = (x - true_min).norm();
70            println!("Distance to true minimum: {:.6}", distance_to_min);
71            println!("Expected function value: 0.0");
72
73            // Verify optimality conditions
74            let gradient_at_solution = eval.g();
75            println!("Gradient at solution: {:?}", gradient_at_solution);
76            println!(
77                "Gradient norm should be close to 0: {}",
78                gradient_at_solution.norm()
79            );
80        }
81        Err(e) => {
82            println!("❌ Optimization failed: {:?}", e);
83        }
84    }
85}
examples/dfp_example.rs (line 23)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex function: f(x,y) = x^2 + 5y^2 + xy
10    // This function is convex and has a unique minimum
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14
15        // Function value
16        let f = x1.powi(2) + 5.0 * x2.powi(2) + x1 * x2;
17
18        // Gradient
19        let g1 = 2.0 * x1 + x2;
20        let g2 = 10.0 * x2 + x1;
21        let g = DVector::from_vec(vec![g1, g2]);
22
23        FuncEvalMultivariate::new(f, g)
24    };
25
26    // Setting up the line search (More-Thuente line search)
27    let mut ls = MoreThuente::default();
28
29    // Setting up the solver
30    let tol = 1e-6;
31    let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
32    let mut solver = DFP::new(tol, x0.clone());
33
34    // Running the solver
35    let max_iter_solver = 100;
36    let max_iter_line_search = 20;
37
38    println!("=== DFP (Davidon-Fletcher-Powell) Quasi-Newton Example ===");
39    println!("Objective: f(x,y) = x^2 + 5y^2 + xy (convex quadratic)");
40    println!("Starting point: {:?}", x0);
41    println!("Tolerance: {}", tol);
42    println!();
43
44    match solver.minimize(
45        &mut ls,
46        f_and_g,
47        max_iter_solver,
48        max_iter_line_search,
49        None,
50    ) {
51        Ok(()) => {
52            let x = solver.x();
53            let eval = f_and_g(x);
54            println!("✅ Optimization completed successfully!");
55            println!("Final iterate: {:?}", x);
56            println!("Function value: {:.6}", eval.f());
57            println!("Gradient norm: {:.6}", eval.g().norm());
58            println!("Iterations: {}", solver.k());
59
60            // Verify optimality conditions
61            let gradient_at_solution = eval.g();
62            println!("Gradient at solution: {:?}", gradient_at_solution);
63            println!(
64                "Gradient norm should be close to 0: {}",
65                gradient_at_solution.norm()
66            );
67
68            // For this convex quadratic function, the minimum should be at the solution of the linear system
69            // ∇f(x) = 0, which gives us: 2x + y = 0, x + 10y = 0
70            // Solving: x = 0, y = 0
71            let expected_min = DVector::from_vec(vec![0.0, 0.0]);
72            let distance_to_expected = (x - expected_min).norm();
73            println!(
74                "Distance to expected minimum (0,0): {:.6}",
75                distance_to_expected
76            );
77            println!("Expected function value at (0,0): 0.0");
78        }
79        Err(e) => {
80            println!("❌ Optimization failed: {:?}", e);
81        }
82    }
83}

impl FuncEval<DVector<f64>, DMatrix<f64>>


pub fn with_hessian(self, hessian: DMatrix<Floating>) -> Self

Examples found in repository
examples/newton_example.rs (line 31)
4fn main() {
5    // Setting up logging
6    std::env::set_var("RUST_LOG", "info");
7    let _ = Tracer::default().with_normal_stdout_layer().build();
8
9    // Convex function: f(x,y) = x^2 + y^2 + exp(x^2 + y^2)
10    // This function is convex and has a unique minimum at (0, 0)
11    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
12        let x1 = x[0];
13        let x2 = x[1];
14
15        // Function value
16        let f = x1.powi(2) + x2.powi(2) + (x1.powi(2) + x2.powi(2)).exp();
17
18        // Gradient: ∇f = [2x + 2x*exp(x^2+y^2), 2y + 2y*exp(x^2+y^2)]
19        let exp_term = (x1.powi(2) + x2.powi(2)).exp();
20        let g1 = 2.0 * x1 * (1.0 + exp_term);
21        let g2 = 2.0 * x2 * (1.0 + exp_term);
22        let g = DVector::from_vec(vec![g1, g2]);
23
24        // Hessian: ∇²f = [[2(1+exp) + 4x^2*exp, 4xy*exp], [4xy*exp, 2(1+exp) + 4y^2*exp]]
25        let h11 = 2.0 * (1.0 + exp_term) + 4.0 * x1.powi(2) * exp_term;
26        let h12 = 4.0 * x1 * x2 * exp_term;
27        let h21 = h12;
28        let h22 = 2.0 * (1.0 + exp_term) + 4.0 * x2.powi(2) * exp_term;
29        let hessian = DMatrix::from_vec(2, 2, vec![h11, h21, h12, h22]);
30
31        FuncEvalMultivariate::new(f, g).with_hessian(hessian)
32    };
33
34    // Setting up the line search (More-Thuente line search)
35    let mut ls = MoreThuente::default();
36
37    // Setting up the solver
38    let tol = 1e-6;
39    let x0 = DVector::from_vec(vec![1.0, 1.0]); // Starting point
40    let mut solver = Newton::new(tol, x0.clone());
41
42    // Running the solver
43    let max_iter_solver = 20;
44    let max_iter_line_search = 20;
45
46    println!("=== Newton's Method Example ===");
47    println!("Objective: f(x,y) = x^2 + y^2 + exp(x^2 + y^2) (convex)");
48    println!("Global minimum: (0, 0) with f(0,0) = 1");
49    println!("Starting point: {:?}", x0);
50    println!("Tolerance: {}", tol);
51    println!();
52
53    match solver.minimize(
54        &mut ls,
55        f_and_g,
56        max_iter_solver,
57        max_iter_line_search,
58        None,
59    ) {
60        Ok(()) => {
61            let x = solver.x();
62            let eval = f_and_g(x);
63            println!("✅ Optimization completed successfully!");
64            println!("Final iterate: {:?}", x);
65            println!("Function value: {:.6}", eval.f());
66            println!("Gradient norm: {:.6}", eval.g().norm());
67            println!("Iterations: {}", solver.k());
68
69            // Show Newton decrement
70            if let Some(decrement_squared) = solver.decrement_squared() {
71                println!("Newton decrement squared: {:.6}", decrement_squared);
72                println!("Newton decrement: {:.6}", decrement_squared.sqrt());
73            }
74
75            // Check if we're close to the known minimum
76            let true_min = DVector::from_vec(vec![0.0, 0.0]);
77            let distance_to_min = (x - true_min).norm();
78            println!("Distance to true minimum: {:.6}", distance_to_min);
79            println!("Expected function value: 1.0");
80        }
81        Err(e) => {
82            println!("❌ Optimization failed: {:?}", e);
83        }
84    }
85}

pub fn take_hessian(&mut self) -> DMatrix<Floating>
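No description or repository example accompanies this method; judging from the signature it moves the stored Hessian out of the evaluation, so it is presumably meant to be called only after with_hessian. A hedged sketch, under the same FuncEvalMultivariate / Floating = f64 assumptions as above:

use nalgebra::{DMatrix, DVector};

// Hypothetical: attach a Hessian, then move it out, e.g. to form a Newton step.
let mut eval = FuncEvalMultivariate::new(0.0, DVector::from_vec(vec![1.0, -1.0]))
    .with_hessian(DMatrix::identity(2, 2));
let h = eval.take_hessian();
// Solve H d = -g for the Newton direction (identity Hessian here, so d = -g).
let direction = -h.lu().solve(eval.g()).expect("Hessian should be invertible");
println!("Newton direction: {}", direction);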

Trait Implementations


impl<T: Debug, H: Debug> Debug for FuncEval<T, H>


fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations


impl<T, H> Freeze for FuncEval<T, H>
where T: Freeze, H: Freeze,


impl<T, H> RefUnwindSafe for FuncEval<T, H>


impl<T, H> Send for FuncEval<T, H>
where T: Send, H: Send,


impl<T, H> Sync for FuncEval<T, H>
where T: Sync, H: Sync,


impl<T, H> Unpin for FuncEval<T, H>
where T: Unpin, H: Unpin,


impl<T, H> UnwindSafe for FuncEval<T, H>
where T: UnwindSafe, H: UnwindSafe,

Blanket Implementations


impl<T> Any for T
where T: 'static + ?Sized,


fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more

impl<T> Borrow<T> for T
where T: ?Sized,


fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more

impl<T> BorrowMut<T> for T
where T: ?Sized,


fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more

impl<T> From<T> for T


fn from(t: T) -> T

Returns the argument unchanged.


impl<T> Instrument for T


fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more

impl<T, U> Into<U> for T
where U: From<T>,


fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.


impl<T> Same for T


type Output = T

Should always be Self

impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,


fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.

impl<T, U> TryFrom<U> for T
where U: Into<T>,


type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,


type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,


fn vzip(self) -> V


impl<T> WithSubscriber for T


fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more