// scirs2_optimize/constrained/mod.rs
//! Constrained optimization algorithms
//!
//! This module provides methods for constrained optimization of scalar
//! functions of one or more variables.
//!
//! ## Example
//!
//! ```no_run
//! use scirs2_core::ndarray::{array, Array1};
//! use scirs2_optimize::constrained::{minimize_constrained, Method, Constraint};
//!
//! // Define a simple function to minimize: f(x) = (x[0] - 1)² + (x[1] - 2.5)²
//! // Unconstrained minimum is at (1.0, 2.5), but we add a constraint.
//! fn objective(x: &[f64]) -> f64 {
//!     (x[0] - 1.0).powi(2) + (x[1] - 2.5).powi(2)
//! }
//!
//! // Define a constraint: x[0] + x[1] <= 3
//! // Written as g(x) >= 0, so: g(x) = 3 - x[0] - x[1]
//! fn constraint(x: &[f64]) -> f64 {
//!     3.0 - x[0] - x[1]  // Should be >= 0
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Minimize the function starting at [1.0, 1.0]
//! // Note: Initial point should be feasible (satisfy constraints) for best convergence
//! let initial_point = array![1.0, 1.0];
//! let constraints = vec![Constraint::new(constraint, Constraint::INEQUALITY)];
//!
//! let result = minimize_constrained(
//!     objective,
//!     &initial_point,
//!     &constraints,
//!     Method::SLSQP,
//!     None
//! )?;
//!
//! // The constrained minimum is at [0.75, 2.25] with f(x) = 0.125
//! // This is where the gradient of f is parallel to the constraint boundary,
//! // solved via Lagrange multipliers on x[0] + x[1] = 3.
//! # Ok(())
//! # }
//! ```
//!
//! Note: This function requires LAPACK libraries to be linked for certain optimization methods.
46
47use crate::error::OptimizeResult;
48use crate::result::OptimizeResults;
49use scirs2_core::ndarray::{Array1, ArrayBase, Data, Ix1};
50use std::fmt;
51
52// Re-export optimization methods
53pub mod augmented_lagrangian;
54pub mod cobyla;
55pub mod interior_point;
56pub mod slsqp;
57pub mod sqp;
58pub mod trust_constr;
59
60// Re-export main functions
61pub use augmented_lagrangian::{
62    minimize_augmented_lagrangian, minimize_equality_constrained, minimize_inequality_constrained,
63    AugmentedLagrangianOptions, AugmentedLagrangianResult,
64};
65pub use cobyla::minimize_cobyla;
66pub use interior_point::{
67    minimize_interior_point, minimize_interior_point_constrained, InteriorPointOptions,
68    InteriorPointResult,
69};
70pub use slsqp::minimize_slsqp;
71pub use sqp::{minimize_sqp, SqpOptions, SqpResult};
72pub use trust_constr::{
73    minimize_trust_constr, minimize_trust_constr_with_derivatives, GradientFn, HessianFn,
74    HessianUpdate,
75};
76
77#[cfg(test)]
78mod tests;
79
/// Type alias for constraint functions: take the point `x` as a slice of
/// `f64` and return the scalar constraint value g(x).
pub type ConstraintFn = fn(&[f64]) -> f64;
82
/// Optimization methods for constrained minimization.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Method {
    /// Sequential Least SQuares Programming
    SLSQP,

    /// Trust-region constrained algorithm
    TrustConstr,

    /// Constrained Optimization BY Linear Approximations: a derivative-free
    /// method that models the objective and constraints with linear
    /// approximations over a shrinking simplex
    COBYLA,

    /// Interior point method
    InteriorPoint,

    /// Augmented Lagrangian method
    AugmentedLagrangian,

    /// Sequential Quadratic Programming
    SQP,
}
104
105impl fmt::Display for Method {
106    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
107        match self {
108            Method::SLSQP => write!(f, "SLSQP"),
109            Method::TrustConstr => write!(f, "trust-constr"),
110            Method::COBYLA => write!(f, "COBYLA"),
111            Method::InteriorPoint => write!(f, "interior-point"),
112            Method::AugmentedLagrangian => write!(f, "augmented-lagrangian"),
113            Method::SQP => write!(f, "SQP"),
114        }
115    }
116}
117
/// Options for the constrained optimizer.
///
/// Each tolerance is optional; `Default` fills them with 1e-8 and leaves
/// `maxiter` unset so each method can apply its own iteration cap.
#[derive(Debug, Clone)]
pub struct Options {
    /// Maximum number of iterations to perform
    pub maxiter: Option<usize>,

    /// Precision goal for the value in the stopping criterion
    pub ftol: Option<f64>,

    /// Precision goal for the gradient in the stopping criterion (relative)
    pub gtol: Option<f64>,

    /// Precision goal for constraint violation
    pub ctol: Option<f64>,

    /// Step size used for numerical approximation of the jacobian
    pub eps: Option<f64>,

    /// Whether to print convergence messages
    pub disp: bool,

    /// Return the optimization result after each iteration
    pub return_all: bool,
}
142
143impl Default for Options {
144    fn default() -> Self {
145        Options {
146            maxiter: None,
147            ftol: Some(1e-8),
148            gtol: Some(1e-8),
149            ctol: Some(1e-8),
150            eps: Some(1e-8),
151            disp: false,
152            return_all: false,
153        }
154    }
155}
156
/// Constraint type for constrained optimization.
///
/// A constraint is either functional (`fun` plus a [`ConstraintKind`]) or a
/// box constraint (`lb`/`ub` set); `is_bounds` distinguishes the two cases.
pub struct Constraint<F> {
    /// The constraint function
    pub fun: F,

    /// The type of constraint (equality or inequality)
    pub kind: ConstraintKind,

    /// Lower bound for a box constraint
    pub lb: Option<f64>,

    /// Upper bound for a box constraint
    pub ub: Option<f64>,
}
171
/// The kind of constraint.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ConstraintKind {
    /// Equality constraint: fun(x) = 0
    Equality,

    /// Inequality constraint: fun(x) >= 0
    Inequality,
}
181
182impl Constraint<fn(&[f64]) -> f64> {
183    /// Constant for equality constraint
184    pub const EQUALITY: ConstraintKind = ConstraintKind::Equality;
185
186    /// Constant for inequality constraint
187    pub const INEQUALITY: ConstraintKind = ConstraintKind::Inequality;
188
189    /// Create a new constraint
190    pub fn new(fun: fn(&[f64]) -> f64, kind: ConstraintKind) -> Self {
191        Constraint {
192            fun,
193            kind,
194            lb: None,
195            ub: None,
196        }
197    }
198
199    /// Create a new box constraint
200    pub fn new_bounds(lb: Option<f64>, ub: Option<f64>) -> Self {
201        Constraint {
202            fun: |_| 0.0, // Dummy function for box constraints
203            kind: ConstraintKind::Inequality,
204            lb,
205            ub,
206        }
207    }
208}
209
210impl<F> Constraint<F> {
211    /// Check if this is a box constraint
212    pub fn is_bounds(&self) -> bool {
213        self.lb.is_some() || self.ub.is_some()
214    }
215}
216
217/// Minimizes a scalar function of one or more variables with constraints.
218///
219/// # Arguments
220///
221/// * `func` - A function that takes a slice of values and returns a scalar
222/// * `x0` - The initial guess
223/// * `constraints` - Vector of constraints
224/// * `method` - The optimization method to use
225/// * `options` - Options for the optimizer
226///
227/// # Returns
228///
229/// * `OptimizeResults` containing the optimization results
230///
231/// # Example
232///
233/// ```no_run
234/// use scirs2_core::ndarray::array;
235/// use scirs2_optimize::constrained::{minimize_constrained, Method, Constraint};
236///
237/// // Function to minimize
238/// fn objective(x: &[f64]) -> f64 {
239///     (x[0] - 1.0).powi(2) + (x[1] - 2.5).powi(2)
240/// }
241///
242/// // Constraint: x[0] + x[1] <= 3
243/// fn constraint(x: &[f64]) -> f64 {
244///     3.0 - x[0] - x[1]  // Should be >= 0
245/// }
246///
247/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
248/// let initial_point = array![0.0, 0.0];
249/// let constraints = vec![Constraint::new(constraint, Constraint::INEQUALITY)];
250///
251/// let result = minimize_constrained(
252///     objective,
253///     &initial_point,
254///     &constraints,
255///     Method::SLSQP,
256///     None
257/// )?;
258/// # Ok(())
259/// # }
260/// ```
261#[allow(dead_code)]
262pub fn minimize_constrained<F, S>(
263    func: F,
264    x0: &ArrayBase<S, Ix1>,
265    constraints: &[Constraint<ConstraintFn>],
266    method: Method,
267    options: Option<Options>,
268) -> OptimizeResult<OptimizeResults<f64>>
269where
270    F: Fn(&[f64]) -> f64 + Clone,
271    S: Data<Elem = f64>,
272{
273    let options = options.unwrap_or_default();
274
275    // Implementation of various methods will go here
276    match method {
277        Method::SLSQP => minimize_slsqp(func, x0, constraints, &options),
278        Method::TrustConstr => minimize_trust_constr(func, x0, constraints, &options),
279        Method::COBYLA => minimize_cobyla(func, x0, constraints, &options),
280        Method::InteriorPoint => {
281            // Convert constraints to interior point format
282            let x0_arr = Array1::from_vec(x0.to_vec());
283
284            // Create interior point options from general options
285            let ip_options = InteriorPointOptions {
286                max_iter: options.maxiter.unwrap_or(100),
287                tol: options.gtol.unwrap_or(1e-8),
288                feas_tol: options.ctol.unwrap_or(1e-8),
289                ..Default::default()
290            };
291
292            // Convert to OptimizeResults format
293            match minimize_interior_point_constrained(func, x0_arr, constraints, Some(ip_options)) {
294                Ok(result) => {
295                    let opt_result = OptimizeResults::<f64> {
296                        x: result.x,
297                        fun: result.fun,
298                        nit: result.nit,
299                        nfev: result.nfev,
300                        success: result.success,
301                        message: result.message,
302                        jac: None,
303                        hess: None,
304                        constr: None,
305                        njev: 0,  // Not tracked by interior point method
306                        nhev: 0,  // Not tracked by interior point method
307                        maxcv: 0, // Not applicable for interior point
308                        status: if result.success { 0 } else { 1 },
309                    };
310                    Ok(opt_result)
311                }
312                Err(e) => Err(e),
313            }
314        }
315        Method::AugmentedLagrangian => {
316            // Convert to augmented Lagrangian method format (simplified for now)
317            Err(crate::error::OptimizeError::NotImplementedError(
318                "Augmented Lagrangian method integration with minimize_constrained not yet implemented"
319                    .to_string(),
320            ))
321        }
322        Method::SQP => sqp::minimize_sqp_compat(func, x0, constraints, &options),
323    }
324}