scirs2_optimize/constrained/mod.rs
//! Constrained optimization algorithms
//!
//! This module provides methods for constrained optimization of scalar
//! functions of one or more variables.
//!
//! ## Example
//!
//! ```no_run
//! use scirs2_core::ndarray::{array, Array1};
//! use scirs2_optimize::constrained::{minimize_constrained, Method, Constraint};
//!
//! // Define a simple function to minimize: f(x) = (x[0] - 1)² + (x[1] - 2.5)²
//! // Unconstrained minimum is at (1.0, 2.5), but we add a constraint.
//! fn objective(x: &[f64]) -> f64 {
//!     (x[0] - 1.0).powi(2) + (x[1] - 2.5).powi(2)
//! }
//!
//! // Define a constraint: x[0] + x[1] <= 3
//! // Written as g(x) >= 0, so: g(x) = 3 - x[0] - x[1]
//! fn constraint(x: &[f64]) -> f64 {
//!     3.0 - x[0] - x[1] // Should be >= 0
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Minimize the function starting at [1.0, 1.0]
//! // Note: Initial point should be feasible (satisfy constraints) for best convergence
//! let initial_point = array![1.0, 1.0];
//! let constraints = vec![Constraint::new(constraint, Constraint::INEQUALITY)];
//!
//! let result = minimize_constrained(
//!     objective,
//!     &initial_point,
//!     &constraints,
//!     Method::SLSQP,
//!     None
//! )?;
//!
//! // The constrained minimum is at [0.75, 2.25] with f(x) = 0.125
//! // This is where the gradient of f is parallel to the constraint boundary,
//! // solved via Lagrange multipliers on x[0] + x[1] = 3.
//! # Ok(())
//! # }
//! ```
//!
//! Note: Some optimization methods in this module require LAPACK libraries to be linked.

use crate::error::OptimizeResult;
use crate::result::OptimizeResults;
use scirs2_core::ndarray::{Array1, ArrayBase, Data, Ix1};
use std::fmt;

// Optimization method submodules
pub mod augmented_lagrangian;
pub mod cobyla;
pub mod interior_point;
pub mod slsqp;
pub mod trust_constr;

// Re-export main functions
pub use augmented_lagrangian::{
    minimize_augmented_lagrangian, minimize_equality_constrained, minimize_inequality_constrained,
    AugmentedLagrangianOptions, AugmentedLagrangianResult,
};
pub use cobyla::minimize_cobyla;
pub use interior_point::{
    minimize_interior_point, minimize_interior_point_constrained, InteriorPointOptions,
    InteriorPointResult,
};
pub use slsqp::minimize_slsqp;
pub use trust_constr::{
    minimize_trust_constr, minimize_trust_constr_with_derivatives, GradientFn, HessianFn,
    HessianUpdate,
};

#[cfg(test)]
mod tests;

/// Type alias for constraint functions that take a slice of f64 and return f64
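///
/// A minimal illustrative sketch (the constraint function `g` below is hypothetical):
///
/// ```no_run
/// use scirs2_optimize::constrained::ConstraintFn;
///
/// // g(x) = x[0] + x[1] - 1, intended as g(x) >= 0
/// fn g(x: &[f64]) -> f64 {
///     x[0] + x[1] - 1.0
/// }
///
/// let _constraint: ConstraintFn = g;
/// ```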
pub type ConstraintFn = fn(&[f64]) -> f64;

/// Optimization methods for constrained minimization.
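///
/// A small sketch of method selection; the displayed names come from the
/// `Display` implementation below:
///
/// ```no_run
/// use scirs2_optimize::constrained::Method;
///
/// let method = Method::SLSQP;
/// assert_eq!(method.to_string(), "SLSQP");
/// assert_eq!(Method::TrustConstr.to_string(), "trust-constr");
/// ```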
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Method {
    /// Sequential Least SQuares Programming
    SLSQP,

    /// Trust-region constrained algorithm
    TrustConstr,

    /// Constrained Optimization BY Linear Approximations (derivative-free)
    COBYLA,

    /// Interior point method
    InteriorPoint,

    /// Augmented Lagrangian method
    AugmentedLagrangian,
}

impl fmt::Display for Method {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Method::SLSQP => write!(f, "SLSQP"),
            Method::TrustConstr => write!(f, "trust-constr"),
            Method::COBYLA => write!(f, "COBYLA"),
            Method::InteriorPoint => write!(f, "interior-point"),
            Method::AugmentedLagrangian => write!(f, "augmented-lagrangian"),
        }
    }
}

/// Options for the constrained optimizer.
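///
/// Individual fields can be overridden on top of `Default`; a minimal sketch
/// (the values shown are illustrative, not recommendations):
///
/// ```no_run
/// use scirs2_optimize::constrained::Options;
///
/// let opts = Options {
///     maxiter: Some(500),
///     ftol: Some(1e-10),
///     ..Default::default()
/// };
/// assert!(!opts.disp);
/// ```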
#[derive(Debug, Clone)]
pub struct Options {
    /// Maximum number of iterations to perform
    pub maxiter: Option<usize>,

    /// Precision goal for the value in the stopping criterion
    pub ftol: Option<f64>,

    /// Precision goal for the gradient in the stopping criterion (relative)
    pub gtol: Option<f64>,

    /// Precision goal for constraint violation
    pub ctol: Option<f64>,

    /// Step size used for numerical approximation of the Jacobian
    pub eps: Option<f64>,

    /// Whether to print convergence messages
    pub disp: bool,

    /// Return the optimization result after each iteration
    pub return_all: bool,
}

impl Default for Options {
    fn default() -> Self {
        Options {
            maxiter: None,
            ftol: Some(1e-8),
            gtol: Some(1e-8),
            ctol: Some(1e-8),
            eps: Some(1e-8),
            disp: false,
            return_all: false,
        }
    }
}

/// Constraint type for constrained optimization
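///
/// A minimal illustrative sketch (the constraint function `g` is hypothetical and
/// mirrors the module-level example):
///
/// ```no_run
/// use scirs2_optimize::constrained::{Constraint, ConstraintKind};
///
/// // g(x) = 3 - x[0] - x[1], intended as g(x) >= 0
/// fn g(x: &[f64]) -> f64 {
///     3.0 - x[0] - x[1]
/// }
///
/// let c = Constraint::new(g, ConstraintKind::Inequality);
/// assert_eq!(c.kind, ConstraintKind::Inequality);
/// assert!(!c.is_bounds());
/// ```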
pub struct Constraint<F> {
    /// The constraint function
    pub fun: F,

    /// The type of constraint (equality or inequality)
    pub kind: ConstraintKind,

    /// Lower bound for a box constraint
    pub lb: Option<f64>,

    /// Upper bound for a box constraint
    pub ub: Option<f64>,
}

/// The kind of constraint
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ConstraintKind {
    /// Equality constraint: fun(x) = 0
    Equality,

    /// Inequality constraint: fun(x) >= 0
    Inequality,
}

impl Constraint<fn(&[f64]) -> f64> {
    /// Constant for equality constraint
    pub const EQUALITY: ConstraintKind = ConstraintKind::Equality;

    /// Constant for inequality constraint
    pub const INEQUALITY: ConstraintKind = ConstraintKind::Inequality;

    /// Create a new constraint
    pub fn new(fun: fn(&[f64]) -> f64, kind: ConstraintKind) -> Self {
        Constraint {
            fun,
            kind,
            lb: None,
            ub: None,
        }
    }

    /// Create a new box constraint
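    ///
    /// A minimal illustrative sketch (the bound values are arbitrary):
    ///
    /// ```no_run
    /// use scirs2_optimize::constrained::Constraint;
    ///
    /// // Box constraint 0 <= x_i <= 1
    /// let bounds = Constraint::new_bounds(Some(0.0), Some(1.0));
    /// assert!(bounds.is_bounds());
    /// ```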
    pub fn new_bounds(lb: Option<f64>, ub: Option<f64>) -> Self {
        Constraint {
            fun: |_| 0.0, // Dummy function for box constraints
            kind: ConstraintKind::Inequality,
            lb,
            ub,
        }
    }
}

impl<F> Constraint<F> {
    /// Check if this is a box constraint
    pub fn is_bounds(&self) -> bool {
        self.lb.is_some() || self.ub.is_some()
    }
}

/// Minimizes a scalar function of one or more variables with constraints.
///
/// # Arguments
///
/// * `func` - A function that takes a slice of values and returns a scalar
/// * `x0` - The initial guess
/// * `constraints` - Slice of constraints
/// * `method` - The optimization method to use
/// * `options` - Options for the optimizer
///
/// # Returns
///
/// * `OptimizeResults` containing the optimization results
///
/// # Example
///
/// ```no_run
/// use scirs2_core::ndarray::array;
/// use scirs2_optimize::constrained::{minimize_constrained, Method, Constraint};
///
/// // Function to minimize
/// fn objective(x: &[f64]) -> f64 {
///     (x[0] - 1.0).powi(2) + (x[1] - 2.5).powi(2)
/// }
///
/// // Constraint: x[0] + x[1] <= 3
/// fn constraint(x: &[f64]) -> f64 {
///     3.0 - x[0] - x[1] // Should be >= 0
/// }
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let initial_point = array![0.0, 0.0];
/// let constraints = vec![Constraint::new(constraint, Constraint::INEQUALITY)];
///
/// let result = minimize_constrained(
///     objective,
///     &initial_point,
///     &constraints,
///     Method::SLSQP,
///     None
/// )?;
/// # Ok(())
/// # }
/// ```
#[allow(dead_code)]
pub fn minimize_constrained<F, S>(
    func: F,
    x0: &ArrayBase<S, Ix1>,
    constraints: &[Constraint<ConstraintFn>],
    method: Method,
    options: Option<Options>,
) -> OptimizeResult<OptimizeResults<f64>>
where
    F: Fn(&[f64]) -> f64 + Clone,
    S: Data<Elem = f64>,
{
    let options = options.unwrap_or_default();

    // Dispatch to the selected optimization method
    match method {
        Method::SLSQP => minimize_slsqp(func, x0, constraints, &options),
        Method::TrustConstr => minimize_trust_constr(func, x0, constraints, &options),
        Method::COBYLA => minimize_cobyla(func, x0, constraints, &options),
        Method::InteriorPoint => {
            // The interior point solver expects an owned starting point
            let x0_arr = Array1::from_vec(x0.to_vec());

            // Create interior point options from general options
            let ip_options = InteriorPointOptions {
                max_iter: options.maxiter.unwrap_or(100),
                tol: options.gtol.unwrap_or(1e-8),
                feas_tol: options.ctol.unwrap_or(1e-8),
                ..Default::default()
            };

            // Run the solver and convert its result into OptimizeResults
            match minimize_interior_point_constrained(func, x0_arr, constraints, Some(ip_options)) {
                Ok(result) => {
                    let opt_result = OptimizeResults::<f64> {
                        x: result.x,
                        fun: result.fun,
                        nit: result.nit,
                        nfev: result.nfev,
                        success: result.success,
                        message: result.message,
                        jac: None,
                        hess: None,
                        constr: None,
                        njev: 0, // Not tracked by interior point method
                        nhev: 0, // Not tracked by interior point method
                        maxcv: 0, // Not applicable for interior point
                        status: if result.success { 0 } else { 1 },
                    };
                    Ok(opt_result)
                }
                Err(e) => Err(e),
            }
        }
        Method::AugmentedLagrangian => {
            // Integration with the augmented Lagrangian solver is not yet wired up here
            Err(crate::error::OptimizeError::NotImplementedError(
                "Augmented Lagrangian method integration with minimize_constrained not yet implemented"
                    .to_string(),
            ))
        }
    }
317}