use super::{ConvergenceStatus, OptimizationResult, Optimizer};
use crate::primitives::Vector;
/// Log-barrier interior-point optimizer for inequality-constrained
/// minimization (`minimize` solves `min f(x)` s.t. `g(x) < 0` componentwise).
#[derive(Debug, Clone)]
pub struct InteriorPoint {
/// Maximum number of outer (barrier-reduction) iterations.
max_iter: usize,
/// Convergence tolerance on the barrier-gradient norm.
tol: f32,
/// Barrier parameter used at the start of each solve; `reset` restores `mu` to this.
initial_mu: f32,
/// Current barrier parameter; multiplied by `beta` after each outer iteration.
mu: f32,
/// Barrier reduction factor (defaults to 0.2; settable via `with_beta`).
beta: f32,
}
impl InteriorPoint {
    /// Creates an interior-point optimizer.
    ///
    /// * `max_iter` — maximum number of outer (barrier-reduction) iterations.
    /// * `tol` — convergence tolerance on the barrier-gradient norm.
    /// * `initial_mu` — starting value of the barrier parameter `mu`.
    #[must_use]
    pub fn new(max_iter: usize, tol: f32, initial_mu: f32) -> Self {
        Self {
            max_iter,
            tol,
            initial_mu,
            mu: initial_mu,
            // Default barrier reduction factor; override via `with_beta`.
            beta: 0.2,
        }
    }

    /// Builder-style setter for the barrier reduction factor `beta`
    /// (the factor `mu` is multiplied by after each outer iteration).
    #[must_use]
    pub fn with_beta(mut self, beta: f32) -> Self {
        self.beta = beta;
        self
    }

    /// Minimizes `objective` subject to the inequality constraints
    /// `inequality(x) < 0` (componentwise) using a log-barrier method.
    ///
    /// Each outer iteration approximately minimizes the barrier objective
    /// `f(x) - mu * Σ ln(-g_j(x))` with fixed-step gradient descent, then
    /// shrinks `mu` by `beta`. Convergence is declared once the barrier
    /// gradient norm drops below `tol` and `mu < 1e-4`.
    ///
    /// * `objective` — `f(x)`, the scalar objective.
    /// * `gradient` — `∇f(x)`.
    /// * `inequality` — `g(x)`, one entry per constraint; feasible iff `g_j(x) < 0`.
    /// * `inequality_jac` — rows of the constraint Jacobian, one `∇g_j(x)` per constraint.
    /// * `x0` — strictly feasible starting point.
    ///
    /// # Panics
    /// Panics if `x0` is not strictly feasible (`g_j(x0) >= 0` for some `j`).
    #[allow(clippy::too_many_lines)]
    pub fn minimize<F, G, H, J>(
        &mut self,
        objective: F,
        gradient: G,
        inequality: H,
        inequality_jac: J,
        x0: Vector<f32>,
    ) -> OptimizationResult
    where
        F: Fn(&Vector<f32>) -> f32,
        G: Fn(&Vector<f32>) -> Vector<f32>,
        H: Fn(&Vector<f32>) -> Vector<f32>,
        J: Fn(&Vector<f32>) -> Vec<Vector<f32>>,
    {
        let start_time = std::time::Instant::now();
        // The log barrier is only defined on the strictly feasible region,
        // so a strictly feasible start is a hard precondition.
        let g0 = inequality(&x0);
        for i in 0..g0.len() {
            assert!(
                g0[i] < 0.0,
                "Initial point is infeasible: g[{}] = {} ≥ 0. Interior point requires strictly feasible start.",
                i, g0[i]
            );
        }
        let mut x = x0;
        self.mu = self.initial_mu;
        let m = g0.len();
        for outer_iter in 0..self.max_iter {
            // Capture mu by value so the closure does not borrow `self`.
            let mu = self.mu;
            // Gradient of the barrier objective f(x) - mu * Σ ln(-g_j(x)).
            let barrier_grad = |x_inner: &Vector<f32>| {
                let grad_f = gradient(x_inner);
                let g_val = inequality(x_inner);
                let jac_g = inequality_jac(x_inner);
                let n = x_inner.len();
                let mut barrier_g = Vector::zeros(n);
                for i in 0..n {
                    barrier_g[i] = grad_f[i];
                }
                for j in 0..m {
                    // Guard: the barrier term is undefined for a (numerically)
                    // violated constraint, so skip its contribution.
                    if g_val[j] >= 0.0 {
                        continue;
                    }
                    // d/dx [-mu * ln(-g_j)] = (-mu / g_j) * ∇g_j, positive since g_j < 0.
                    let coeff = -mu / g_val[j];
                    for i in 0..n {
                        barrier_g[i] += coeff * jac_g[j][i];
                    }
                }
                barrier_g
            };
            // Inner loop: fixed-step gradient descent on the barrier objective.
            let mut x_sub = x.clone();
            let alpha = 0.01;
            for _sub_iter in 0..50 {
                let grad = barrier_grad(&x_sub);
                let mut grad_norm_sq = 0.0;
                for i in 0..grad.len() {
                    grad_norm_sq += grad[i] * grad[i];
                }
                if grad_norm_sq < 1e-8 {
                    break;
                }
                // Take a trial step; if it leaves the strictly feasible region,
                // fully undo it and retry with a halved step. (The previous code
                // only reverted half the step, which could leave the iterate
                // infeasible — and the barrier gradient silently drops violated
                // constraints, corrupting subsequent descent directions.)
                let mut step = alpha;
                loop {
                    for i in 0..x_sub.len() {
                        x_sub[i] -= step * grad[i];
                    }
                    let g_sub = inequality(&x_sub);
                    let mut infeasible = false;
                    for j in 0..m {
                        if g_sub[j] >= -1e-8 {
                            infeasible = true;
                            break;
                        }
                    }
                    if !infeasible {
                        break;
                    }
                    // Fully revert the trial step before shrinking it.
                    for i in 0..x_sub.len() {
                        x_sub[i] += step * grad[i];
                    }
                    step *= 0.5;
                    if step < alpha * 1e-3 {
                        // No feasible step found along -grad; keep the current
                        // (still strictly feasible) iterate.
                        break;
                    }
                }
            }
            x = x_sub;
            // Outer convergence check at the new iterate.
            let grad_barrier = barrier_grad(&x);
            let mut grad_norm = 0.0;
            for i in 0..grad_barrier.len() {
                grad_norm += grad_barrier[i] * grad_barrier[i];
            }
            grad_norm = grad_norm.sqrt();
            let g_val = inequality(&x);
            let mut max_violation = 0.0;
            for i in 0..m {
                if g_val[i] > max_violation {
                    max_violation = g_val[i];
                }
            }
            // Declare convergence only once mu is small enough that the barrier
            // objective is a good proxy for the true objective.
            if grad_norm < self.tol && self.mu < 1e-4 {
                let final_obj = objective(&x);
                return OptimizationResult {
                    solution: x,
                    objective_value: final_obj,
                    iterations: outer_iter + 1,
                    status: ConvergenceStatus::Converged,
                    gradient_norm: grad_norm,
                    constraint_violation: max_violation.max(0.0),
                    elapsed_time: start_time.elapsed(),
                };
            }
            // Shrink the barrier parameter for the next outer iteration.
            self.mu *= self.beta;
        }
        // Iteration budget exhausted: report the last iterate with the
        // unconstrained objective gradient as the diagnostic norm.
        let final_obj = objective(&x);
        let g_val = inequality(&x);
        let mut max_violation = 0.0;
        for i in 0..g_val.len() {
            if g_val[i] > max_violation {
                max_violation = g_val[i];
            }
        }
        let grad_f = gradient(&x);
        let mut grad_norm = 0.0;
        for i in 0..grad_f.len() {
            grad_norm += grad_f[i] * grad_f[i];
        }
        grad_norm = grad_norm.sqrt();
        OptimizationResult {
            solution: x,
            objective_value: final_obj,
            iterations: self.max_iter,
            status: ConvergenceStatus::MaxIterations,
            gradient_norm: grad_norm,
            constraint_violation: max_violation.max(0.0),
            elapsed_time: start_time.elapsed(),
        }
    }
}
impl Optimizer for InteriorPoint {
    /// Unsupported: interior point is a batch method with no per-sample
    /// update rule. Always panics; use [`InteriorPoint::minimize`] instead.
    fn step(&mut self, _params: &mut Vector<f32>, _gradients: &Vector<f32>) {
        panic!("Interior Point does not support stochastic updates (step). Use minimize() for constrained optimization.")
    }

    /// Restores the barrier parameter to its initial value so the
    /// optimizer can be reused for a fresh solve.
    fn reset(&mut self) {
        self.mu = self.initial_mu;
    }
}
#[cfg(test)]
#[path = "interior_point_tests.rs"]
mod tests;