use std::collections::HashMap;
use super::autodiff::Tape;
use super::parser::NlFileData;
use crate::NlpProblem;
/// A nonlinear programming problem assembled from parsed NL-file data:
/// bounds, starting point, autodiff tapes for the nonlinear expressions, and
/// precomputed Jacobian/Hessian sparsity structures.
pub struct NlProblem {
    /// Number of decision variables.
    n: usize,
    /// Number of constraints.
    m: usize,
    /// Variable lower bounds.
    x_l: Vec<f64>,
    /// Variable upper bounds.
    x_u: Vec<f64>,
    /// Constraint lower bounds.
    g_l: Vec<f64>,
    /// Constraint upper bounds.
    g_u: Vec<f64>,
    /// Initial (starting) point.
    x0: Vec<f64>,
    /// When true, the objective value, gradient, and Hessian weight are
    /// negated so the solver always minimizes.
    maximize: bool,
    /// Autodiff tape for the objective's nonlinear part; `None` when the
    /// file provides no nonlinear objective expression.
    obj_tape: Option<Tape>,
    /// Linear objective terms as (variable index, coefficient) pairs.
    obj_linear: Vec<(usize, f64)>,
    /// Per-constraint autodiff tapes (`None` when the constraint has no
    /// nonlinear expression).
    con_tapes: Vec<Option<Tape>>,
    /// Per-constraint linear terms as (variable index, coefficient) pairs.
    con_linear: Vec<Vec<(usize, f64)>>,
    /// Jacobian sparsity rows (parallel to `jac_cols`).
    jac_rows: Vec<usize>,
    /// Jacobian sparsity columns (parallel to `jac_rows`).
    jac_cols: Vec<usize>,
    /// Maps a (row, col) Jacobian entry to its slot in the values array.
    jac_map: HashMap<(usize, usize), usize>,
    /// Hessian sparsity rows (parallel to `hess_cols`).
    hess_rows: Vec<usize>,
    /// Hessian sparsity columns (parallel to `hess_rows`).
    hess_cols: Vec<usize>,
    /// Maps a (row, col) Hessian entry to its slot in the values array.
    hess_map: HashMap<(usize, usize), usize>,
}
impl NlProblem {
    /// Builds an [`NlProblem`] from parsed NL-file data.
    ///
    /// Compiles the objective and constraint expressions into autodiff tapes
    /// (sharing one common-subexpression cache), then precomputes sparsity:
    ///
    /// * Jacobian entries come from the linear coefficient lists plus every
    ///   variable referenced by a constraint tape, deduplicated.
    /// * Hessian entries are the union of each tape's sparsity pattern, with
    ///   the diagonal entry `(v, v)` added for every variable that appears on
    ///   any tape.
    ///
    /// Only the first objective in the file is used; if the file declares no
    /// objective, a minimize sense with no nonlinear part is assumed.
    pub fn from_nl_data(mut data: NlFileData) -> Self {
        let n = data.header.n_vars;
        let m = data.header.n_constrs;
        let (obj_idx, maximize, obj_expr) = data
            .obj_exprs
            .into_iter()
            .next()
            .unwrap_or((0, false, None));
        use super::autodiff::CommonExprCache;
        let ce_cache = CommonExprCache::build(&data.common_exprs, n);
        let obj_tape =
            obj_expr.map(|expr| Tape::build_cached(&expr, &data.common_exprs, n, &ce_cache));
        // Take the objective's linear part in place instead of cloning it;
        // `data.obj_linear` is not read again below and `data` is consumed.
        let obj_linear = data
            .obj_linear
            .get_mut(obj_idx)
            .map(std::mem::take)
            .unwrap_or_default();
        let con_tapes: Vec<Option<Tape>> = data
            .con_exprs
            .iter()
            .map(|expr| {
                expr.as_ref()
                    .map(|e| Tape::build_cached(e, &data.common_exprs, n, &ce_cache))
            })
            .collect();
        // Records a Jacobian entry once per (constraint, variable) key. The
        // entry API hashes each key a single time, unlike the previous
        // `contains_key` + `insert` pattern which hashed twice.
        fn record(
            entries: &mut Vec<(usize, usize)>,
            positions: &mut HashMap<(usize, usize), usize>,
            key: (usize, usize),
        ) {
            if let std::collections::hash_map::Entry::Vacant(slot) = positions.entry(key) {
                slot.insert(entries.len());
                entries.push(key);
            }
        }
        let mut jac_entries: Vec<(usize, usize)> = Vec::new();
        let mut jac_set: HashMap<(usize, usize), usize> = HashMap::new();
        // Linear coefficients contribute Jacobian entries...
        for (i, linear) in data.con_linear.iter().enumerate() {
            for &(var_idx, _) in linear {
                record(&mut jac_entries, &mut jac_set, (i, var_idx));
            }
        }
        // ...as does every variable referenced on a constraint tape.
        for (i, tape) in con_tapes.iter().enumerate() {
            if let Some(tape) = tape {
                for op in &tape.ops {
                    if let super::autodiff::TapeOp::Var(j) = op {
                        record(&mut jac_entries, &mut jac_set, (i, *j));
                    }
                }
            }
        }
        let jac_rows: Vec<usize> = jac_entries.iter().map(|&(r, _)| r).collect();
        let jac_cols: Vec<usize> = jac_entries.iter().map(|&(_, c)| c).collect();
        // Hessian sparsity: union of the tapes' patterns. A BTreeSet keeps the
        // triplet ordering deterministic.
        use std::collections::BTreeSet;
        let mut hess_set: BTreeSet<(usize, usize)> = BTreeSet::new();
        if let Some(ref tape) = obj_tape {
            hess_set.extend(tape.hessian_sparsity());
        }
        for tape in con_tapes.iter().flatten() {
            hess_set.extend(tape.hessian_sparsity());
        }
        // Conservatively include the diagonal entry for every variable that
        // occurs in any nonlinear expression.
        let mut all_nonlinear_vars = BTreeSet::new();
        if let Some(ref tape) = obj_tape {
            for &v in &tape.variables() {
                all_nonlinear_vars.insert(v);
            }
        }
        for tape in con_tapes.iter().flatten() {
            for &v in &tape.variables() {
                all_nonlinear_vars.insert(v);
            }
        }
        for &v in &all_nonlinear_vars {
            hess_set.insert((v, v));
        }
        log::info!(
            "Hessian sparsity: {} structural nonzeros (from {} nonlinear vars)",
            hess_set.len(),
            all_nonlinear_vars.len()
        );
        let mut hess_rows = Vec::with_capacity(hess_set.len());
        let mut hess_cols = Vec::with_capacity(hess_set.len());
        let mut hess_map = HashMap::with_capacity(hess_set.len());
        for (idx, &(r, c)) in hess_set.iter().enumerate() {
            hess_rows.push(r);
            hess_cols.push(c);
            hess_map.insert((r, c), idx);
        }
        NlProblem {
            n,
            m,
            x_l: data.x_l,
            x_u: data.x_u,
            g_l: data.g_l,
            g_u: data.g_u,
            x0: data.x0,
            maximize,
            obj_tape,
            obj_linear,
            con_tapes,
            con_linear: data.con_linear,
            jac_rows,
            jac_cols,
            jac_map: jac_set,
            hess_rows,
            hess_cols,
            hess_map,
        }
    }

    /// Writes the objective gradient into `grad` (zeroed first): nonlinear
    /// tape gradient plus linear coefficients, negated for maximization so it
    /// matches `objective`.
    fn obj_gradient(&self, x: &[f64], grad: &mut [f64]) {
        grad.iter_mut().for_each(|v| *v = 0.0);
        if let Some(tape) = &self.obj_tape {
            tape.gradient(x, grad);
        }
        for &(idx, coeff) in &self.obj_linear {
            // Skip (rather than panic on) linear indices beyond `grad`.
            if idx < grad.len() {
                grad[idx] += coeff;
            }
        }
        if self.maximize {
            for g in grad.iter_mut() {
                *g = -*g;
            }
        }
    }

    /// Writes the gradient of constraint `i` into `grad` (zeroed first):
    /// nonlinear tape gradient plus linear coefficients.
    fn con_gradient(&self, i: usize, x: &[f64], grad: &mut [f64]) {
        grad.iter_mut().for_each(|v| *v = 0.0);
        if let Some(tape) = &self.con_tapes[i] {
            tape.gradient(x, grad);
        }
        for &(idx, coeff) in &self.con_linear[i] {
            if idx < grad.len() {
                grad[idx] += coeff;
            }
        }
    }
}
impl NlpProblem for NlProblem {
    /// Number of decision variables.
    fn num_variables(&self) -> usize {
        self.n
    }

    /// Number of constraints.
    fn num_constraints(&self) -> usize {
        self.m
    }

    /// Copies the variable bounds into the caller-provided slices.
    fn bounds(&self, x_l: &mut [f64], x_u: &mut [f64]) {
        x_u.copy_from_slice(&self.x_u);
        x_l.copy_from_slice(&self.x_l);
    }

    /// Copies the constraint bounds into the caller-provided slices.
    fn constraint_bounds(&self, g_l: &mut [f64], g_u: &mut [f64]) {
        g_u.copy_from_slice(&self.g_u);
        g_l.copy_from_slice(&self.g_l);
    }

    /// Copies the starting point into `x0`.
    fn initial_point(&self, x0: &mut [f64]) {
        x0.copy_from_slice(&self.x0);
    }

    /// Evaluates the objective at `x`: nonlinear tape value plus linear
    /// terms, negated when the problem is a maximization.
    fn objective(&self, x: &[f64]) -> f64 {
        let mut total = 0.0;
        if let Some(tape) = self.obj_tape.as_ref() {
            total += tape.eval(x);
        }
        for &(var, coeff) in self.obj_linear.iter() {
            total += coeff * x[var];
        }
        if self.maximize {
            -total
        } else {
            total
        }
    }

    /// Objective gradient; delegates to the shared helper so the maximize
    /// sign flip stays in one place.
    fn gradient(&self, x: &[f64], grad: &mut [f64]) {
        self.obj_gradient(x, grad);
    }

    /// Evaluates every constraint body at `x` into `g`.
    fn constraints(&self, x: &[f64], g: &mut [f64]) {
        for row in 0..self.m {
            let mut total = 0.0;
            if let Some(tape) = self.con_tapes[row].as_ref() {
                total += tape.eval(x);
            }
            for &(var, coeff) in self.con_linear[row].iter() {
                total += coeff * x[var];
            }
            g[row] = total;
        }
    }

    /// Returns the Jacobian sparsity pattern as parallel (row, col) vectors.
    fn jacobian_structure(&self) -> (Vec<usize>, Vec<usize>) {
        (self.jac_rows.to_vec(), self.jac_cols.to_vec())
    }

    /// Fills `vals` with the Jacobian nonzeros at `x`, one constraint row at
    /// a time via a dense scratch gradient.
    fn jacobian_values(&self, x: &[f64], vals: &mut [f64]) {
        for v in vals.iter_mut() {
            *v = 0.0;
        }
        let mut scratch = vec![0.0; self.n];
        for row in 0..self.m {
            self.con_gradient(row, x, &mut scratch);
            for (col, &dval) in scratch.iter().enumerate() {
                // Structural entries whose gradient is exactly zero keep the
                // 0.0 written above, so only nonzeros need a map lookup.
                if dval != 0.0 {
                    if let Some(&slot) = self.jac_map.get(&(row, col)) {
                        vals[slot] = dval;
                    }
                }
            }
        }
    }

    /// Returns the Hessian sparsity pattern as parallel (row, col) vectors.
    fn hessian_structure(&self) -> (Vec<usize>, Vec<usize>) {
        (self.hess_rows.to_vec(), self.hess_cols.to_vec())
    }

    /// Accumulates the Hessian of the Lagrangian at `x` into `vals`.
    fn hessian_values(&self, x: &[f64], obj_factor: f64, lambda: &[f64], vals: &mut [f64]) {
        for v in vals.iter_mut() {
            *v = 0.0;
        }
        if let Some(tape) = self.obj_tape.as_ref() {
            // Flip the objective weight so maximization is consistent with
            // the negated objective and gradient.
            let weight = if self.maximize { -obj_factor } else { obj_factor };
            tape.hessian_accumulate(x, weight, &self.hess_map, vals);
        }
        for (row, entry) in self.con_tapes.iter().enumerate() {
            if let Some(tape) = entry {
                tape.hessian_accumulate(x, lambda[row], &self.hess_map, vals);
            }
        }
    }
}