use std::{
cmp::Ordering,
env,
time::Instant,
};
use maria_linalg::Vector;
pub use super::{
error,
function::Function,
paradigm::Paradigm,
};
/// Numerical optimizer over `N`-dimensional vectors, maintaining a
/// population of `K` candidate solutions.
///
/// Settings are read from the command line in `Optimizer::new`.
pub struct Optimizer<const N: usize, const K: usize> {
    /// Update strategy (steepest descent, Newton, genetic, or simulated
    /// annealing) applied by `update` / `update_discrete`.
    paradigm: Paradigm,
    /// Objective function being minimized; also supplies gradients.
    pub function: Box<dyn Function<N>>,
    /// Stopping criterion: iteration halts once the gradient norm of the
    /// best candidate drops to or below this value.
    criterion: f64,
    /// Hard cap on the number of iterations.
    maxiter: usize,
    /// Maximum temperature for simulated annealing (read by the paradigm).
    pub maxtemp: f64,
    /// Standard deviation used when spawning perturbed population members.
    pub stdev: f64,
}
impl<const N: usize, const K: usize> Optimizer<N, K> {
fn get_cli(
paradigm: &mut Paradigm,
criterion: &mut f64,
maxiter: &mut usize,
maxtemp: &mut f64,
stdev: &mut f64,
) {
let args = env::args().collect::<Vec<String>>();
let mut i = 1;
while i < args.len() {
let arg = args[i].as_str();
match arg {
"--paradigm" => {
i += 1;
if i == args.len() {
error("Missing paradigm");
}
*paradigm = match args[i].as_str() {
"steepest-descent" => Paradigm::SteepestDescent,
"newton" => Paradigm::Newton,
"genetic" => Paradigm::Genetic,
"simulated-annealing" => Paradigm::SimulatedAnnealing,
_ => error("Unrecognized paradigm"),
};
i += 1;
},
"--criterion" => {
i += 1;
if i == args.len() {
error("Missing criterion");
}
*criterion = match str::parse::<f64>(&args[i]) {
Ok (m) => m,
Err (_) => error("Could not parse as floating-point value"),
};
i += 1;
},
"--maxiter" => {
i += 1;
if i == args.len() {
error("Missing maxiter");
}
*maxiter = match str::parse::<usize>(&args[i]) {
Ok (m) => m,
Err (_) => error("Could not parse as integer value"),
};
i += 1;
},
"--maxtemp" => {
i += 1;
if i == args.len() {
error("Missing maximum annealing temperature");
}
*maxtemp = match str::parse::<f64>(&args[i]) {
Ok (m) => m,
Err (_) => error("Could not parse as floating-point value"),
};
i += 1;
},
"--stdev" => {
i += 1;
if i == args.len() {
error("Missing standard deviation");
}
*stdev = match str::parse::<f64>(&args[i]) {
Ok (m) => m,
Err (_) => error("Could not parse as floating-point value"),
};
i += 1;
},
_ => error("Unrecognized command line argument"),
}
}
}
pub fn new(
function: Box<dyn Function<N>>,
) -> Self {
let mut paradigm = Paradigm::SteepestDescent;
let mut criterion = 0.001;
let mut maxiter = 100;
let mut maxtemp = 1.0;
let mut stdev = 1.0;
Self::get_cli(
&mut paradigm,
&mut criterion,
&mut maxiter,
&mut maxtemp,
&mut stdev,
);
Self {
paradigm,
function,
criterion,
maxiter,
maxtemp,
stdev,
}
}
pub fn step(&self, input: Vector<N>) -> Vector<N> {
self.function.gradient(input).scale(-1.0)
}
pub fn update(&self, iter: usize, population: [Vector<N>; K]) -> [Vector<N>; K] {
self.paradigm.update(self, iter, population)
}
pub fn update_discrete(&self, iter: usize, population: [Vector<N>; K], permitted: &[f64]) -> [Vector<N>; K] {
self.paradigm.update_discrete(self, iter, population, permitted)
}
pub fn sort(&self, population: [Vector<N>; K]) -> [Vector<N>; K] {
let mut sorted = population;
sorted.sort_by(|one, two| {
let a = self.function.objective(*one);
let b = self.function.objective(*two);
match (a.is_nan(), b.is_nan()) {
(true, true) => Ordering::Equal,
(true, false) => Ordering::Greater,
(false, true) => Ordering::Less,
(false, false) => a.partial_cmp(&b).unwrap(),
}
});
sorted
}
pub fn get_best(&self, population: [Vector<N>; K]) -> Vector<N> {
self.sort(population)[0]
}
pub fn optimize(&self, input: Vector<N>) -> Vector<N> {
let start = Instant::now();
let mut i = 0;
let mut criterion = self.function.gradient(input).norm();
let mut population = [Vector::zero(); K];
for p in 0..K {
population[p] = Vector::<N>::child(&input, &input, self.stdev);
}
println!("INITIATING CONTINUOUS OPTIMIZATION");
println!("Paradigm: {}", self.paradigm);
println!("Stopping criterion: {}", self.criterion);
println!("Maximum iterations: {}", self.maxiter);
println!();
while criterion > self.criterion && i < self.maxiter {
i += 1;
let best = self.get_best(population);
println!("Iteration: {}", i);
println!("Objective: {:.8}", self.function.objective(best));
println!("Vector\n{}", best);
println!("Gradient magnitude: {:.8}", criterion);
population = self.update(i, population);
criterion = self.function.gradient(best).norm();
println!();
println!();
}
let time = start.elapsed().as_micros() as f64 / 1000.0;
if i == self.maxiter {
println!("Maximum iteration limit reached in {:.3} milliseconds", time);
} else {
println!("Convergence achieved in {:.3} milliseconds", time);
}
let best = self.get_best(population);
println!("Result\n{}", best);
println!("Objective: {:.8}", self.function.objective(best));
best
}
pub fn optimize_discrete(&self, input: Vector<N>, permitted: &[f64]) -> Vector<N> {
let start = Instant::now();
let mut i = 0;
let mut population = [Vector::zero(); K];
for p in 0..K {
population[p] = Vector::<N>::child_discrete(&input, &input, permitted);
}
println!("INITIATING DISCRETE OPTIMIZATION");
println!("Paradigm: {}", self.paradigm);
println!("Maximum iterations: {}", self.maxiter);
println!();
while i < self.maxiter {
i += 1;
let best = self.get_best(population);
println!("Iteration: {}", i);
println!("Objective: {:.8}", self.function.objective(best));
println!("Vector\n{}", best);
population = self.update_discrete(i, population, permitted);
println!();
println!();
}
let time = start.elapsed().as_micros() as f64 / 1000.0;
if i == self.maxiter {
println!("Maximum iteration limit reached in {:.3} milliseconds", time);
} else {
println!("Convergence achieved in {:.3} milliseconds", time);
}
let best = self.get_best(population);
println!("Result\n{}", best);
println!("Objective: {:.8}", self.function.objective(best));
best
}
}
/// Verifies the NaN-aware comparator used by `Optimizer::sort`: finite
/// values sort ascending and NaN values sort last. The original test only
/// printed the result via `dbg!` and asserted nothing.
#[test]
fn sort() {
    let mut sorted = [f64::NAN, 2.0, 1.0, f64::MAX, f64::NAN];
    sorted.sort_by(|a, b| {
        match (a.is_nan(), b.is_nan()) {
            (true, true) => Ordering::Equal,
            (true, false) => Ordering::Greater,
            (false, true) => Ordering::Less,
            (false, false) => a.partial_cmp(&b).unwrap(),
        }
    });
    assert_eq!(sorted[0], 1.0);
    assert_eq!(sorted[1], 2.0);
    assert_eq!(sorted[2], f64::MAX);
    assert!(sorted[3].is_nan());
    assert!(sorted[4].is_nan());
}