1use crate::{
4 constraints,
5 core::{
6 panoc::panoc_engine::PANOCEngine, panoc::PANOCCache, AlgorithmEngine, ExitStatus,
7 Optimizer, Problem, SolverStatus,
8 },
9 matrix_operations, FunctionCallResult, SolverError,
10};
11use lbfgs::LbfgsPrecision;
12use num::Float;
13use std::iter::Sum;
14use std::time;
15
/// Default maximum number of PANOC iterations (can be overridden with
/// `PANOCOptimizer::with_max_iter`).
const MAX_ITER: usize = 100_usize;
17
/// Optimizer implementing the PANOC algorithm.
///
/// Bundles a [`PANOCEngine`] (which performs the actual iterations) with
/// termination settings: a hard iteration cap and an optional wall-clock
/// budget.
pub struct PANOCOptimizer<'a, GradientType, ConstraintType, CostType, T = f64>
where
    T: Float + LbfgsPrecision + Sum<T>,
    GradientType: Fn(&[T], &mut [T]) -> FunctionCallResult,
    CostType: Fn(&[T], &mut T) -> FunctionCallResult,
    ConstraintType: constraints::Constraint<T>,
{
    // Engine that holds the problem definition and the solver workspace.
    panoc_engine: PANOCEngine<'a, GradientType, ConstraintType, CostType, T>,
    // Maximum number of iterations (defaults to `MAX_ITER`).
    max_iter: usize,
    // Optional maximum solve duration; `None` means no time limit.
    max_duration: Option<time::Duration>,
}
32
33impl<'a, GradientType, ConstraintType, CostType, T>
34 PANOCOptimizer<'a, GradientType, ConstraintType, CostType, T>
35where
36 T: Float + LbfgsPrecision + Sum<T>,
37 GradientType: Fn(&[T], &mut [T]) -> FunctionCallResult,
38 CostType: Fn(&[T], &mut T) -> FunctionCallResult,
39 ConstraintType: constraints::Constraint<T>,
40{
41 #[must_use]
52 pub fn new(
53 problem: Problem<'a, GradientType, ConstraintType, CostType, T>,
54 cache: &'a mut PANOCCache<T>,
55 ) -> Self {
56 PANOCOptimizer {
57 panoc_engine: PANOCEngine::new(problem, cache),
58 max_iter: MAX_ITER,
59 max_duration: None,
60 }
61 }
62
63 #[must_use]
72 pub fn with_tolerance(self, tolerance: T) -> Self {
73 assert!(tolerance > T::zero(), "tolerance must be larger than 0");
74
75 self.panoc_engine.cache.tolerance = tolerance;
76 self
77 }
78
79 #[must_use]
101 pub fn with_akkt_tolerance(self, akkt_tolerance: T) -> Self {
102 assert!(
103 akkt_tolerance > T::zero(),
104 "akkt_tolerance must be positive"
105 );
106 self.panoc_engine.cache.set_akkt_tolerance(akkt_tolerance);
107 self
108 }
109
110 #[must_use]
116 pub fn with_max_iter(mut self, max_iter: usize) -> Self {
117 assert!(max_iter > 0, "max_iter must be larger than 0");
118
119 self.max_iter = max_iter;
120 self
121 }
122
123 #[must_use]
125 pub fn with_max_duration(mut self, max_duation: time::Duration) -> Self {
126 self.max_duration = Some(max_duation);
127 self
128 }
129
130 pub fn solve(&mut self, u: &mut [T]) -> Result<SolverStatus<T>, SolverError> {
132 let now = web_time::Instant::now();
133
134 self.panoc_engine.init(u)?;
139
140 let mut num_iter: usize = 0;
142 let mut continue_num_iters = true;
143 let mut continue_runtime = true;
144
145 let mut step_flag = self.panoc_engine.step(u)?;
146 if let Some(dur) = self.max_duration {
147 while step_flag && continue_num_iters && continue_runtime {
148 num_iter += 1;
149 continue_num_iters = num_iter < self.max_iter;
150 continue_runtime = now.elapsed() <= dur;
151 step_flag = self.panoc_engine.step(u)?;
152 }
153 } else {
154 while step_flag && continue_num_iters {
155 num_iter += 1;
156 continue_num_iters = num_iter < self.max_iter;
157 step_flag = self.panoc_engine.step(u)?;
158 }
159 }
160
161 self.panoc_engine.cache_best_half_step(u);
165
166 if !matrix_operations::is_finite(u) {
168 return Err(SolverError::NotFiniteComputation(
169 "final PANOC iterate contains a non-finite value",
170 ));
171 }
172
173 let exit_status = if !continue_num_iters {
175 ExitStatus::NotConvergedIterations
176 } else if !continue_runtime {
177 ExitStatus::NotConvergedOutOfTime
178 } else {
179 ExitStatus::Converged
180 };
181
182 u.copy_from_slice(&self.panoc_engine.cache.best_u_half_step);
185
186 let best_cost_value = self.panoc_engine.cost_value_at_best_half_step()?;
187
188 Ok(SolverStatus::new(
190 exit_status,
191 num_iter,
192 now.elapsed(),
193 self.panoc_engine.cache.best_norm_gamma_fpr,
194 best_cost_value,
195 ))
196 }
197}
198
199impl<'life, GradientType, ConstraintType, CostType, T> Optimizer<T>
200 for PANOCOptimizer<'life, GradientType, ConstraintType, CostType, T>
201where
202 T: Float + LbfgsPrecision + Sum<T>,
203 GradientType: Fn(&[T], &mut [T]) -> FunctionCallResult + 'life,
204 CostType: Fn(&[T], &mut T) -> FunctionCallResult,
205 ConstraintType: constraints::Constraint<T> + 'life,
206{
207 fn solve(&mut self, u: &mut [T]) -> Result<SolverStatus<T>, SolverError> {
208 PANOCOptimizer::solve(self, u)
209 }
210}
211
#[cfg(test)]
mod tests {

    use crate::core::constraints::*;
    use crate::core::panoc::*;
    use crate::core::*;
    use crate::{mocks, FunctionCallResult};

    #[test]
    fn t_panoc_optimizer_rosenbrock() {
        let tolerance = 1e-6;
        let (a, b) = (1.0, 200.0);
        let problem_size = 2;
        let lbfgs_mem = 8;
        let iter_budget = 80;
        let mut u = [-1.5, 0.9];

        let df = |u: &[f64], grad: &mut [f64]| -> FunctionCallResult {
            mocks::rosenbrock_grad(a, b, u, grad);
            Ok(())
        };
        let f = |u: &[f64], cost: &mut f64| -> FunctionCallResult {
            *cost = mocks::rosenbrock_cost(a, b, u);
            Ok(())
        };
        let bounds = constraints::Ball2::new(None, 2.0);
        let mut cache = PANOCCache::new(problem_size, tolerance, lbfgs_mem);
        let problem = Problem::new(&bounds, df, f);
        let mut optimizer = PANOCOptimizer::new(problem, &mut cache).with_max_iter(iter_budget);

        let t_start = web_time::Instant::now();
        let status = optimizer.solve(&mut u).unwrap();

        println!("{} iterations", status.iterations());
        println!("elapsed {:?}", t_start.elapsed());

        assert_eq!(iter_budget, optimizer.max_iter);
        assert!(status.has_converged());
        assert!(status.iterations() < iter_budget);
        assert!(status.norm_fpr() < tolerance);

        // Feasibility: projecting the solution onto the ball must leave it
        // (numerically) unchanged.
        let mut projected = u;
        bounds.project(&mut projected).unwrap();
        unit_test_utils::assert_nearly_equal_array(
            &u,
            &projected,
            1e-12,
            1e-16,
            "infeasibility detected",
        );
    }

    #[test]
    fn t_panoc_optimizer_rosenbrock_f32() {
        let tolerance = 1e-4_f32;
        let (a, b) = (1.0_f32, 200.0_f32);
        let problem_size = 2;
        let lbfgs_mem = 8;
        let iter_budget = 120;
        let mut u = [-1.5_f32, 0.9_f32];

        let df = |u: &[f32], grad: &mut [f32]| -> FunctionCallResult {
            mocks::rosenbrock_grad(a, b, u, grad);
            Ok(())
        };
        let f = |u: &[f32], cost: &mut f32| -> FunctionCallResult {
            *cost = mocks::rosenbrock_cost(a, b, u);
            Ok(())
        };

        let bounds = constraints::Ball2::new(None, 2.0_f32);
        let mut cache = PANOCCache::<f32>::new(problem_size, tolerance, lbfgs_mem);
        let problem = Problem::new(&bounds, df, f);
        let mut optimizer = PANOCOptimizer::new(problem, &mut cache).with_max_iter(iter_budget);
        let status = optimizer.solve(&mut u).unwrap();

        assert_eq!(iter_budget, optimizer.max_iter);
        assert!(status.has_converged());
        assert!(status.iterations() < iter_budget);
        assert!(status.norm_fpr() < tolerance);

        // Feasibility check in single precision.
        let mut projected = u;
        bounds.project(&mut projected).unwrap();
        assert!((u[0] - projected[0]).abs() < 1e-5_f32);
        assert!((u[1] - projected[1]).abs() < 1e-5_f32);
    }

    #[test]
    fn t_panoc_in_loop() {
        let tolerance = 1e-5;
        let mut a = 1.0;
        let mut b = 100.0;
        let problem_size = 2;
        let lbfgs_mem = 10;
        let iter_budget = 100;
        let mut u = [-1.5, 0.9];
        // A single cache instance is shared across all solves below.
        let mut cache = PANOCCache::new(problem_size, tolerance, lbfgs_mem);
        for i in 1..=100 {
            // Perturb the problem data on every pass through the loop.
            b *= 1.01;
            a -= 1e-3;
            let radius = 1.0 + 0.01 * (i as f64);
            let df = |u: &[f64], grad: &mut [f64]| -> FunctionCallResult {
                mocks::rosenbrock_grad(a, b, u, grad);
                Ok(())
            };
            let f = |u: &[f64], cost: &mut f64| -> FunctionCallResult {
                *cost = mocks::rosenbrock_cost(a, b, u);
                Ok(())
            };
            let bounds = constraints::Ball2::new(None, radius);
            let problem = Problem::new(&bounds, df, f);
            let mut optimizer = PANOCOptimizer::new(problem, &mut cache).with_max_iter(iter_budget);

            let status = optimizer.solve(&mut u).unwrap();

            println!("status = {:#?}", status);
            println!(
                "parameters: (a={:.4}, b={:.4}, r={:.4}), iters = {}",
                a,
                b,
                radius,
                status.iterations()
            );

            // The solution must remain inside the (growing) ball.
            let norm_u = crate::matrix_operations::norm2(&u);
            assert!(
                norm_u <= radius + 5e-16,
                "infeasibility in problem solution"
            );

            assert_eq!(iter_budget, optimizer.max_iter);
            assert!(status.has_converged());
            assert!(status.iterations() < iter_budget);
            assert!(status.norm_fpr() < tolerance);
        }
    }

    #[test]
    fn t_panoc_optimizer_akkt_tolerance() {
        let tolerance = 1e-6;
        let akkt_tolerance = 1e-6;
        let (a, b) = (1.0, 200.0);
        let problem_size = 2;
        let lbfgs_mem = 8;
        let iter_budget = 580;
        let mut u = [-1.5, 0.9];

        let df = |u: &[f64], grad: &mut [f64]| -> FunctionCallResult {
            mocks::rosenbrock_grad(a, b, u, grad);
            Ok(())
        };
        let f = |u: &[f64], cost: &mut f64| -> FunctionCallResult {
            *cost = mocks::rosenbrock_cost(a, b, u);
            Ok(())
        };

        let bounds = constraints::Ball2::new(None, 1.2);

        let mut cache = PANOCCache::new(problem_size, tolerance, lbfgs_mem);
        let problem = Problem::new(&bounds, df, f);

        let mut optimizer = PANOCOptimizer::new(problem, &mut cache)
            .with_max_iter(iter_budget)
            .with_akkt_tolerance(akkt_tolerance);

        let status = optimizer.solve(&mut u).unwrap();

        assert_eq!(iter_budget, optimizer.max_iter);
        assert!(status.has_converged());
        assert!(status.iterations() < iter_budget);
        assert!(status.norm_fpr() < tolerance);
    }

    #[test]
    fn t_panoc_optimizer_premature_exit_returns_best_previous_half_step() {
        let tolerance = 1e-6;
        let radius = 0.05;
        let problem_size = 3;
        let lbfgs_mem = 10;

        // Set to true once some run exits early with a best half step that
        // differs from the last one produced.
        let mut witnessed_nonlast_best = false;

        for iter_budget in 2..=25 {
            let bounds = constraints::Ball2::new(None, radius);
            let problem = Problem::new(
                &bounds,
                mocks::hard_quadratic_gradient,
                mocks::hard_quadratic_cost,
            );
            let mut cache = PANOCCache::new(problem_size, tolerance, lbfgs_mem);
            let mut optimizer = PANOCOptimizer::new(problem, &mut cache).with_max_iter(iter_budget);
            let mut u = [-20.0, 10.0, 0.2];

            let status = optimizer.solve(&mut u).unwrap();

            let dist_to_last_half_step =
                crate::matrix_operations::norm_inf_diff(&u, &cache.u_half_step);

            if status.exit_status() == ExitStatus::NotConvergedIterations
                && dist_to_last_half_step > 1e-12
            {
                witnessed_nonlast_best = true;

                unit_test_utils::assert_nearly_equal_array(
                    &u,
                    &cache.best_u_half_step,
                    1e-12,
                    1e-12,
                    "returned solution should equal the best cached half step",
                );
                assert!(
                    status.norm_fpr() < cache.norm_gamma_fpr,
                    "returned FPR should be strictly better than the last half step"
                );
            }
        }

        assert!(
            witnessed_nonlast_best,
            "did not find a premature exit where the best half step differs from the last one"
        );
    }
}