use crate::optim::*;
use crate::primitives::Vector;
#[test]
fn test_projected_gd_line_search_backtracking() {
    // Minimize f(x) = x^2 with an identity projection (effectively
    // unconstrained). The initial step of 10.0 is deliberately too large,
    // so the backtracking line search must shrink it to converge.
    let f = |p: &Vector<f32>| p[0] * p[0];
    let df = |p: &Vector<f32>| Vector::from_slice(&[2.0 * p[0]]);
    let identity = |p: &Vector<f32>| p.clone();

    let mut solver = ProjectedGradientDescent::new(100, 10.0, 1e-6).with_line_search(0.5);
    let start = Vector::from_slice(&[10.0]);
    let outcome = solver.minimize(f, df, identity, start);

    assert_eq!(outcome.status, ConvergenceStatus::Converged);
    assert!(outcome.solution[0].abs() < 1e-3);
}
#[test]
fn test_projected_gd_line_search_max_backtrack() {
    use std::cell::Cell;
    use std::rc::Rc;

    // Shared evaluation counter lets the objective misbehave early on:
    // the first few calls report a hugely inflated value, forcing the
    // line search to backtrack repeatedly before accepting a step.
    let evals = Rc::new(Cell::new(0));
    let evals_inner = Rc::clone(&evals);
    let objective = move |x: &Vector<f32>| {
        evals_inner.set(evals_inner.get() + 1);
        let penalty = if evals_inner.get() < 5 { 1000.0 } else { 0.0 };
        x[0] * x[0] + penalty
    };
    let gradient = |x: &Vector<f32>| Vector::from_slice(&[2.0 * x[0]]);
    let project = |x: &Vector<f32>| x.clone();

    let mut pgd = ProjectedGradientDescent::new(100, 5.0, 1e-6).with_line_search(0.5);
    let result = pgd.minimize(objective, gradient, project, Vector::from_slice(&[5.0]));

    // The solver made progress, and the objective was evaluated enough
    // times to have passed through the inflated-value phase.
    assert!(result.iterations > 0);
    assert!(evals.get() > 5);
}
#[test]
fn test_convergence_status_all_variants() {
    // Every variant must copy, compare equal to its copy, and render a
    // non-empty Debug string.
    for status in [
        ConvergenceStatus::Converged,
        ConvergenceStatus::MaxIterations,
        ConvergenceStatus::Stalled,
        ConvergenceStatus::NumericalError,
        ConvergenceStatus::Running,
        ConvergenceStatus::UserTerminated,
    ] {
        let copy = status;
        assert_eq!(status, copy);
        assert!(!format!("{:?}", status).is_empty());
    }
}
#[test]
fn test_optimization_result_fields() {
    // A converged result carries the solution and iteration count, with the
    // diagnostic fields (objective, gradient norm, violation, time) zeroed.
    let ok = OptimizationResult::converged(Vector::from_slice(&[1.0, 2.0]), 10);
    assert_eq!(ok.status, ConvergenceStatus::Converged);
    assert_eq!(ok.iterations, 10);
    assert_eq!(ok.solution.len(), 2);
    assert!(ok.objective_value.abs() < 1e-6);
    assert!(ok.gradient_norm.abs() < 1e-6);
    assert!(ok.constraint_violation.abs() < 1e-6);
    assert_eq!(ok.elapsed_time, std::time::Duration::ZERO);

    // The max-iterations constructor defaults the iteration count to zero.
    let capped = OptimizationResult::max_iterations(Vector::from_slice(&[3.0]));
    assert_eq!(capped.status, ConvergenceStatus::MaxIterations);
    assert_eq!(capped.iterations, 0);
}
#[test]
fn test_nonnegative_all_negative() {
    // Projecting a strictly negative vector onto the nonnegative orthant
    // must clamp every component to zero.
    let input = Vector::from_slice(&[-1.0, -2.0, -3.0, -0.1]);
    let projected = prox::nonnegative(&input);
    for idx in 0..projected.len() {
        assert!(projected[idx].abs() < 1e-6);
    }
}
#[test]
fn test_nonnegative_all_positive() {
    // A vector already inside the nonnegative orthant passes through unchanged.
    let input = Vector::from_slice(&[1.0, 2.0, 3.0, 0.1]);
    let projected = prox::nonnegative(&input);
    for idx in 0..projected.len() {
        assert!((projected[idx] - input[idx]).abs() < 1e-6);
    }
}
#[test]
fn test_project_l2_ball_zero_vector() {
    // The origin already lies inside any L2 ball, so projection leaves it at zero.
    let origin = Vector::from_slice(&[0.0, 0.0, 0.0]);
    let projected = prox::project_l2_ball(&origin, 1.0);
    for idx in 0..projected.len() {
        assert!(projected[idx].abs() < 1e-6);
    }
}
#[test]
fn test_pgd_with_line_search_converges() {
    // Quadratic centred at c, constrained to the nonnegative orthant.
    // The constrained optimum keeps the positive targets and clamps the
    // negative ones to the boundary at zero.
    let c = Vector::from_slice(&[1.0, -2.0, 3.0, -1.0]);
    let objective = |x: &Vector<f32>| {
        (0..x.len())
            .map(|i| {
                let d = x[i] - c[i];
                0.5 * d * d
            })
            .sum::<f32>()
    };
    let gradient = |x: &Vector<f32>| {
        let entries: Vec<f32> = (0..x.len()).map(|i| x[i] - c[i]).collect();
        Vector::from_slice(&entries)
    };
    let project = |x: &Vector<f32>| prox::nonnegative(x);

    let mut pgd = ProjectedGradientDescent::new(100, 1.0, 1e-6).with_line_search(0.5);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::zeros(4));

    assert_eq!(result.status, ConvergenceStatus::Converged);
    assert!((result.solution[0] - 1.0).abs() < 1e-3);
    assert!(result.solution[1].abs() < 1e-3);
    assert!((result.solution[2] - 3.0).abs() < 1e-3);
    assert!(result.solution[3].abs() < 1e-3);
}
#[test]
fn test_pgd_line_search_triggers_backtracking() {
    // An intentionally oversized initial step (10.0) on a well-conditioned
    // quadratic forces the backtracking line search to shrink the step,
    // while the solver still lands near the (nonnegative) optimum.
    let c = Vector::from_slice(&[2.0, 3.0]);
    let objective = |x: &Vector<f32>| {
        (0..x.len())
            .map(|i| {
                let d = x[i] - c[i];
                0.5 * d * d
            })
            .sum::<f32>()
    };
    let gradient = |x: &Vector<f32>| {
        let entries: Vec<f32> = (0..x.len()).map(|i| x[i] - c[i]).collect();
        Vector::from_slice(&entries)
    };
    let project = |x: &Vector<f32>| prox::nonnegative(x);

    let mut pgd = ProjectedGradientDescent::new(200, 10.0, 1e-6).with_line_search(0.5);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::zeros(2));

    assert!((result.solution[0] - 2.0).abs() < 1e-2);
    assert!((result.solution[1] - 3.0).abs() < 1e-2);
}
#[test]
fn test_pgd_max_iterations_reached() {
    // A tiny fixed step (0.01) toward a far-away target cannot converge in
    // only 5 iterations, so the solver must stop with MaxIterations and
    // report exactly the iteration budget.
    let c = Vector::from_slice(&[100.0, 100.0, 100.0, 100.0]);
    let objective = |x: &Vector<f32>| {
        (0..x.len())
            .map(|i| {
                let d = x[i] - c[i];
                0.5 * d * d
            })
            .sum::<f32>()
    };
    let gradient = |x: &Vector<f32>| {
        let entries: Vec<f32> = (0..x.len()).map(|i| x[i] - c[i]).collect();
        Vector::from_slice(&entries)
    };
    let project = |x: &Vector<f32>| prox::nonnegative(x);

    let mut pgd = ProjectedGradientDescent::new(5, 0.01, 1e-10);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::zeros(4));

    assert_eq!(result.status, ConvergenceStatus::MaxIterations);
    assert_eq!(result.iterations, 5);
}
#[test]
fn test_pgd_reset() {
    // `reset` must be callable through the Optimizer trait without panicking.
    let mut optimizer = ProjectedGradientDescent::new(100, 0.1, 1e-6);
    Optimizer::reset(&mut optimizer);
}
#[test]
#[should_panic(expected = "Projected Gradient Descent does not support stochastic updates")]
fn test_pgd_step_not_implemented() {
    // PGD is a batch method; the stochastic `step` entry point must panic
    // with the documented message.
    let mut solver = ProjectedGradientDescent::new(100, 0.1, 1e-6);
    let mut weights = Vector::zeros(4);
    let gradients = Vector::zeros(4);
    solver.step(&mut weights, &gradients);
}
#[test]
fn test_pgd_struct_debug() {
    // The Debug representation should name the type.
    let solver = ProjectedGradientDescent::new(100, 0.1, 1e-6);
    assert!(format!("{:?}", solver).contains("ProjectedGradientDescent"));
}
#[test]
fn test_pgd_struct_clone() {
    // A clone must be field-for-field identical, compared via Debug output.
    let original = ProjectedGradientDescent::new(100, 0.1, 1e-6);
    let duplicate = original.clone();
    assert_eq!(format!("{:?}", original), format!("{:?}", duplicate));
}
#[test]
fn test_pgd_with_line_search_builder() {
    // The builder must flip the line-search flag and record the
    // backtracking factor; both are observable through Debug formatting.
    let solver = ProjectedGradientDescent::new(100, 1.0, 1e-6).with_line_search(0.3);
    let repr = format!("{:?}", solver);
    assert!(repr.contains("use_line_search: true"));
    assert!(repr.contains("beta: 0.3"));
}
#[test]
fn test_pgd_without_line_search() {
    // Fixed-step PGD on an unconstrained quadratic (identity projection):
    // a step of 0.5 contracts toward c, so 1000 iterations is ample.
    let c = Vector::from_slice(&[1.0, 2.0]);
    let objective = |x: &Vector<f32>| {
        (0..x.len())
            .map(|i| {
                let d = x[i] - c[i];
                0.5 * d * d
            })
            .sum::<f32>()
    };
    let gradient = |x: &Vector<f32>| {
        let entries: Vec<f32> = (0..x.len()).map(|i| x[i] - c[i]).collect();
        Vector::from_slice(&entries)
    };
    let project = |x: &Vector<f32>| x.clone();

    let mut pgd = ProjectedGradientDescent::new(1000, 0.5, 1e-6);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::zeros(2));

    assert_eq!(result.status, ConvergenceStatus::Converged);
    assert!((result.solution[0] - 1.0).abs() < 1e-3);
    assert!((result.solution[1] - 2.0).abs() < 1e-3);
}
#[test]
fn test_pgd_line_search_max_backtracking() {
    // f(x) = ||x||^2 restricted to a small L2 ball. A backtracking factor
    // close to 1 (0.9) shrinks the step very slowly, exercising the
    // backtrack limit inside the line search.
    let objective = |x: &Vector<f32>| (0..x.len()).map(|i| x[i] * x[i]).sum::<f32>();
    let gradient = |x: &Vector<f32>| {
        let entries: Vec<f32> = (0..x.len()).map(|i| 2.0 * x[i]).collect();
        Vector::from_slice(&entries)
    };
    let project = |x: &Vector<f32>| prox::project_l2_ball(x, 0.5);

    let mut pgd = ProjectedGradientDescent::new(50, 1.0, 1e-6).with_line_search(0.9);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::from_slice(&[0.5, 0.5]));

    assert!(result.iterations > 0);
}
#[test]
fn test_pgd_gradient_norm_tracking() {
    // At the unconstrained optimum the gradient vanishes, so the recorded
    // gradient norm should be driven below the reporting tolerance.
    let c = Vector::from_slice(&[1.0, 1.0]);
    let objective = |x: &Vector<f32>| {
        (0..x.len())
            .map(|i| {
                let d = x[i] - c[i];
                0.5 * d * d
            })
            .sum::<f32>()
    };
    let gradient = |x: &Vector<f32>| {
        let entries: Vec<f32> = (0..x.len()).map(|i| x[i] - c[i]).collect();
        Vector::from_slice(&entries)
    };
    let project = |x: &Vector<f32>| x.clone();

    let mut pgd = ProjectedGradientDescent::new(100, 0.5, 1e-6);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::zeros(2));

    assert!(result.gradient_norm < 1e-3);
}
#[test]
fn test_pgd_elapsed_time_recorded() {
    // The solver should stamp a nonzero wall-clock duration on the result.
    let target = Vector::from_slice(&[1.0]);
    let objective = |x: &Vector<f32>| (x[0] - target[0]).powi(2);
    let gradient = |x: &Vector<f32>| {
        let mut g = Vector::zeros(1);
        g[0] = 2.0 * (x[0] - target[0]);
        g
    };
    let project = |x: &Vector<f32>| x.clone();

    let mut pgd = ProjectedGradientDescent::new(100, 0.5, 1e-6);
    let result = pgd.minimize(&objective, &gradient, &project, Vector::zeros(1));

    assert!(result.elapsed_time.as_nanos() > 0);
}
#[test]
fn test_pgd_constraint_violation_zero() {
// 1-D quadratic with an identity projection: nothing is ever clipped, so the
// reported constraint violation should be zero.
let c = Vector::from_slice(&[1.0]);
let objective = |x: &Vector<f32>| (x[0] - c[0]).powi(2);
let gradient = |x: &Vector<f32>| {
let mut grad = Vector::zeros(1);
grad[0] = 2.0 * (x[0] - c[0]);
grad
};
// Identity projection makes the problem effectively unconstrained.
let project = |x: &Vector<f32>| x.clone();
let mut pgd = ProjectedGradientDescent::new(100, 0.5, 1e-6);
let x0 = Vector::zeros(1);
let result = pgd.minimize(&objective, &gradient, &project, x0);
// NOTE(review): exact float equality — presumably the solver assigns the
// literal 0.0 when no clipping occurs; confirm against the implementation.
assert_eq!(result.constraint_violation, 0.0);
}