use serde::{Deserialize, Serialize};
/// A probabilistic ("chance") constraint: the underlying condition should
/// hold with probability at least `confidence`, made deterministic via the
/// chosen approximation `method`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChanceConstraint {
/// Identifier used for reporting; exposed via `name()`.
name: String,
/// Required satisfaction probability; constructors enforce (0, 1).
confidence: f32,
/// Approximation scheme used to derive a deterministic bound
/// (see `get_tightened_bound`).
method: ChanceConstraintMethod,
/// Relative importance when aggregating constraints; constructors set 1.0.
weight: f32,
}
/// How a chance constraint is converted into a deterministic bound.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ChanceConstraintMethod {
/// Sample-based approximation over `num_scenarios` scenarios;
/// `ChanceConstraint::scenario_based` sets `violation_tolerance`
/// to `1 - confidence`.
ScenarioBased {
num_scenarios: usize,
violation_tolerance: f32,
},
/// Gaussian-margin approximation: bound = mean + z(confidence) * std_dev.
Gaussian {
mean: f32,
std_dev: f32,
},
/// Fixed conservative tightening; `get_tightened_bound` returns the
/// factor directly.
Conservative {
tightening_factor: f32,
},
}
impl ChanceConstraint {
    /// Builds a chance constraint backed by the Gaussian-margin approximation.
    ///
    /// # Panics
    /// Panics if `confidence` is not strictly inside (0, 1) or if `std_dev`
    /// is not strictly positive.
    pub fn gaussian(name: impl Into<String>, confidence: f32, mean: f32, std_dev: f32) -> Self {
        assert!(
            confidence > 0.0 && confidence < 1.0,
            "Confidence must be in (0, 1)"
        );
        assert!(std_dev > 0.0, "Standard deviation must be positive");
        let method = ChanceConstraintMethod::Gaussian { mean, std_dev };
        Self {
            name: name.into(),
            confidence,
            method,
            weight: 1.0,
        }
    }

    /// Builds a scenario-based chance constraint; the violation tolerance is
    /// derived as `1 - confidence`.
    ///
    /// # Panics
    /// Panics if `confidence` is not strictly inside (0, 1) or if
    /// `num_scenarios` is zero.
    pub fn scenario_based(name: impl Into<String>, confidence: f32, num_scenarios: usize) -> Self {
        assert!(
            confidence > 0.0 && confidence < 1.0,
            "Confidence must be in (0, 1)"
        );
        assert!(num_scenarios > 0, "Number of scenarios must be positive");
        let method = ChanceConstraintMethod::ScenarioBased {
            num_scenarios,
            violation_tolerance: 1.0 - confidence,
        };
        Self {
            name: name.into(),
            confidence,
            method,
            weight: 1.0,
        }
    }

    /// Builder-style weight override; consumes and returns `self`.
    pub fn with_weight(mut self, weight: f32) -> Self {
        self.weight = weight;
        self
    }

    /// Deterministic bound implied by the configured approximation method.
    pub fn get_tightened_bound(&self) -> f32 {
        match &self.method {
            ChanceConstraintMethod::Gaussian { mean, std_dev } => {
                let z_alpha = self.confidence_to_quantile(self.confidence);
                *mean + z_alpha * *std_dev
            }
            ChanceConstraintMethod::Conservative { tightening_factor } => *tightening_factor,
            // NOTE(review): placeholder heuristic — the scenario-based bound is
            // just scaled confidence, not derived from actual scenario data.
            ChanceConstraintMethod::ScenarioBased { .. } => self.confidence * 10.0,
        }
    }

    /// Approximate standard-normal quantile for a confidence level: a small
    /// lookup of common z-values (checked in descending order), with a crude
    /// linear fallback below 0.80.
    fn confidence_to_quantile(&self, confidence: f32) -> f32 {
        const Z_TABLE: [(f32, f32); 4] = [(0.99, 2.58), (0.95, 1.96), (0.90, 1.64), (0.80, 1.28)];
        for &(level, z) in Z_TABLE.iter() {
            if confidence >= level {
                return z;
            }
        }
        confidence * 3.0 - 1.5
    }

    /// Constraint identifier.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Required satisfaction probability.
    pub fn confidence(&self) -> f32 {
        self.confidence
    }

    /// Relative importance weight.
    pub fn weight(&self) -> f32 {
        self.weight
    }
}
/// A constraint that must hold for every realization in an uncertainty set,
/// handled via the configured robustness approach.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RobustConstraint {
/// Identifier used for reporting; exposed via `name()`.
name: String,
/// Region of parameter values to be robust against.
uncertainty_set: UncertaintySet,
/// How robustness is enforced; constructors default to `WorstCase`.
approach: RobustnessApproach,
/// Relative importance; constructors set 1.0.
weight: f32,
}
/// Supported uncertainty-set geometries. Matrix fields are stored flattened
/// as `Vec<f32>`; row-major layout is assumed — TODO confirm with consumers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UncertaintySet {
/// Per-coordinate interval: each component lies in `[min[i], max[i]]`.
Box { min: Vec<f32>, max: Vec<f32> },
/// Ball of the given `radius` around `nominal`, shaped by the flattened
/// dim x dim `shape_matrix`.
Ellipsoidal {
nominal: Vec<f32>,
shape_matrix: Vec<f32>,
radius: f32,
},
/// Set {u : A u <= b}; presumably `a_matrix` is flattened with `dim`
/// columns — NOTE(review): no code in this file consumes these fields.
Polyhedral {
a_matrix: Vec<f32>,
b_vector: Vec<f32>,
dim: usize,
},
/// Budget set: at most `budget` coordinates may deviate from `nominal`,
/// each by at most its entry in `max_deviations`.
Budget {
nominal: Vec<f32>,
max_deviations: Vec<f32>,
budget: usize,
},
}
/// Strategy for enforcing robustness over an uncertainty set.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RobustnessApproach {
/// Enforce against the single worst realization.
WorstCase,
/// Affinely adjustable (two-stage) robust counterpart —
/// NOTE(review): not referenced by any code in this file.
AffinelyAdjustable,
/// Enforce over a finite sample of scenarios.
Scenarios { num_scenarios: usize },
}
impl RobustConstraint {
    /// Builds a worst-case robust constraint over a box (interval)
    /// uncertainty set.
    ///
    /// # Panics
    /// Panics if `min` and `max` differ in length or any `min[i] > max[i]`.
    pub fn box_uncertain(name: impl Into<String>, min: Vec<f32>, max: Vec<f32>) -> Self {
        assert_eq!(min.len(), max.len(), "Min and max must have same dimension");
        for (mi, ma) in min.iter().zip(max.iter()) {
            assert!(mi <= ma, "Min must be <= max");
        }
        Self {
            name: name.into(),
            uncertainty_set: UncertaintySet::Box { min, max },
            approach: RobustnessApproach::WorstCase,
            weight: 1.0,
        }
    }

    /// Builds a worst-case robust constraint over an ellipsoidal uncertainty
    /// set centered at `nominal`, using the identity shape matrix.
    ///
    /// # Panics
    /// Panics if `radius` is not strictly positive.
    pub fn ellipsoidal_uncertain(name: impl Into<String>, nominal: Vec<f32>, radius: f32) -> Self {
        assert!(radius > 0.0, "Radius must be positive");
        let dim = nominal.len();
        // Bug fix: `vec![1.0; dim * dim]` produced the all-ones matrix, not
        // the identity. Build a proper row-major identity instead.
        let mut identity = vec![0.0; dim * dim];
        for i in 0..dim {
            identity[i * dim + i] = 1.0;
        }
        Self {
            name: name.into(),
            uncertainty_set: UncertaintySet::Ellipsoidal {
                nominal,
                shape_matrix: identity,
                radius,
            },
            approach: RobustnessApproach::WorstCase,
            weight: 1.0,
        }
    }

    /// Builder-style override of the robustness approach.
    pub fn with_approach(mut self, approach: RobustnessApproach) -> Self {
        self.approach = approach;
        self
    }

    /// Builder-style weight override.
    pub fn with_weight(mut self, weight: f32) -> Self {
        self.weight = weight;
        self
    }

    /// Returns a heuristic worst-case parameter realization for the
    /// uncertainty set. `x` is only used to size the Polyhedral placeholder.
    ///
    /// NOTE(review): all arms assume larger parameter values are worse for
    /// the constraint (nonnegative sensitivities) — confirm against the
    /// constraint sense before relying on this.
    pub fn worst_case_scenario(&self, x: &[f32]) -> Vec<f32> {
        match &self.uncertainty_set {
            UncertaintySet::Box { min: _, max } => max.clone(),
            UncertaintySet::Ellipsoidal {
                nominal, radius, ..
            } => {
                // NOTE(review): shifting every coordinate by `radius` is an
                // L-infinity over-approximation of the L2 ball (the result
                // lies outside the ellipsoid for dim > 1).
                nominal.iter().map(|&v| v + radius).collect()
            }
            UncertaintySet::Polyhedral { .. } => {
                // Placeholder: maximizing over a polyhedron needs an LP solver.
                vec![0.0; x.len()]
            }
            UncertaintySet::Budget {
                nominal,
                max_deviations,
                budget,
            } => {
                let mut result = nominal.clone();
                // Bug fix: previously the FIRST `budget` deviations were
                // applied; the worst case spends the budget on the LARGEST
                // deviations instead.
                let usable = max_deviations.len().min(result.len());
                let mut order: Vec<usize> = (0..usable).collect();
                order.sort_unstable_by(|&a, &b| {
                    max_deviations[b]
                        .partial_cmp(&max_deviations[a])
                        .unwrap_or(std::cmp::Ordering::Equal)
                });
                for &i in order.iter().take(*budget) {
                    result[i] += max_deviations[i];
                }
                result
            }
        }
    }

    /// Constraint identifier.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Relative importance weight.
    pub fn weight(&self) -> f32 {
        self.weight
    }

    /// Borrowed view of the uncertainty set.
    pub fn uncertainty_set(&self) -> &UncertaintySet {
        &self.uncertainty_set
    }
}
/// Bounds the Conditional Value-at-Risk of a loss distribution: the mean of
/// the worst `alpha` fraction of losses must not exceed `threshold`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CVaRConstraint {
/// Identifier used for reporting.
name: String,
/// Tail fraction averaged by `compute_cvar`; `new` enforces (0, 1).
/// NOTE(review): `alpha` here is the tail mass itself, i.e. CVaR at level
/// `1 - alpha` in the more common convention — confirm with callers.
alpha: f32,
/// Maximum admissible CVaR value.
threshold: f32,
/// Declared number of scenarios; not consumed by the methods in this file.
num_scenarios: usize,
/// Relative importance; `new` sets 1.0.
weight: f32,
}
impl CVaRConstraint {
    /// Creates a CVaR constraint: the average of the worst `alpha` fraction
    /// of losses must stay at or below `threshold`.
    ///
    /// # Panics
    /// Panics if `alpha` is outside (0, 1) or `num_scenarios` is zero.
    pub fn new(name: impl Into<String>, alpha: f32, threshold: f32, num_scenarios: usize) -> Self {
        assert!(alpha > 0.0 && alpha < 1.0, "Alpha must be in (0, 1)");
        assert!(num_scenarios > 0, "Number of scenarios must be positive");
        Self {
            name: name.into(),
            alpha,
            threshold,
            num_scenarios,
            weight: 1.0,
        }
    }

    /// Builder-style weight override.
    pub fn with_weight(mut self, weight: f32) -> Self {
        self.weight = weight;
        self
    }

    /// Mean of the worst `ceil(alpha * n)` losses (at least one, at most all);
    /// returns 0.0 for an empty slice.
    pub fn compute_cvar(&self, losses: &[f32]) -> f32 {
        if losses.is_empty() {
            return 0.0;
        }
        let mut ordered = losses.to_vec();
        // Sort descending so the largest losses come first; comparisons
        // involving NaN are treated as equal.
        ordered.sort_unstable_by(|a, b| b.partial_cmp(a).unwrap_or(std::cmp::Ordering::Equal));
        let tail = ((self.alpha * ordered.len() as f32).ceil() as usize).clamp(1, ordered.len());
        let total: f32 = ordered[..tail].iter().sum();
        total / tail as f32
    }

    /// True when the empirical CVaR does not exceed the threshold.
    pub fn check(&self, losses: &[f32]) -> bool {
        self.compute_cvar(losses) <= self.threshold
    }

    /// Amount by which the empirical CVaR exceeds the threshold (0 if satisfied).
    pub fn violation(&self, losses: &[f32]) -> f32 {
        f32::max(self.compute_cvar(losses) - self.threshold, 0.0)
    }

    /// Constraint identifier.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Tail fraction used by `compute_cvar`.
    pub fn alpha(&self) -> f32 {
        self.alpha
    }

    /// Maximum admissible CVaR value.
    pub fn threshold(&self) -> f32 {
        self.threshold
    }

    /// Relative importance weight.
    pub fn weight(&self) -> f32 {
        self.weight
    }
}
/// Requires the worst-case expected loss over an ambiguity set of probability
/// distributions to stay at or below `threshold`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DistributionallyRobustConstraint {
/// Identifier used for reporting.
name: String,
/// Family of distributions the expectation is taken worst-case over.
ambiguity_set: AmbiguitySet,
/// Maximum admissible worst-case expected loss.
threshold: f32,
/// Relative importance; constructors set 1.0.
weight: f32,
}
/// Families of probability distributions considered by a distributionally
/// robust constraint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AmbiguitySet {
/// Distributions within Wasserstein `radius` of an empirical distribution
/// built from `num_samples` samples.
Wasserstein {
num_samples: usize,
radius: f32,
},
/// Distributions matching the given `mean`, with second-moment slack
/// controlled by `cov_radius`.
MomentBased {
mean: Vec<f32>,
cov_radius: f32,
},
/// Distributions within a phi-divergence ball of `radius` around the
/// empirical distribution. NOTE(review): `divergence_type` is not
/// consulted by `worst_case_expectation` in this file.
PhiDivergence {
num_samples: usize,
radius: f32,
divergence_type: DivergenceType,
},
}
/// Phi-divergence used to define a `PhiDivergence` ambiguity set.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum DivergenceType {
/// Kullback-Leibler divergence.
KL,
/// Chi-squared divergence.
ChiSquared,
/// Modified chi-squared divergence.
ModifiedChiSquared,
}
impl DistributionallyRobustConstraint {
    /// Builds a constraint over a Wasserstein ambiguity ball.
    ///
    /// # Panics
    /// Panics if `radius` is not positive or `num_samples` is zero.
    pub fn wasserstein(
        name: impl Into<String>,
        threshold: f32,
        num_samples: usize,
        radius: f32,
    ) -> Self {
        assert!(radius > 0.0, "Radius must be positive");
        assert!(num_samples > 0, "Number of samples must be positive");
        let ambiguity_set = AmbiguitySet::Wasserstein {
            num_samples,
            radius,
        };
        Self {
            name: name.into(),
            ambiguity_set,
            threshold,
            weight: 1.0,
        }
    }

    /// Builds a constraint over a moment-based ambiguity set.
    ///
    /// # Panics
    /// Panics if `cov_radius` is not positive.
    pub fn moment_based(
        name: impl Into<String>,
        threshold: f32,
        mean: Vec<f32>,
        cov_radius: f32,
    ) -> Self {
        assert!(cov_radius > 0.0, "Covariance radius must be positive");
        Self {
            name: name.into(),
            ambiguity_set: AmbiguitySet::MomentBased { mean, cov_radius },
            threshold,
            weight: 1.0,
        }
    }

    /// Builder-style weight override.
    pub fn with_weight(mut self, weight: f32) -> Self {
        self.weight = weight;
        self
    }

    /// Heuristic upper bound on the expected loss over the ambiguity set;
    /// returns 0.0 for an empty sample. NOTE(review): these are simple
    /// surrogates, not exact DRO reformulations.
    pub fn worst_case_expectation(&self, losses: &[f32]) -> f32 {
        if losses.is_empty() {
            return 0.0;
        }
        let count = losses.len() as f32;
        let mean = losses.iter().sum::<f32>() / count;
        match &self.ambiguity_set {
            AmbiguitySet::Wasserstein { radius, .. } => {
                // Empirical mean inflated by radius times the largest |loss|.
                let peak = losses.iter().fold(f32::NEG_INFINITY, |m, &l| m.max(l));
                mean + radius * peak.abs()
            }
            AmbiguitySet::MomentBased { cov_radius, .. } => {
                // Mean plus a standard-deviation term scaled by sqrt(cov_radius).
                let variance =
                    losses.iter().map(|&l| (l - mean).powi(2)).sum::<f32>() / count;
                mean + cov_radius.sqrt() * variance.sqrt()
            }
            AmbiguitySet::PhiDivergence { radius, .. } => mean * (1.0 + radius),
        }
    }

    /// True when the worst-case expectation does not exceed the threshold.
    pub fn check(&self, losses: &[f32]) -> bool {
        self.worst_case_expectation(losses) <= self.threshold
    }

    /// Amount by which the worst-case expectation exceeds the threshold
    /// (0 if satisfied).
    pub fn violation(&self, losses: &[f32]) -> f32 {
        f32::max(self.worst_case_expectation(losses) - self.threshold, 0.0)
    }

    /// Constraint identifier.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Maximum admissible worst-case expected loss.
    pub fn threshold(&self) -> f32 {
        self.threshold
    }

    /// Relative importance weight.
    pub fn weight(&self) -> f32 {
        self.weight
    }

    /// Borrowed view of the ambiguity set.
    pub fn ambiguity_set(&self) -> &AmbiguitySet {
        &self.ambiguity_set
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_chance_constraint_gaussian() {
        let cc = ChanceConstraint::gaussian("test", 0.95, 10.0, 2.0);
        assert_eq!(cc.name(), "test");
        assert_eq!(cc.confidence(), 0.95);
        // z(0.95) = 1.96, so the bound is 10 + 1.96 * 2 = 13.92.
        let bound = cc.get_tightened_bound();
        assert!(bound > 10.0);
        assert!(bound < 15.0);
    }

    #[test]
    fn test_robust_constraint_box() {
        let rc = RobustConstraint::box_uncertain("test", vec![-1.0, -2.0], vec![1.0, 2.0]);
        assert_eq!(rc.name(), "test");
        // Worst case of a box set is the upper corner.
        let worst_case = rc.worst_case_scenario(&[0.0, 0.0]);
        assert_eq!(worst_case, vec![1.0, 2.0]);
    }

    #[test]
    fn test_cvar_constraint() {
        let cvar = CVaRConstraint::new("test", 0.1, 10.0, 100);
        let losses = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];
        // alpha = 0.1 over 10 scenarios averages ceil(0.1 * 10) = 1 worst loss.
        let cvar_value = cvar.compute_cvar(&losses);
        assert!(cvar_value >= 9.0);
        assert!(cvar_value <= 10.0);
    }

    #[test]
    fn test_distributionally_robust_wasserstein() {
        let drc = DistributionallyRobustConstraint::wasserstein("test", 15.0, 100, 0.5);
        let losses = vec![5.0, 10.0, 15.0];
        // mean = 10, max loss = 15, so worst case = 10 + 0.5 * 15 = 17.5.
        let wc_exp = drc.worst_case_expectation(&losses);
        assert!(wc_exp >= 10.0);
        // Bug fix: the old assertion `check(..) || !check(..)` was a tautology
        // that could never fail. 17.5 exceeds the threshold of 15, so the
        // constraint must report a violation.
        assert!(!drc.check(&losses));
        assert!(drc.violation(&losses) > 0.0);
    }
}