/// Pointwise neural-network activation functions.
///
/// `ELU` carries its `alpha` parameter (scale of the negative-side
/// saturation); the other variants are parameter-free.
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum Activation {
    ReLU,
    Tanh,
    Sigmoid,
    ELU { alpha: f64 },
}

impl Activation {
    /// Applies the activation to a single scalar `x`.
    pub fn apply(&self, x: f64) -> f64 {
        match *self {
            Self::ReLU => x.max(0.0),
            Self::Tanh => x.tanh(),
            Self::Sigmoid => 1.0 / (1.0 + (-x).exp()),
            // Identity on the positive side, exponential saturation below.
            Self::ELU { alpha } => {
                if x > 0.0 {
                    x
                } else {
                    alpha * (x.exp() - 1.0)
                }
            }
        }
    }

    /// First derivative of the activation at `x`.
    ///
    /// At exactly `x == 0.0` the left-hand one-sided derivative is used for
    /// the non-smooth variants: `ReLU` yields 0.0 and `ELU` yields `alpha`.
    pub fn derivative(&self, x: f64) -> f64 {
        match *self {
            // d/dx max(x, 0) = 1 for x > 0, else 0.
            Self::ReLU => {
                if x > 0.0 {
                    1.0
                } else {
                    0.0
                }
            }
            // d/dx tanh(x) = 1 - tanh(x)^2.
            Self::Tanh => 1.0 - x.tanh().powi(2),
            // d/dx sigma(x) = sigma(x) * (1 - sigma(x)).
            Self::Sigmoid => {
                let s = 1.0 / (1.0 + (-x).exp());
                s * (1.0 - s)
            }
            // d/dx ELU(x) = 1 for x > 0, else alpha * e^x.
            Self::ELU { alpha } => {
                if x > 0.0 {
                    1.0
                } else {
                    alpha * x.exp()
                }
            }
        }
    }
}
/// Configuration for a DeepKriging-style neural surrogate model.
///
/// Only the fields are visible here; training semantics are defined by the
/// consumer of this config elsewhere in the crate.
#[derive(Debug, Clone)]
pub struct DeepKrigingConfig {
/// Widths of the hidden layers, from first to last.
pub hidden_layers: Vec<usize>,
/// Gradient-descent step size.
pub learning_rate: f64,
/// Number of training epochs.
pub epochs: usize,
/// Activation function used in the hidden layers.
pub activation: Activation,
/// Number of basis functions
/// (NOTE(review): presumably a spatial basis expansion per the
/// DeepKriging approach — confirm at the trainer).
pub basis_dim: usize,
/// RNG seed (assumption: used for reproducible weight
/// initialization — verify against the caller).
pub seed: u64,
}
impl Default for DeepKrigingConfig {
fn default() -> Self {
Self {
hidden_layers: vec![32, 16],
learning_rate: 0.01,
epochs: 100,
activation: Activation::Tanh,
basis_dim: 8,
seed: 42,
}
}
}
/// Covariance kernels for the Gaussian-process surrogate.
///
/// All variants are stationary: the covariance depends only on the
/// Euclidean distance between the two inputs.
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum KernelType {
    /// k(r) = variance * exp(-r^2 / (2 * lengthscale^2)).
    SquaredExponential {
        lengthscale: f64,
        variance: f64,
    },
    /// Matérn kernel; closed forms are implemented for nu in {1/2, 3/2, 5/2}.
    /// Other nu values fall back to the squared-exponential limit (see
    /// [`KernelType::evaluate`]).
    Matern {
        nu: f64,
        lengthscale: f64,
        variance: f64,
    },
    /// k(r) = variance * (1 + r^2 / (2 * alpha * lengthscale^2))^(-alpha).
    RationalQuadratic {
        alpha: f64,
        lengthscale: f64,
        variance: f64,
    },
}

impl Default for KernelType {
    /// Unit-variance, unit-lengthscale squared-exponential kernel.
    fn default() -> Self {
        KernelType::SquaredExponential {
            lengthscale: 1.0,
            variance: 1.0,
        }
    }
}

impl KernelType {
    /// Evaluates k(x, x') for two points of equal dimension.
    ///
    /// In debug builds a dimension mismatch panics; in release builds the
    /// extra trailing coordinates of the longer slice are ignored (the
    /// pre-existing `zip` behavior, kept for compatibility).
    pub fn evaluate(&self, x: &[f64], xp: &[f64]) -> f64 {
        debug_assert_eq!(
            x.len(),
            xp.len(),
            "kernel inputs must have the same dimension"
        );
        let sq_dist: f64 = x
            .iter()
            .zip(xp.iter())
            .map(|(a, b)| (a - b) * (a - b))
            .sum();
        match self {
            KernelType::SquaredExponential {
                lengthscale,
                variance,
            } => Self::squared_exponential(sq_dist, *lengthscale, *variance),
            KernelType::Matern {
                nu,
                lengthscale,
                variance,
            } => {
                // Scaled distance r = |x - x'| / lengthscale.
                let r = sq_dist.sqrt() / lengthscale;
                if r < 1e-12 {
                    // Coincident points: k(0) = variance for every nu.
                    return *variance;
                }
                if (*nu - 0.5).abs() < 1e-6 {
                    // nu = 1/2: the exponential kernel.
                    variance * (-r).exp()
                } else if (*nu - 1.5).abs() < 1e-6 {
                    let s3 = 3.0_f64.sqrt() * r;
                    variance * (1.0 + s3) * (-s3).exp()
                } else if (*nu - 2.5).abs() < 1e-6 {
                    let s5 = 5.0_f64.sqrt() * r;
                    variance * (1.0 + s5 + s5 * s5 / 3.0) * (-s5).exp()
                } else {
                    // NOTE(review): general nu has no elementary closed form
                    // (requires modified Bessel functions), so this falls
                    // back to the squared-exponential kernel — the
                    // nu -> infinity limit of the Matérn family.
                    Self::squared_exponential(sq_dist, *lengthscale, *variance)
                }
            }
            KernelType::RationalQuadratic {
                alpha,
                lengthscale,
                variance,
            } => {
                let l2 = lengthscale * lengthscale;
                variance * (1.0 + sq_dist / (2.0 * alpha * l2)).powf(-alpha)
            }
        }
    }

    /// Shared SE formula; also serves as the general-nu Matérn fallback.
    fn squared_exponential(sq_dist: f64, lengthscale: f64, variance: f64) -> f64 {
        variance * (-sq_dist / (2.0 * lengthscale * lengthscale)).exp()
    }

    /// The kernel's signal variance (its value at zero distance).
    pub fn signal_variance(&self) -> f64 {
        match self {
            KernelType::SquaredExponential { variance, .. } => *variance,
            KernelType::Matern { variance, .. } => *variance,
            KernelType::RationalQuadratic { variance, .. } => *variance,
        }
    }

    /// The kernel's lengthscale parameter.
    pub fn lengthscale(&self) -> f64 {
        match self {
            KernelType::SquaredExponential { lengthscale, .. } => *lengthscale,
            KernelType::Matern { lengthscale, .. } => *lengthscale,
            KernelType::RationalQuadratic { lengthscale, .. } => *lengthscale,
        }
    }
}
/// Acquisition functions for Bayesian optimization.
///
/// `UCB`/`LCB` carry their exploration coefficient; `EI` (expected
/// improvement) and `PI` (probability of improvement) are parameter-free.
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum AcquisitionFunction {
    EI,
    PI,
    UCB(f64),
    LCB(f64),
}

impl Default for AcquisitionFunction {
    /// Expected improvement is the conventional default choice.
    fn default() -> Self {
        Self::EI
    }
}
/// Configuration for the Gaussian-process surrogate model.
#[derive(Debug, Clone)]
pub struct GPSurrogateConfig {
/// Covariance kernel used by the GP.
pub kernel: KernelType,
/// Observation-noise variance (assumption: added to the kernel
/// diagonal as a nugget/jitter term — confirm at the GP fit).
pub noise: f64,
/// Whether to tune kernel hyperparameters during fitting.
pub optimize_hyperparams: bool,
/// Number of random restarts for hyperparameter optimization.
pub n_restarts: usize,
/// Iteration cap per hyperparameter-optimization run.
pub max_opt_iterations: usize,
}
impl Default for GPSurrogateConfig {
fn default() -> Self {
Self {
kernel: KernelType::default(),
noise: 1e-6,
optimize_hyperparams: false,
n_restarts: 3,
max_opt_iterations: 100,
}
}
}
/// Batched output of a surrogate model.
///
/// Derives `PartialEq` for consistency with the other public types in this
/// module (useful in tests and caching).
#[derive(Debug, Clone, PartialEq)]
pub struct SurrogateResult {
    /// Predicted values at each query point
    /// (presumably the posterior mean — confirm at the producer).
    pub predictions: Vec<f64>,
    /// Predictive variance at each query point, parallel to `predictions`.
    pub variances: Vec<f64>,
    /// Fitted hyperparameter values
    /// (NOTE(review): ordering is defined by the producing model — confirm).
    pub hyperparameters: Vec<f64>,
}