use ndarray::Array1;
use rand_distr::Distribution;
use stochastic_rs_core::simd_rng::Deterministic;
use stochastic_rs_core::simd_rng::SeedExt;
use stochastic_rs_core::simd_rng::Unseeded;
use stochastic_rs_distributions::exp::SimdExp;
use stochastic_rs_distributions::normal::SimdNormal;
use crate::noise::cgns::Cgns;
use crate::traits::FloatExt;
use crate::traits::ProcessExt;
/// Two-factor Duffie–Kan short-rate process with jumps whose inter-arrival
/// times are exponentially distributed.
///
/// `sample` evolves the pair `(r, x)` by Euler–Maruyama using correlated
/// Gaussian increments from [`Cgns`]; compound jumps are added to the `x`
/// factor only.
pub struct DuffieKanJumpExp<T: FloatExt, S: SeedExt = Unseeded> {
/// Coefficient on `r` inside the shared affine diffusion amplitude
/// `alpha * r + beta * x + gamma`.
pub alpha: T,
/// Coefficient on `x` inside the shared affine diffusion amplitude.
pub beta: T,
/// Constant term of the shared affine diffusion amplitude.
pub gamma: T,
/// Passed through to [`Cgns`] — presumably the correlation between the two
/// driving Gaussian streams; confirm against `Cgns` docs.
pub rho: T,
/// Drift of `r`: coefficient on `r` in `a1 * r + b1 * x + c1`.
pub a1: T,
/// Drift of `r`: coefficient on `x`.
pub b1: T,
/// Drift of `r`: constant term.
pub c1: T,
/// Diffusion scale multiplying the affine amplitude in the `r` equation.
pub sigma1: T,
/// Drift of `x`: coefficient on `r` in `a2 * r + b2 * x + c2`.
pub a2: T,
/// Drift of `x`: coefficient on `x`.
pub b2: T,
/// Drift of `x`: constant term.
pub c2: T,
/// Diffusion scale multiplying the affine amplitude in the `x` equation.
pub sigma2: T,
/// Rate parameter of the exponential jump inter-arrival distribution.
pub lambda: T,
/// Scale of the normal distribution the jump sizes are drawn from.
pub jump_scale: T,
/// Number of grid points per sampled path (including the initial value).
pub n: usize,
/// Initial short rate; defaults to zero when `None`.
pub r0: Option<T>,
/// Initial second factor; defaults to zero when `None`.
pub x0: Option<T>,
/// Passed through to [`Cgns`] — presumably the time horizon; `None` falls
/// back to whatever default `Cgns` applies.
pub t: Option<T>,
/// Seed source for the jump distributions and the process RNG.
pub seed: S,
// Correlated Gaussian noise generator, sized for n - 1 increments.
cgns: Cgns<T, S>,
}
impl<T: FloatExt> DuffieKanJumpExp<T> {
    /// Builds an unseeded Duffie–Kan jump process.
    ///
    /// `n` is the number of grid points per sampled path (including the
    /// initial value), so the internal correlated-noise generator is sized
    /// for `n - 1` increments over horizon `t`.
    ///
    /// # Panics
    /// Panics if `n == 0`: the `n - 1` used to size the noise generator
    /// would underflow (wrapping to `usize::MAX` in release builds).
    // Many scalar model coefficients; a builder would break existing call sites.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        alpha: T,
        beta: T,
        gamma: T,
        rho: T,
        a1: T,
        b1: T,
        c1: T,
        sigma1: T,
        a2: T,
        b2: T,
        c2: T,
        sigma2: T,
        lambda: T,
        jump_scale: T,
        n: usize,
        r0: Option<T>,
        x0: Option<T>,
        t: Option<T>,
    ) -> Self {
        // Guard the `n - 1` below: release-mode subtraction wraps instead of
        // panicking, which would request usize::MAX noise increments.
        assert!(n >= 1, "DuffieKanJumpExp requires n >= 1 grid points");
        Self {
            alpha,
            beta,
            gamma,
            rho,
            a1,
            b1,
            c1,
            sigma1,
            a2,
            b2,
            c2,
            sigma2,
            lambda,
            jump_scale,
            n,
            r0,
            x0,
            t,
            seed: Unseeded,
            // One Gaussian increment per grid transition: n - 1 of them.
            cgns: Cgns::new(rho, n - 1, t),
        }
    }
}
impl<T: FloatExt> DuffieKanJumpExp<T, Deterministic> {
    /// Builds a deterministically seeded process so sampled paths are
    /// reproducible for a given `seed`.
    ///
    /// The correlated-noise generator receives a child seed derived from the
    /// root seed, while the process itself stores a generator rebuilt from
    /// the root `seed` — presumably to keep the two random streams distinct.
    ///
    /// # Panics
    /// Panics if `n == 0`: the `n - 1` used to size the noise generator
    /// would underflow (wrapping to `usize::MAX` in release builds).
    // Many scalar model coefficients; a builder would break existing call sites.
    #[allow(clippy::too_many_arguments)]
    pub fn seeded(
        alpha: T,
        beta: T,
        gamma: T,
        rho: T,
        a1: T,
        b1: T,
        c1: T,
        sigma1: T,
        a2: T,
        b2: T,
        c2: T,
        sigma2: T,
        lambda: T,
        jump_scale: T,
        n: usize,
        r0: Option<T>,
        x0: Option<T>,
        t: Option<T>,
        seed: u64,
    ) -> Self {
        // Guard the `n - 1` below: release-mode subtraction wraps instead of
        // panicking, which would request usize::MAX noise increments.
        assert!(n >= 1, "DuffieKanJumpExp requires n >= 1 grid points");
        let root = Deterministic::new(seed);
        // NOTE(review): the struct stores a fresh `Deterministic::new(seed)`
        // below rather than `root`; preserved as-is in case `derive` advances
        // internal state — confirm against `Deterministic::derive`.
        let child = root.derive();
        Self {
            alpha,
            beta,
            gamma,
            rho,
            a1,
            b1,
            c1,
            sigma1,
            a2,
            b2,
            c2,
            sigma2,
            lambda,
            jump_scale,
            n,
            r0,
            x0,
            t,
            seed: Deterministic::new(seed),
            // One Gaussian increment per grid transition: n - 1 of them.
            cgns: Cgns::seeded(rho, n - 1, t, child.current()),
        }
    }
}
impl<T: FloatExt, S: SeedExt> ProcessExt<T> for DuffieKanJumpExp<T, S> {
/// `[r, x]`: the short-rate path and the second-factor path, each of
/// length `self.n`.
type Output = [Array1<T>; 2];
/// Euler–Maruyama simulation of the coupled `(r, x)` dynamics, with
/// compound jumps (exponential inter-arrival times, normal sizes) added
/// to `x` only.
fn sample(&self) -> Self::Output {
// Step size comes from the correlated-noise generator (constructed with
// n - 1 increments; see the constructors).
let dt = self.cgns.dt();
// Two Gaussian increment paths of length n - 1, correlated via `rho` —
// presumably; confirm against `Cgns`.
let [cgn1, cgn2] = &self.cgns.sample();
let mut r = Array1::<T>::zeros(self.n);
let mut x = Array1::<T>::zeros(self.n);
// Initial values default to zero when not supplied.
r[0] = self.r0.unwrap_or(T::zero());
x[0] = self.x0.unwrap_or(T::zero());
// Jump clock: exponential inter-arrival times with rate `lambda`.
let exp_dist = SimdExp::from_seed_source(self.lambda, &self.seed);
// Jump sizes: presumably N(mean = 0, scale = jump_scale) — confirm the
// SimdNormal parameterisation.
let jump_dist = SimdNormal::<T, 64>::from_seed_source(T::zero(), self.jump_scale, &self.seed);
let mut rng = self.seed.rng();
let mut next_jump_time = exp_dist.sample(&mut rng);
for i in 1..self.n {
let current_time = T::from_usize_(i) * dt;
let r_old = r[i - 1];
let x_old = x[i - 1];
// Affine drift plus diffusion; both equations share the same affine
// amplitude `alpha * r + beta * x + gamma` but use their own sigma
// and their own Gaussian increment.
let dr = (self.a1 * r_old + self.b1 * x_old + self.c1) * dt
+ self.sigma1 * (self.alpha * r_old + self.beta * x_old + self.gamma) * cgn1[i - 1];
let dx = (self.a2 * r_old + self.b2 * x_old + self.c2) * dt
+ self.sigma2 * (self.alpha * r_old + self.beta * x_old + self.gamma) * cgn2[i - 1];
// Accumulate every jump whose arrival time falls in the current grid
// step. The draw order (size, then next inter-arrival) is part of the
// seeded reproducibility contract — do not reorder.
let mut jump_sum_x = T::zero();
while next_jump_time <= current_time {
let jump_x = jump_dist.sample(&mut rng);
jump_sum_x += jump_x;
next_jump_time += exp_dist.sample(&mut rng);
}
r[i] = r_old + dr;
// NOTE(review): jumps feed `x` only; `r` evolves purely by drift and
// diffusion — confirm this matches the intended model.
x[i] = x_old + dx + jump_sum_x;
}
[r, x]
}
}
// Python bindings: expose the process as `PyDuffieKanJumpExp`, returning two
// 1-D arrays. The trailing underscores in `gamma_`/`lambda_` avoid clashing
// with the Python built-in `gamma` conventions and the `lambda` keyword.
py_process_2x1d!(PyDuffieKanJumpExp, DuffieKanJumpExp,
sig: (alpha, beta, gamma_, rho, a1, b1, c1, sigma1, a2, b2, c2, sigma2, lambda_, jump_scale, n, r0=None, x0=None, t=None, seed=None, dtype=None),
params: (alpha: f64, beta: f64, gamma_: f64, rho: f64, a1: f64, b1: f64, c1: f64, sigma1: f64, a2: f64, b2: f64, c2: f64, sigma2: f64, lambda_: f64, jump_scale: f64, n: usize, r0: Option<f64>, x0: Option<f64>, t: Option<f64>)
);
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: a sample yields an `r` path and an `x` path, each with
    /// exactly `n` grid points.
    #[test]
    fn sample_returns_two_paths() {
        let n = 64;
        let process = DuffieKanJumpExp::<f64>::new(
            0.5,
            0.04,
            0.5,
            -0.3,
            0.01,
            0.0,
            0.0,
            0.01,
            0.0,
            0.5,
            0.0,
            0.005,
            0.5,
            0.01,
            n,
            Some(0.05),
            Some(0.05),
            Some(1.0),
        );
        let paths = process.sample();
        assert_eq!(paths[0].len(), n);
        assert_eq!(paths[1].len(), n);
    }
}