use crate::error::{IntegrateError, IntegrateResult};
use scirs2_core::ndarray::{Array1, ArrayView1};
use scirs2_core::random::rand_prelude::StdRng as RandStdRng;
use scirs2_core::random::{seeded_rng, Distribution, Normal};
use scirs2_core::random::{CoreRandom, SeedableRng, Uniform};
/// Tuning parameters for the Feynman–Kac path-integral estimator.
#[derive(Debug, Clone)]
pub struct FeynmanKacOptions {
    /// Number of independent Brownian sample paths averaged over.
    pub n_paths: usize,
    /// Number of Euler time steps per path.
    pub n_steps: usize,
    /// Terminal time `T` of the simulation interval `[0, T]`.
    pub t_end: f64,
    /// RNG seed; a fixed fallback seed is used when `None`.
    pub seed: Option<u64>,
}
impl Default for FeynmanKacOptions {
fn default() -> Self {
Self {
n_paths: 1000,
n_steps: 100,
t_end: 1.0,
seed: None,
}
}
}
/// Monte Carlo estimator for Feynman–Kac functionals of Brownian motion.
pub struct FeynmanKacEstimator {
    /// Simulation parameters used by every call to `estimate`.
    options: FeynmanKacOptions,
}
impl FeynmanKacEstimator {
    /// Creates an estimator with the given simulation options.
    pub fn new(options: FeynmanKacOptions) -> Self {
        Self { options }
    }

    /// Estimates `E[g(X_T) * exp(∫_0^T v(t, X_t) dt)]` for standard Brownian
    /// motion `X` started at `x0`, using an Euler discretization of both the
    /// path and the exponential weight (left-endpoint rule for the integral).
    ///
    /// # Errors
    /// Returns `InvalidInput` when `n_paths` or `n_steps` is zero (those
    /// configurations would otherwise silently produce `inf`/`NaN`), and
    /// `ComputationError` when the normal distribution cannot be built.
    pub fn estimate<G, V>(&self, x0: f64, g: G, v: V) -> IntegrateResult<f64>
    where
        G: Fn(f64) -> f64,
        V: Fn(f64, f64) -> f64,
    {
        let opts = &self.options;
        if opts.n_paths == 0 {
            return Err(IntegrateError::InvalidInput(
                "n_paths must be positive".to_string(),
            ));
        }
        if opts.n_steps == 0 {
            return Err(IntegrateError::InvalidInput(
                "n_steps must be positive".to_string(),
            ));
        }
        let dt = opts.t_end / opts.n_steps as f64;
        let sqrt_dt = dt.sqrt();
        let normal = Normal::new(0.0_f64, 1.0_f64)
            .map_err(|e| IntegrateError::ComputationError(format!("Normal dist: {e}")))?;
        // Fixed fallback seed keeps unseeded runs reproducible.
        let mut rng = match opts.seed {
            Some(s) => seeded_rng(s),
            None => seeded_rng(12345),
        };
        let mut total = 0.0_f64;
        for _ in 0..opts.n_paths {
            let mut x = x0;
            let mut log_weight = 0.0_f64;
            let mut t = 0.0_f64;
            for _ in 0..opts.n_steps {
                // Accumulate the potential at the current state, then advance
                // the Brownian path by one Euler–Maruyama step.
                log_weight += v(t, x) * dt;
                x += sqrt_dt * Distribution::sample(&normal, &mut rng);
                t += dt;
            }
            total += g(x) * log_weight.exp();
        }
        Ok(total / opts.n_paths as f64)
    }
}
/// Output of `diffusion_bridge`: a discretized path pinned at both endpoints.
#[derive(Debug, Clone)]
pub struct DiffusionBridgeResult {
    /// Sampled bridge values, length `n_steps + 1`, path[0] = x_start.
    pub path: Vec<f64>,
    /// Time grid matching `path`, from 0.0 to `t_end` inclusive.
    pub times: Vec<f64>,
    /// Importance log-weight of the path (currently always 0.0 — the bridge
    /// is sampled from its exact conditional law, so no reweighting is needed).
    pub log_weight: f64,
}
/// Samples a Brownian bridge from `x_start` at time 0 to `x_end` at `t_end`
/// by sequential conditional-Gaussian updates on a uniform `n_steps` grid.
///
/// # Errors
/// Returns `InvalidInput` when `t_end` is non-positive or `n_steps` is zero,
/// and `ComputationError` when the normal distribution cannot be built.
pub fn diffusion_bridge(
    x_start: f64,
    x_end: f64,
    t_end: f64,
    n_steps: usize,
    seed: Option<u64>,
) -> IntegrateResult<DiffusionBridgeResult> {
    if t_end <= 0.0 {
        return Err(IntegrateError::InvalidInput(
            "t_end must be positive".to_string(),
        ));
    }
    if n_steps == 0 {
        return Err(IntegrateError::InvalidInput(
            "n_steps must be positive".to_string(),
        ));
    }
    let normal = Normal::new(0.0_f64, 1.0_f64)
        .map_err(|e| IntegrateError::ComputationError(format!("Normal dist: {e}")))?;
    // Fixed fallback seed keeps unseeded runs reproducible.
    let mut rng = match seed {
        Some(s) => seeded_rng(s),
        None => seeded_rng(99999),
    };
    let dt = t_end / n_steps as f64;
    let mut path = Vec::with_capacity(n_steps + 1);
    let mut times = Vec::with_capacity(n_steps + 1);
    path.push(x_start);
    times.push(0.0);
    for step in 1..=n_steps {
        let t = step as f64 * dt;
        let t_prev = (step - 1) as f64 * dt;
        let t_remaining_from_now = t_end - t;
        let x_prev = path[step - 1];
        // Conditional law of B(t) given B(t_prev) = x_prev and B(t_end) = x_end:
        // mean interpolates toward x_end, variance shrinks to 0 at the endpoint.
        // The 1e-300 guard avoids 0/0 at the final step; .max(0.0) guards
        // against a tiny negative variance from floating-point cancellation.
        let bridge_mean = x_prev + (x_end - x_prev) * dt / (t_end - t_prev + 1e-300);
        let bridge_var = (dt * t_remaining_from_now / (t_end - t_prev + 1e-300)).max(0.0);
        let x_new = bridge_mean + bridge_var.sqrt() * Distribution::sample(&normal, &mut rng);
        path.push(x_new);
        times.push(t);
    }
    // Pin the terminal point exactly: the final step already has zero variance,
    // but this removes any residual floating-point noise.
    if let Some(last) = path.last_mut() {
        *last = x_end;
    }
    Ok(DiffusionBridgeResult {
        path,
        times,
        log_weight: 0.0,
    })
}
/// Tuning parameters for the random-walk Metropolis sampler.
#[derive(Debug, Clone)]
pub struct MetropolisWalkOptions {
    /// Number of samples to retain (after burn-in and thinning).
    pub n_samples: usize,
    /// Standard deviation of the Gaussian random-walk proposal.
    pub step_size: f64,
    /// Number of initial steps discarded before collecting samples.
    pub n_burnin: usize,
    /// Keep every `thinning`-th post-burn-in state (1 = keep all).
    pub thinning: usize,
    /// RNG seed; a fixed fallback seed is used when `None`.
    pub seed: Option<u64>,
}
impl Default for MetropolisWalkOptions {
fn default() -> Self {
Self {
n_samples: 1000,
step_size: 0.5,
n_burnin: 100,
thinning: 1,
seed: None,
}
}
}
/// Output of `metropolis_walk`.
#[derive(Debug, Clone)]
pub struct MetropolisWalkResult {
    /// Retained post-burn-in, thinned states of the chain.
    pub samples: Vec<Array1<f64>>,
    /// Fraction of proposals accepted over ALL steps (including burn-in).
    pub acceptance_rate: f64,
    /// Sample mean of the first coordinate of the retained samples.
    pub mean_estimate: f64,
}
/// Random-walk Metropolis sampler targeting the (unnormalized) density whose
/// logarithm is `log_target`, started at `x0`.
///
/// Runs `n_burnin + n_samples * thinning` steps with an isotropic Gaussian
/// proposal of standard deviation `step_size`, keeping every `thinning`-th
/// post-burn-in state.
///
/// # Errors
/// Returns `InvalidInput` when `thinning` is zero, and `ComputationError`
/// when a proposal distribution cannot be built.
pub fn metropolis_walk<F>(
    log_target: F,
    x0: Array1<f64>,
    options: Option<MetropolisWalkOptions>,
) -> IntegrateResult<MetropolisWalkResult>
where
    F: Fn(ArrayView1<f64>) -> f64,
{
    let opts = options.unwrap_or_default();
    // thinning == 0 would make the keep-condition below degenerate
    // (is_multiple_of(0) holds only for 0), silently collecting one sample.
    if opts.thinning == 0 {
        return Err(IntegrateError::InvalidInput(
            "thinning must be positive".to_string(),
        ));
    }
    let dim = x0.len();
    let normal = Normal::new(0.0_f64, opts.step_size)
        .map_err(|e| IntegrateError::ComputationError(format!("Normal dist: {e}")))?;
    let uniform = Uniform::new(0.0_f64, 1.0_f64)
        .map_err(|e| IntegrateError::ComputationError(format!("Uniform dist: {e}")))?;
    // Fixed fallback seed keeps unseeded runs reproducible.
    let mut rng = match opts.seed {
        Some(s) => seeded_rng(s),
        None => seeded_rng(77777),
    };
    // x0 is owned and not used again; no clone needed.
    let mut x = x0;
    let mut log_px = log_target(x.view());
    let total_steps = opts.n_burnin + opts.n_samples * opts.thinning;
    let mut samples = Vec::with_capacity(opts.n_samples);
    let mut n_accepted = 0usize;
    for step in 0..total_steps {
        // Isotropic Gaussian random-walk proposal centered on the current
        // state. (A stray duplicate proposal built from x[dim - 1] was
        // removed: it wasted `dim` RNG draws per step and panicked on an
        // empty x0 via index underflow.)
        let proposal: Array1<f64> =
            Array1::from_shape_fn(dim, |i| x[i] + Distribution::sample(&normal, &mut rng));
        let log_p_proposal = log_target(proposal.view());
        // Symmetric proposal => Metropolis ratio reduces to the density ratio.
        let log_alpha = (log_p_proposal - log_px).min(0.0);
        let u: f64 = Distribution::sample(&uniform, &mut rng);
        if u.ln() < log_alpha {
            x = proposal;
            log_px = log_p_proposal;
            n_accepted += 1;
        }
        if step >= opts.n_burnin && (step - opts.n_burnin).is_multiple_of(opts.thinning) {
            samples.push(x.clone());
        }
    }
    // Guard the zero-steps corner case (n_burnin == 0 && n_samples == 0),
    // which would otherwise yield a NaN acceptance rate.
    let acceptance_rate = if total_steps > 0 {
        n_accepted as f64 / total_steps as f64
    } else {
        0.0
    };
    let mean_estimate = if samples.is_empty() {
        0.0
    } else {
        samples.iter().map(|s| s[0]).sum::<f64>() / samples.len() as f64
    };
    Ok(MetropolisWalkResult {
        samples,
        acceptance_rate,
        mean_estimate,
    })
}
/// Tuning parameters for annealed importance sampling.
#[derive(Debug, Clone)]
pub struct AISOptions {
    /// Number of independent particles (importance samples).
    pub n_particles: usize,
    /// Number of annealing levels between prior and target.
    pub n_annealing: usize,
    /// Metropolis sweeps applied to each particle at each level.
    pub n_mcmc_per_level: usize,
    /// Standard deviation of the Gaussian MCMC proposal.
    pub step_size: f64,
    /// RNG seed; a fixed fallback seed is used when `None`.
    pub seed: Option<u64>,
}
impl Default for AISOptions {
fn default() -> Self {
Self {
n_particles: 100,
n_annealing: 10,
n_mcmc_per_level: 5,
step_size: 0.5,
seed: None,
}
}
}
/// Output of `annealed_importance_sampling`.
#[derive(Debug, Clone)]
pub struct AISResult {
    /// Log of the estimated ratio of normalizing constants Z_target / Z_prior.
    pub log_z_ratio: f64,
    /// exp(log_z_ratio).
    pub z_ratio: f64,
    /// Effective sample size (sum w)^2 / sum w^2 of the importance weights.
    pub ess: f64,
    /// Final particle positions after the last annealing level.
    pub particles: Vec<Array1<f64>>,
    /// Per-particle accumulated log importance weights.
    pub log_weights: Vec<f64>,
}
/// Estimates the ratio of normalizing constants Z_target / Z_prior by
/// annealed importance sampling, interpolating geometrically between
/// `log_prior` and `log_target` over a linear beta schedule and applying
/// random-walk Metropolis moves at each level to keep particles near the
/// current intermediate distribution.
///
/// `sample_prior` must draw exact samples from the (normalized) prior;
/// `dim` is the dimension of the sampled vectors.
///
/// # Errors
/// Returns `InvalidInput` when `n_particles` or `n_annealing` is zero
/// (either would otherwise yield NaN summaries), and `ComputationError`
/// when a proposal distribution cannot be built.
pub fn annealed_importance_sampling<P, T, S>(
    log_prior: P,
    log_target: T,
    sample_prior: S,
    dim: usize,
    options: Option<AISOptions>,
) -> IntegrateResult<AISResult>
where
    P: Fn(ArrayView1<f64>) -> f64,
    T: Fn(ArrayView1<f64>) -> f64,
    S: Fn(&mut CoreRandom<RandStdRng>) -> Array1<f64>,
{
    let opts = options.unwrap_or_default();
    let n = opts.n_particles;
    let n_beta = opts.n_annealing;
    // n == 0 makes max/sum reductions -inf/0 (NaN log_z_ratio);
    // n_beta == 0 makes the beta schedule 0/0 = NaN.
    if n == 0 {
        return Err(IntegrateError::InvalidInput(
            "n_particles must be positive".to_string(),
        ));
    }
    if n_beta == 0 {
        return Err(IntegrateError::InvalidInput(
            "n_annealing must be positive".to_string(),
        ));
    }
    let normal = Normal::new(0.0_f64, opts.step_size)
        .map_err(|e| IntegrateError::ComputationError(format!("Normal dist: {e}")))?;
    let uniform = Uniform::new(0.0_f64, 1.0_f64)
        .map_err(|e| IntegrateError::ComputationError(format!("Uniform dist: {e}")))?;
    // Fixed fallback seed keeps unseeded runs reproducible.
    let mut rng = match opts.seed {
        Some(s) => seeded_rng(s),
        None => seeded_rng(55555),
    };
    let mut particles: Vec<Array1<f64>> = (0..n).map(|_| sample_prior(&mut rng)).collect();
    let mut log_weights = vec![0.0_f64; n];
    // Linear schedule beta_0 = 0 (prior) .. beta_{n_beta} = 1 (target).
    let betas: Vec<f64> = (0..=n_beta).map(|k| k as f64 / n_beta as f64).collect();
    for k in 1..=n_beta {
        let beta_prev = betas[k - 1];
        let beta_curr = betas[k];
        let delta_beta = beta_curr - beta_prev;
        // Weight update: log ratio of consecutive intermediate densities
        // evaluated at the particle's current position.
        for i in 0..n {
            let lp0 = log_prior(particles[i].view());
            let lpt = log_target(particles[i].view());
            log_weights[i] += delta_beta * (lpt - lp0);
        }
        // Metropolis sweeps targeting pi_k ∝ prior^(1-beta) * target^beta.
        for _ in 0..opts.n_mcmc_per_level {
            for i in 0..n {
                let log_pi_curr = (1.0 - beta_curr) * log_prior(particles[i].view())
                    + beta_curr * log_target(particles[i].view());
                let proposal: Array1<f64> = Array1::from_shape_fn(dim, |j| {
                    particles[i][j] + Distribution::sample(&normal, &mut rng)
                });
                let log_pi_prop = (1.0 - beta_curr) * log_prior(proposal.view())
                    + beta_curr * log_target(proposal.view());
                let log_alpha = (log_pi_prop - log_pi_curr).min(0.0);
                let u: f64 = Distribution::sample(&uniform, &mut rng);
                if u.ln() < log_alpha {
                    particles[i] = proposal;
                }
            }
        }
    }
    // Log-sum-exp of the weights, shifted by the max for numerical stability.
    let max_lw = log_weights
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);
    let sum_exp: f64 = log_weights.iter().map(|&lw| (lw - max_lw).exp()).sum();
    let log_z_ratio = max_lw + sum_exp.ln() - (n as f64).ln();
    let z_ratio = log_z_ratio.exp();
    // ESS is scale-invariant, so the max-shifted weights suffice.
    let weights: Vec<f64> = log_weights.iter().map(|&lw| (lw - max_lw).exp()).collect();
    let sum_w: f64 = weights.iter().sum();
    let sum_w_sq: f64 = weights.iter().map(|w| w * w).sum();
    let ess = if sum_w_sq > 0.0 {
        sum_w * sum_w / sum_w_sq
    } else {
        0.0
    };
    Ok(AISResult {
        log_z_ratio,
        z_ratio,
        ess,
        particles,
        log_weights,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use scirs2_core::ndarray::array;

    /// The bridge must have n_steps + 1 points and hit both endpoints exactly.
    #[test]
    fn test_diffusion_bridge() {
        let bridge = diffusion_bridge(0.0, 1.0, 1.0, 50, Some(42)).expect("bridge failed");
        assert_eq!(bridge.path.len(), 51);
        assert_eq!(bridge.times.len(), 51);
        assert!((bridge.path[0] - 0.0).abs() < 1e-10);
        assert!((bridge.path[50] - 1.0).abs() < 1e-10);
    }

    /// Sampling a standard normal should give a near-zero mean and a
    /// non-degenerate acceptance rate.
    #[test]
    fn test_metropolis_walk_standard_normal() {
        let log_density = |x: ArrayView1<f64>| -0.5 * x[0] * x[0];
        let start = array![0.0_f64];
        let opts = MetropolisWalkOptions {
            n_samples: 2000,
            step_size: 1.0,
            n_burnin: 500,
            thinning: 2,
            seed: Some(42),
        };
        let result = metropolis_walk(log_density, start, Some(opts)).expect("Metropolis failed");
        assert!(!result.samples.is_empty());
        assert!(result.acceptance_rate > 0.0 && result.acceptance_rate < 1.0);
        assert!(
            result.mean_estimate.abs() < 0.5,
            "mean={}",
            result.mean_estimate
        );
    }

    /// With zero potential, the estimate reduces to E[g(B_1)] = E[B_1^2] >= 0.
    #[test]
    fn test_feynman_kac() {
        let opts = FeynmanKacOptions {
            n_paths: 500,
            n_steps: 50,
            t_end: 1.0,
            seed: Some(42),
        };
        let value = FeynmanKacEstimator::new(opts)
            .estimate(0.0, |x| x * x, |_t, _x| 0.0)
            .expect("FK failed");
        assert!(value >= 0.0);
    }

    /// Prior N(0, sqrt(2)^2) annealed toward an (unnormalized) standard
    /// normal target; the run must produce a positive effective sample size.
    #[test]
    fn test_ais_normalizing_constant() {
        let log_prior = |x: ArrayView1<f64>| -0.25 * x[0] * x[0];
        let log_target = |x: ArrayView1<f64>| -0.5 * x[0] * x[0];
        let normal_prior = Normal::new(0.0_f64, std::f64::consts::SQRT_2).expect("valid");
        let sample_prior =
            |rng: &mut CoreRandom<RandStdRng>| array![Distribution::sample(&normal_prior, rng)];
        let opts = AISOptions {
            n_particles: 200,
            n_annealing: 5,
            n_mcmc_per_level: 3,
            step_size: 0.5,
            seed: Some(42),
        };
        let result =
            annealed_importance_sampling(log_prior, log_target, sample_prior, 1, Some(opts))
                .expect("AIS failed");
        assert!(result.ess > 0.0);
    }
}