1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
use crate::nl_fit::curve_fit::{CurveFitAlgorithm, CurveFitResult, CurveFitTrait};
use crate::nl_fit::data::Data;

use emcee::{EnsembleSampler, Guess, Prob};
use emcee_rand::*;
use ndarray::Zip;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::rc::Rc;

/// MCMC sampler for non-linear least squares
///
/// First it generates `4 * dimension_number` walkers from the given initial guess using the
/// Standard distribution with `sigma = 1e-5`, next it samples `niterations` guesses for each walker
/// and chooses guess corresponding to the minimum sum of squared deviations (maximum likelihood).
/// Optionally, if `fine_tuning_algorithm` is `Some`, it sends this best guess to the next
/// optimization as an initial guess and returns its result
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename = "Mcmc")]
pub struct McmcCurveFit {
    /// Number of sampling iterations performed for each walker.
    pub niterations: u32,
    /// Optional optimizer run on the best MCMC sample as its initial guess;
    /// boxed — presumably because `CurveFitAlgorithm` can itself contain an
    /// `McmcCurveFit`, which would make the type recursive (TODO confirm).
    pub fine_tuning_algorithm: Option<Box<CurveFitAlgorithm>>,
}

impl McmcCurveFit {
    /// Builds a sampler configuration with the given iteration count and an
    /// optional follow-up optimizer (boxed internally).
    pub fn new(niterations: u32, fine_tuning_algorithm: Option<CurveFitAlgorithm>) -> Self {
        let fine_tuning_algorithm = fine_tuning_algorithm.map(Box::new);
        Self {
            niterations,
            fine_tuning_algorithm,
        }
    }

    /// Default number of MCMC iterations per walker.
    #[inline]
    pub fn default_niterations() -> u32 {
        128
    }

    /// By default no fine-tuning optimizer is run after sampling.
    #[inline]
    pub fn default_fine_tuning_algorithm() -> Option<CurveFitAlgorithm> {
        None
    }
}

impl Default for McmcCurveFit {
    /// Equivalent to `McmcCurveFit::new(128, None)`.
    fn default() -> Self {
        Self {
            niterations: Self::default_niterations(),
            fine_tuning_algorithm: Self::default_fine_tuning_algorithm().map(Box::new),
        }
    }
}

impl CurveFitTrait for McmcCurveFit {
    /// Fits `model` to the data `ts` with an affine-invariant MCMC ensemble
    /// sampler, maximizing the log-likelihood
    /// `-sum(((model(t, params) - m) * inv_err)^2)` over the box `bounds`.
    ///
    /// `x0` is the initial guess, `bounds` gives per-parameter
    /// `(lower, upper)` limits enforced via the prior (see `EmceeModel`),
    /// and `derivatives` is not used by the sampler itself — it is only
    /// forwarded to the optional fine-tuning algorithm.
    fn curve_fit<F, DF>(
        &self,
        ts: Rc<Data<f64>>,
        x0: &[f64],
        bounds: &[(f64, f64)],
        model: F,
        derivatives: DF,
    ) -> CurveFitResult<f64>
    where
        F: 'static + Clone + Fn(f64, &[f64]) -> f64,
        DF: 'static + Clone + Fn(f64, &[f64], &mut [f64]),
    {
        const NWALKERS_PER_DIMENSION: usize = 4;
        let ndims = x0.len();
        let nwalkers = NWALKERS_PER_DIMENSION * ndims;
        let nsamples = ts.t.len();

        // Log-likelihood closure handed to emcee: negated chi^2 computed in
        // f64, then truncated to f32 because emcee works in f32 throughout.
        let lnlike = {
            let ts = ts.clone();
            let model = model.clone();
            move |params: &Guess| {
                let mut residual = 0.0;
                // emcee stores parameters as f32; widen back to f64 for the model.
                let params = params.values.iter().map(|&x| x as f64).collect::<Vec<_>>();
                Zip::from(&ts.t)
                    .and(&ts.m)
                    .and(&ts.inv_err)
                    .for_each(|&t, &m, &inv_err| {
                        residual += (inv_err * (model(t, &params) - m)).powi(2);
                    });
                // `as` binds tighter than unary minus: this is -(residual as f32).
                -residual as f32
            }
        };

        let initial_guess = Guess::new(&x0.iter().map(|&x| x as f32).collect::<Vec<_>>());
        let initial_lnprob = lnlike(&initial_guess);
        let emcee_model = EmceeModel {
            func: lnlike,
            // Bounds are narrowed to f32 to match emcee's parameter space.
            bounds: bounds
                .iter()
                .map(|&(lower, upper)| (lower as f32, upper as f32))
                .collect(),
        };
        let mut sampler = EnsembleSampler::new(nwalkers, ndims, &emcee_model).unwrap();
        // Fixed empty seed: sampling is deterministic across runs.
        sampler.seed(&[]);

        // Track the best (maximum-lnprob) position seen over all walkers and
        // iterations, starting from the caller-supplied guess so the result
        // can never be worse than x0.
        let (best_x, best_lnprob) = {
            let (mut best_x, mut best_lnprob) = (initial_guess.values.clone(), initial_lnprob);
            // Walkers are scattered around x0; the RNG is seeded with the same
            // fixed empty seed for reproducibility.
            let initial_guesses =
                initial_guess.create_initial_guess_with_rng(nwalkers, &mut StdRng::from_seed(&[]));
            let _ = sampler.sample(&initial_guesses, self.niterations as usize, |step| {
                for (pos, &lnprob) in step.pos.iter().zip(step.lnprob.iter()) {
                    if lnprob > best_lnprob {
                        best_x = pos.values.clone();
                        best_lnprob = lnprob;
                    }
                }
            });
            // Widen the winning position back to f64 for the caller.
            (
                best_x.into_iter().map(|x| x as f64).collect::<Vec<_>>(),
                best_lnprob as f64,
            )
        };

        match self.fine_tuning_algorithm.as_ref() {
            // Hand the MCMC optimum to the follow-up optimizer as its x0.
            Some(algo) => algo.curve_fit(ts, &best_x, bounds, model, derivatives),
            None => CurveFitResult {
                x: best_x,
                // -best_lnprob is the chi^2 of the best sample.
                // NOTE(review): `nsamples - ndims` underflows (panics in debug
                // builds) when ndims >= nsamples — confirm callers guarantee
                // more data points than parameters.
                reduced_chi2: -best_lnprob / ((nsamples - ndims) as f64),
                success: true,
            },
        }
    }
}

/// Adapter exposing a log-likelihood closure plus box constraints to `emcee`.
struct EmceeModel<F> {
    // Log-likelihood evaluated at a walker position.
    func: F,
    // Per-parameter (lower, upper) bounds, in emcee's f32 parameter space.
    bounds: Vec<(f32, f32)>,
}

impl<F> Prob for EmceeModel<F>
where
    F: Fn(&Guess) -> f32,
{
    /// Log-likelihood: delegates to the wrapped closure.
    fn lnlike(&self, params: &Guess) -> f32 {
        (self.func)(params)
    }

    /// Flat log-prior over the box constraints: `0.0` when every parameter
    /// lies within its inclusive `[lower, upper]` bound, negative infinity
    /// otherwise, so out-of-bounds guesses get zero posterior probability.
    ///
    /// Parameters beyond `self.bounds.len()` are unconstrained (`zip` stops
    /// at the shorter sequence), matching the original loop's behavior.
    fn lnprior(&self, params: &Guess) -> f32 {
        let out_of_bounds = params
            .values
            .iter()
            .zip(self.bounds.iter())
            .any(|(&p, &(lower, upper))| p < lower || p > upper);
        if out_of_bounds {
            // Idiomatic constant instead of the original `-f32::INFINITY`.
            f32::NEG_INFINITY
        } else {
            0.0
        }
    }
}