Trait rv::traits::ConjugatePrior[src]

pub trait ConjugatePrior<X, Fx>: Rv<Fx> where
    Fx: Rv<X> + HasSuffStat<X>,
{
    type Posterior: Rv<Fx>;
    type LnMCache;
    type LnPpCache;

    fn posterior(&self, x: &DataOrSuffStat<'_, X, Fx>) -> Self::Posterior;
    fn ln_m_cache(&self) -> Self::LnMCache;
    fn ln_m_with_cache(
        &self,
        cache: &Self::LnMCache,
        x: &DataOrSuffStat<'_, X, Fx>
    ) -> f64;
    fn ln_pp_cache(&self, x: &DataOrSuffStat<'_, X, Fx>) -> Self::LnPpCache;
    fn ln_pp_with_cache(&self, cache: &Self::LnPpCache, y: &X) -> f64;

    fn ln_m(&self, x: &DataOrSuffStat<'_, X, Fx>) -> f64 { ... }
    fn ln_pp(&self, y: &X, x: &DataOrSuffStat<'_, X, Fx>) -> f64 { ... }
    fn m(&self, x: &DataOrSuffStat<'_, X, Fx>) -> f64 { ... }
    fn pp(&self, y: &X, x: &DataOrSuffStat<'_, X, Fx>) -> f64 { ... }
}

A prior on Fx that induces a posterior of the same form as the prior

Example

Conjugate analysis of coin flips using Bernoulli with a Beta prior on the success probability.

use rv::traits::ConjugatePrior;
use rv::dist::{Bernoulli, Beta};

let flips = vec![true, false, false];
let prior = Beta::jeffreys();

// If we observe more false than true, the posterior predictive
// probability of true decreases.
let pp_no_obs = prior.pp(&true, &(&vec![]).into());
let pp_obs = prior.pp(&true, &(&flips).into());

assert!(pp_obs < pp_no_obs);

Use a cache to speed up repeated computations.

use rv::traits::{ConjugatePrior, Rv, SuffStat};
use rv::dist::{Categorical, SymmetricDirichlet};
use rv::data::{CategoricalSuffStat, DataOrSuffStat};
use std::time::Instant;

let ncats = 10;
let symdir = SymmetricDirichlet::jeffreys(ncats).unwrap();
let mut suffstat = CategoricalSuffStat::new(ncats);
let mut rng = rand::thread_rng();

Categorical::new(&vec![1.0, 1.0, 5.0, 1.0, 2.0, 1.0, 1.0, 2.0, 1.0, 1.0])
    .unwrap()
    .sample_stream(&mut rng)
    .take(1000)
    .for_each(|x: u8| suffstat.observe(&x));


let stat = DataOrSuffStat::SuffStat(&suffstat);

// Get predictions from predictive distribution using the cache
let t_cache = {
    let t_start = Instant::now();
    let cache = symdir.ln_pp_cache(&stat);
    // Argmax
    let k_max = (0..ncats).fold((0, std::f64::NEG_INFINITY), |(ix, f), y| {
        let f_r = symdir.ln_pp_with_cache(&cache, &y);
        if f_r > f { (y, f_r) } else { (ix, f) }
    });

    assert_eq!(k_max.0, 2);
    t_start.elapsed()
};

// Get predictions from predictive distribution w/o cache
let t_no_cache = {
    let t_start = Instant::now();
    // Argmax
    let k_max = (0..ncats).fold((0, std::f64::NEG_INFINITY), |(ix, f), y| {
        let f_r = symdir.ln_pp(&y, &stat);
        if f_r > f { (y, f_r) } else { (ix, f) }
    });

    assert_eq!(k_max.0, 2);
    t_start.elapsed()
};

// Using cache improves runtime
assert!(t_no_cache.as_nanos() > 2 * t_cache.as_nanos());

Associated Types

type Posterior: Rv<Fx>[src]

Type of the posterior distribution

type LnMCache[src]

Type of the ln_m cache

type LnPpCache[src]

Type of the ln_pp cache


Required methods

fn posterior(&self, x: &DataOrSuffStat<'_, X, Fx>) -> Self::Posterior[src]

Computes the posterior distribution from the data
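
For instance, with the Beta-Bernoulli pairing from the example above (whose Posterior type is Beta itself, per the implementors below), a minimal sketch might look like:

use rv::traits::ConjugatePrior;
use rv::dist::{Bernoulli, Beta};
use rv::data::DataOrSuffStat;

let flips = vec![true, false, false];
let prior = Beta::jeffreys();

// Wrap the observations, then compute the posterior; for a Beta prior
// on Bernoulli data the posterior is another Beta.
let x: DataOrSuffStat<bool, Bernoulli> = (&flips).into();
let posterior: Beta = prior.posterior(&x);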

fn ln_m_cache(&self) -> Self::LnMCache[src]

Compute the cache for the log marginal likelihood.

fn ln_m_with_cache(
    &self,
    cache: &Self::LnMCache,
    x: &DataOrSuffStat<'_, X, Fx>
) -> f64
[src]

Log marginal likelihood with supplied cache.
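
A minimal sketch of the intended pattern, reusing the Beta-Bernoulli setup from the example above. Since ln_m_cache takes no arguments that could pin X and Fx, fully qualified syntax is used here to select the implementation; the cached result should agree with the plain ln_m:

use rv::traits::ConjugatePrior;
use rv::dist::{Bernoulli, Beta};
use rv::data::DataOrSuffStat;

let flips = vec![true, false, false];
let prior = Beta::jeffreys();
let x: DataOrSuffStat<bool, Bernoulli> = (&flips).into();

// Build the cache once (fully qualified to pin X = bool, Fx = Bernoulli)...
let cache = <Beta as ConjugatePrior<bool, Bernoulli>>::ln_m_cache(&prior);
// ...then reuse it across marginal-likelihood evaluations.
let ln_m_cached = prior.ln_m_with_cache(&cache, &x);

// Agrees (up to floating point) with the uncached ln_m.
assert!((ln_m_cached - prior.ln_m(&x)).abs() < 1e-10);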

fn ln_pp_cache(&self, x: &DataOrSuffStat<'_, X, Fx>) -> Self::LnPpCache[src]

Compute the cache for the log posterior predictive of y given x.

The cache should encompass all information about x.

fn ln_pp_with_cache(&self, cache: &Self::LnPpCache, y: &X) -> f64[src]

Log posterior predictive of y given x with supplied cache.
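
The longer example above demonstrates this with SymmetricDirichlet; a shorter sketch with the Beta-Bernoulli setup, where the cache is built once from x and then queried for different values of y:

use rv::traits::ConjugatePrior;
use rv::dist::{Bernoulli, Beta};
use rv::data::DataOrSuffStat;

let flips = vec![true, false, false];
let prior = Beta::jeffreys();
let x: DataOrSuffStat<bool, Bernoulli> = (&flips).into();

// The cache captures everything about x, so repeated predictions only
// need the new datum y.
let cache = prior.ln_pp_cache(&x);
let ln_pp_true = prior.ln_pp_with_cache(&cache, &true);
let ln_pp_false = prior.ln_pp_with_cache(&cache, &false);

// Agrees with the uncached ln_pp; false is more probable here because
// more false flips were observed.
assert!((ln_pp_true - prior.ln_pp(&true, &x)).abs() < 1e-10);
assert!(ln_pp_false > ln_pp_true);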


Provided methods

fn ln_m(&self, x: &DataOrSuffStat<'_, X, Fx>) -> f64[src]

The log marginal likelihood

fn ln_pp(&self, y: &X, x: &DataOrSuffStat<'_, X, Fx>) -> f64[src]

Log posterior predictive of y given x

fn m(&self, x: &DataOrSuffStat<'_, X, Fx>) -> f64[src]

Marginal likelihood of x

fn pp(&self, y: &X, x: &DataOrSuffStat<'_, X, Fx>) -> f64[src]

Posterior predictive of y given x
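
The provided methods are presumably thin wrappers over their log counterparts; under that assumption, and with the Beta-Bernoulli setup from above, one would expect:

use rv::traits::ConjugatePrior;
use rv::dist::{Bernoulli, Beta};
use rv::data::DataOrSuffStat;

let flips = vec![true, false, false];
let prior = Beta::jeffreys();
let x: DataOrSuffStat<bool, Bernoulli> = (&flips).into();

// m and pp are the (non-log) marginal likelihood and posterior predictive,
// so they should match the exponentials of ln_m and ln_pp.
assert!((prior.m(&x) - prior.ln_m(&x).exp()).abs() < 1e-10);
assert!((prior.pp(&true, &x) - prior.ln_pp(&true, &x).exp()).abs() < 1e-10);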


Implementors

impl ConjugatePrior<f64, Gaussian> for NormalGamma[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (GaussianSuffStat, f64)

impl ConjugatePrior<f64, Gaussian> for NormalInvChiSquared[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (GaussianSuffStat, f64)

impl ConjugatePrior<f64, Gaussian> for NormalInvGamma[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (GaussianSuffStat, f64)

impl ConjugatePrior<u8, Poisson> for Gamma[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (f64, f64, f64)

impl ConjugatePrior<u16, Poisson> for Gamma[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (f64, f64, f64)

impl ConjugatePrior<u32, Poisson> for Gamma[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (f64, f64, f64)

impl ConjugatePrior<DVector<f64>, MvGaussian> for NormalInvWishart[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (Self, f64)

impl<X: Booleable> ConjugatePrior<X, Bernoulli> for Beta[src]

type Posterior = Self

type LnMCache = f64

type LnPpCache = (f64, f64)

impl<X: CategoricalDatum> ConjugatePrior<X, Categorical> for Dirichlet[src]

type Posterior = Self

type LnMCache = (f64, f64)

type LnPpCache = (Vec<f64>, f64)

impl<X: CategoricalDatum> ConjugatePrior<X, Categorical> for SymmetricDirichlet[src]

type Posterior = Dirichlet

type LnMCache = f64

type LnPpCache = (Vec<f64>, f64)
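
For instance, the Gamma-Poisson pairing listed above behaves just like Beta-Bernoulli; a minimal sketch (the shape and rate of 1.0 are arbitrary values chosen for illustration):

use rv::traits::ConjugatePrior;
use rv::dist::{Gamma, Poisson};
use rv::data::DataOrSuffStat;

// Arbitrary shape and rate, for illustration only.
let prior = Gamma::new(1.0, 1.0).unwrap();
let counts: Vec<u32> = vec![2, 0, 3, 1];
let x: DataOrSuffStat<u32, Poisson> = (&counts).into();

// Per the implementor list, the posterior over the Poisson rate is again a Gamma.
let posterior: Gamma = prior.posterior(&x);

// The marginal likelihood of discrete data is a probability, so its log is negative.
assert!(prior.ln_m(&x) < 0.0);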
