/// Representation of the probabilistic execution of a `GenFn`.
#[derive(Clone)]
pub struct Trace<Args, Data, Ret> {
    /// Input arguments to the `GenFn`.
    pub args: Args,
    /// Random variables sampled by the `GenFn`.
    pub data: Data,
    /// The return value of the `GenFn`.
    /// Always `Some(v)` if the `Trace` is returned by a GFI method.
    pub retv: Option<Ret>,
    /// The log joint probability of all the data, `log[p(data; args)]`.
    pub logp: f64,
}

impl<Args: 'static, Data: 'static, Ret: 'static> Trace<Args, Data, Ret> {
    /// Create a `Trace` with a `Some(retv)`.
    pub fn new(args: Args, data: Data, retv: Ret, logp: f64) -> Self {
        Trace { args, data, retv: Some(retv), logp }
    }

    /// Set `self.retv` to `Some(v)`.
    pub fn set_retv(&mut self, v: Ret) { self.retv = Some(v); }
}
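
// Illustrative sketch (not part of the original API): constructing a `Trace` by
// hand with concrete type parameters. In practice traces are produced by the
// `GenFn` methods below; the argument, data, and log-probability values here
// are made up purely to show the shape of the struct.
#[cfg(test)]
mod trace_sketch {
    use super::Trace;

    #[test]
    fn build_and_modify_a_trace() {
        // Args = (f64, f64), Data = Vec<f64>, Ret = f64 -- arbitrary choices.
        let mut t: Trace<(f64, f64), Vec<f64>, f64> =
            Trace::new((0.0, 1.0), vec![0.3, -1.2], -1.2, -2.54);
        assert_eq!(t.retv, Some(-1.2));
        t.set_retv(0.3); // overwrite the cached return value
        assert_eq!(t.retv, Some(0.3));
    }
}
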
/// Interface for functions that support the standard inference library.
///
/// The implementation follows the Generative Function Interface (GFI) closely, as specified in:
///
/// > Gen: A General-Purpose Probabilistic Programming System with Programmable Inference.
/// > Cusumano-Towner, M. F.; Saad, F. A.; Lew, A.; and Mansinghka, V. K.
/// > In Proceedings of the 40th ACM SIGPLAN Conference on Programming Language
/// > Design and Implementation (PLDI '19).
///
/// Any function that implements `GenFn` can use the standard inference library
/// to perform Bayesian inference, i.e. to generate fair samples from the posterior
/// distribution:
///
/// `trace ~ p( . | constraints)`
///
/// This terminology may be slightly unusual to users coming from other languages;
/// `data` refers to all random variables, and `constraints` more precisely
/// refers to the subset of the data that we observe.
pub trait GenFn<Args, Data, Ret> {
    /// Execute the generative function and return a sampled trace.
    fn simulate(&self, args: Args) -> Trace<Args, Data, Ret>;

    /// Execute the generative function consistently with `constraints`.
    /// Return the trace together with the log importance weight
    /// `log[p(t; args) / q(t; constraints, args)]`.
    fn generate(&self, args: Args, constraints: Data) -> (Trace<Args, Data, Ret>, f64);

    /// Update a `trace` to be consistent with new `args` and forward `constraints`,
    /// given a hint `diff` describing how the arguments changed.
    /// Return the updated trace, the backward choices discarded by the update,
    /// and the log weight of the move.
    fn update(&self,
        trace: Trace<Args, Data, Ret>,
        args: Args,
        diff: GfDiff,
        constraints: Data                         // Data := forward choices
    ) -> (Trace<Args, Data, Ret>, Data, f64);     // Data := backward choices

    /// Call a generative function and return the output.
    fn call(&self, args: Args) -> Ret {
        self.simulate(args).retv.unwrap()
    }

    /// Use a generative function to propose some data, returning it together with
    /// its log probability.
    fn propose(&self, args: Args) -> (Data, f64) {
        let trace = self.simulate(args);
        (trace.data, trace.logp)
    }

    /// Assess the log conditional probability of some proposed `constraints` under a generative function.
    fn assess(&self, args: Args, constraints: Data) -> f64 {
        let (_, weight) = self.generate(args, constraints);
        weight
    }
}
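
// Sketch of how the GFI supports generic inference code (this helper is not part
// of the original source): estimating the log marginal likelihood
// `log p(constraints; args)` by averaging importance weights from `generate`.
// The `Clone` bounds and the function name are assumptions made for this example.
pub fn log_marginal_estimate<Args: Clone, Data: Clone, Ret>(
    model: &impl GenFn<Args, Data, Ret>,
    args: Args,
    constraints: Data,
    num_samples: usize,
) -> f64 {
    assert!(num_samples > 0, "need at least one importance sample");
    // Each call to `generate` returns log[p(t; args) / q(t; constraints, args)].
    let log_weights: Vec<f64> = (0..num_samples)
        .map(|_| model.generate(args.clone(), constraints.clone()).1)
        .collect();
    // Log-mean-exp of the weights, computed stably.
    let max = log_weights.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
    let sum: f64 = log_weights.iter().map(|lw| (lw - max).exp()).sum();
    max + (sum / num_samples as f64).ln()
}
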
/// Flag that gives information about the kind of incremental difference a generative
/// function can expect in a `Trace`'s arguments during an update.
///
/// Can be used to increase efficiency, for example in particle filter procedures.
#[derive(Debug, Clone)]
pub enum GfDiff {
    /// No change to input arguments.
    NoChange,
    /// An unknown change to input arguments.
    Unknown,
    /// An incremental change to input arguments.
    ///
    /// Generally means the `trace` has a vector-valued
    /// `data` field that is being pushed to.
    Extend,
}
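
// Sketch (not part of the original source): advancing a particle-filter particle
// with `update`, passing `GfDiff::Extend` to signal that the arguments only grew
// (e.g. one more time step of observations), so the generative function may reuse
// previously sampled choices. The helper name and signature are illustrative.
pub fn extend_particle<Args, Data, Ret>(
    model: &impl GenFn<Args, Data, Ret>,
    particle: Trace<Args, Data, Ret>,
    new_args: Args,
    new_observations: Data,
) -> (Trace<Args, Data, Ret>, f64) {
    // An `Extend` update appends new choices; the backward choices it reports
    // are not needed for a simple bootstrap filter, so they are dropped here.
    let (trace, _backward, log_weight) =
        model.update(particle, new_args, GfDiff::Extend, new_observations);
    (trace, log_weight)
}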