pointprocesses/likelihood/hawkes.rs

/*!
 * Definitions for the Hawkes process log-likelihood function.
 */
use ndarray::prelude::*;
use ndarray_parallel::prelude::*;


use crate::temporal::hawkes::ExpHawkes;
use crate::temporal::DeterministicIntensity;

/// Parameters of the exponential-kernel Hawkes model, whose intensity is
/// $\lambda(t) = \lambda_0 + \sum_{t_i < t} \alpha e^{-\beta (t - t_i)}$.
pub struct HawkesExpParams {
    /// Background intensity $\lambda_0$.
    lbda0: f64,
    /// Jump size $\alpha$ of the exponential kernel.
    alpha: f64,
    /// Decay rate $\beta$ of the exponential kernel.
    beta: f64
}


/// Log-likelihood of the given event data under the Hawkes model.
/// $$
///     \ell =
///     \sum_{i=1}^N \log\left(
///         \lambda_0 + \sum_{j < i} \alpha e^{-\beta(t_i-t_j)}
///     \right)
///     - \lambda_0 T
///     - \sum_{i=1}^N \frac{\alpha}{\beta}
///     \left(1 - e^{-\beta(T - t_i)}\right)
/// $$
pub struct HawkesLikelihood<'a> {
    times: ArrayView1<'a, f64>,
    params: HawkesExpParams,
    tmax: f64,
    /// Cached partial sums $R_i = \sum_{j < i} e^{-\beta (t_i - t_j)}$.
    partial_sums: Array1<f64>,
    /// Cached compensator $\int_0^T \lambda(t)\,dt$.
    integral: f64
}

/// Integral term of the likelihood -- actually easy to compute.
fn integral_term(
    times: ArrayView1<f64>,
    lbda0: f64,
    alpha: f64,
    beta: f64,
    tmax: f64) -> f64
{
    let times = times.into_owned();

    // Sum the int_0^T g(t-t_i) dt
    let res: f64 = times
        .par_iter()
        .fold(|| 0., |acc, t| {
            acc + alpha / beta * (1. - (-beta*(tmax - t)).exp())
        }).sum();
    lbda0*tmax + res
}
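
// Worked form of the compensator computed above (a short derivation, for
// reference): with intensity
// $\lambda(t) = \lambda_0 + \sum_{t_i < t} \alpha e^{-\beta (t - t_i)}$,
// $$
//     \int_0^T \lambda(t)\,dt
//     = \lambda_0 T + \sum_{i=1}^N \int_{t_i}^T \alpha e^{-\beta (t - t_i)}\,dt
//     = \lambda_0 T + \sum_{i=1}^N \frac{\alpha}{\beta}
//       \left(1 - e^{-\beta (T - t_i)}\right),
// $$
// which is exactly `lbda0*tmax + res` returned by `integral_term`.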

/// Compute
/// $$
///     R_i = \sum_{j < i} e^{-\beta (t_i - t_j)}
/// $$
fn compute_part_sums(
    times: ArrayView1<f64>,
    beta: f64) -> Array1<f64>
{
    let n_events = times.shape()[0];
    let mut r_arr = Array1::zeros(n_events);
    let mut decay;

    r_arr[0] = 0.;
    for i in 1..n_events {
        decay = (-beta*(times[i] - times[i-1])).exp();
        r_arr[i] = decay * r_arr[i-1] + decay;
    }

    r_arr
}
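
// The loop above uses the standard recursion for the exponential kernel
// (stated here for reference): writing
// $t_i - t_j = (t_i - t_{i-1}) + (t_{i-1} - t_j)$ gives
// $$
//     R_i = e^{-\beta (t_i - t_{i-1})} \left(1 + R_{i-1}\right),
// $$
// with the first entry equal to zero, so the partial sums come out of a
// single O(N) pass instead of the naive O(N^2) double sum.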

/// Compute
/// $$
///     C_i = \sum_{j < i} t_j e^{-\beta (t_i - t_j)}
/// $$
fn compute_partial_deriv_sum(
    times: ArrayView1<f64>,
    beta: f64) -> Array1<f64>
{
    let n_events = times.len();
    let mut c_arr = Array1::zeros(n_events);
    let mut decay;

    for i in 1..n_events {
        // Same recursion as for the partial sums R_i: the whole history is
        // discounted by exp(-beta (t_i - t_{i-1})) and the newly included
        // event t_{i-1} enters the sum.
        decay = (-beta * (times[i] - times[i-1])).exp();
        c_arr[i] = decay * (c_arr[i-1] + times[i-1]);
    }

    c_arr
}
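
// Analogous recursion (for reference):
// $$
//     C_i = e^{-\beta (t_i - t_{i-1})} \left(C_{i-1} + t_{i-1}\right).
// $$
// Together with $R_i$ this yields
// $B_i = t_i R_i - C_i = \sum_{j < i} (t_i - t_j) e^{-\beta (t_i - t_j)}
// = -\partial R_i / \partial \beta$, which is what `grad` below relies on.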


impl<'a> HawkesLikelihood<'a> {
    pub fn new(
        times: ArrayView1<'a, f64>,
        lbda0: f64,
        alpha: f64,
        beta: f64,
        tmax: f64) -> Self
    {
        let integral = integral_term(
            times, lbda0, alpha, beta, tmax);

        // Partial sums R_i = sum_{j < i} exp(-beta (t_i - t_j))
        let partial_sums = compute_part_sums(times, beta);

        HawkesLikelihood {
            times,
            params: HawkesExpParams { lbda0, alpha, beta },
            tmax,
            partial_sums,
            integral
        }
    }

    pub fn compute_likelihood(&self) -> f64
    {
        let HawkesExpParams { lbda0, alpha, beta: _ } = self.params;

        // Sum of the log-intensities ln(lbda0 + alpha * R_i) at the event times
        let evt_llhood: f64 = self.partial_sums
            .par_iter()
            .fold(|| 0., |acc, rk| {
                acc + (lbda0 + alpha * rk).ln()
            }).sum();

        evt_llhood - self.integral
    }

    /// Gradient of the log-likelihood with respect to
    /// $(\lambda_0, \alpha, \beta)$, in that order. It uses
    /// $\partial R_i / \partial \beta = -B_i$ where
    /// $B_i = t_i R_i - C_i = \sum_{j < i} (t_i - t_j) e^{-\beta (t_i - t_j)}$.
    pub fn grad(&self) -> Array1<f64> {
        let HawkesExpParams { lbda0, alpha, beta } = self.params;
        let tmax = self.tmax;

        // Sum of exp(-beta(t_i-t_j))
        let part_sums = &self.partial_sums;

        // C_i = sum_{j < i} t_j exp(-beta (t_i - t_j))
        let c_arr = compute_partial_deriv_sum(
            self.times, beta);

        // B_i = t_i R_i - C_i = -dR_i/dbeta
        let b_arr = self.times.to_owned() * part_sums.clone() - c_arr;

        // Kernel part of the compensator:
        // sum_i (alpha/beta) (1 - exp(-beta (tmax - t_i)))
        let kernel_integral = self.integral - lbda0 * tmax;

        let lbda0_deriv = part_sums.iter()
            .fold(0., |acc, r| {
                acc + 1. / (lbda0 + alpha * r)
            }) - tmax;

        let alpha_deriv = part_sums.iter()
            .fold(0., |acc, r| {
                acc + r / (lbda0 + alpha * r)
            }) - kernel_integral / alpha;

        // sum_i (alpha/beta) (tmax - t_i) exp(-beta (tmax - t_i))
        let integral_beta_deriv = self.times.iter()
            .fold(0., |acc, ti| {
                acc + alpha / beta
                * (tmax - ti) * (-beta * (tmax - ti)).exp()
            });

        let beta_deriv = part_sums.iter()
            .zip(b_arr.iter())
            .fold(0., |acc, (r, b)| {
                acc - alpha * b / (lbda0 + alpha * r)
            })
            + kernel_integral / beta
            - integral_beta_deriv;

        arr1(&[lbda0_deriv, alpha_deriv, beta_deriv])
    }
}
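
// A hypothetical usage sketch (not part of the original API; the sample data
// and parameter values below are made up for illustration): build the
// likelihood object once, then reuse its cached partial sums to get both the
// log-likelihood and its gradient for a single parameter guess.
#[allow(dead_code)]
fn example_evaluate_once() -> (f64, Array1<f64>) {
    let times = arr1(&[0.3, 0.7, 1.1, 2.4]);
    let (lbda0, alpha, beta, tmax) = (0.8, 0.5, 1.2, 3.0);

    let lik = HawkesLikelihood::new(times.view(), lbda0, alpha, beta, tmax);
    // Gradient is ordered as (d/d lbda0, d/d alpha, d/d beta).
    (lik.compute_likelihood(), lik.grad())
}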


/// Log-likelihood of the given event data under the Hawkes model.
/// $$
///     \ell =
///     \sum_{i=1}^N \log\left(
///         \lambda_0 + \sum_{j < i} \alpha e^{-\beta(t_i-t_j)}
///     \right)
///     - \lambda_0 T
///     - \sum_{i=1}^N \frac{\alpha}{\beta}
///     \left(1 - e^{-\beta(T - t_i)}\right)
/// $$
pub fn hawkes_likelihood(
    times: ArrayView1<f64>,
    model: &ExpHawkes,
    tmax: f64) -> f64
{
    // Background intensity evaluated at t = 0 (assumed constant).
    let lbda0 = model.get_background().intensity(0.);
    let kernel = model.get_kernel();
    let alpha = kernel.alpha;
    let beta = kernel.beta;

    let hl_obj = HawkesLikelihood::new(
        times,
        lbda0, alpha, beta,
        tmax);

    hl_obj.compute_likelihood()
}
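
// A minimal test sketch (an added check, not from the original test suite):
// it verifies the O(N) recursion in `compute_part_sums` against the naive
// O(N^2) definition of $R_i$ on a small hand-written event sequence.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn partial_sums_match_naive_double_sum() {
        let times = arr1(&[0.1, 0.5, 0.6, 1.2, 2.0]);
        let beta = 1.5;

        let recursive = compute_part_sums(times.view(), beta);

        for i in 0..times.len() {
            // Naive R_i = sum_{j < i} exp(-beta (t_i - t_j))
            let naive: f64 = (0..i)
                .map(|j| (-beta * (times[i] - times[j])).exp())
                .sum();
            assert!((recursive[i] - naive).abs() < 1e-12);
        }
    }
}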