use crate::{bfs, cn, si, Network, Weight};
use nalgebra::{DMatrix, DVector};
use rayon::prelude::*;
use std::cmp::Ordering;
/// Outcome of a source-detection estimator: candidate nodes together with
/// their scores, plus the known true source for evaluating the ranking.
#[derive(Debug, Clone)]
pub struct Ranking {
// Node index -> score; iteration order is the ranking order
// (sorted by descending score before construction).
ranking: cn::IndexMap<usize, f64>,
// The node that actually originated the spread; used only for evaluation.
true_source: usize,
}
impl Ranking {
    /// Borrow the node -> score map (iteration order = ranking order).
    pub fn as_map(&self) -> &cn::IndexMap<usize, f64> {
        &self.ranking
    }

    /// The node that actually originated the spread.
    pub fn true_source(&self) -> usize {
        self.true_source
    }

    /// Best-ranked node and its score, or `None` for an empty ranking.
    pub fn top(&self) -> Option<(usize, f64)> {
        let (&node, &score) = self.as_map().first()?;
        Some((node, score))
    }

    /// Zero-based position of the true source in the ranking, if present.
    pub fn true_source_position(&self) -> Option<usize> {
        let source = self.true_source();
        self.as_map().get_index_of(&source)
    }

    /// Score assigned to the true source, if it appears in the ranking.
    pub fn true_source_score(&self) -> Option<f64> {
        let source = self.true_source();
        self.as_map().get(&source).copied()
    }

    /// Probability that picking uniformly among the top-scoring nodes hits
    /// the true source: `1 / #(nodes tied for the top score)` when the true
    /// source shares the top score, `0.0` otherwise (or when empty).
    pub fn precision(&self) -> f64 {
        let top_score = match self.top() {
            Some((_, score)) => score,
            None => return 0.0,
        };
        let source_score = match self.true_source_score() {
            Some(score) => score,
            None => return 0.0,
        };
        // Scores are floats; equality is tested within EPSILON.
        if (top_score - source_score).abs() >= f64::EPSILON {
            return 0.0;
        }
        let tied = self
            .as_map()
            .values()
            .filter(|&s| (s - top_score).abs() < f64::EPSILON)
            .count();
        1.0 / tied as f64
    }

    /// Hop distance from the top-ranked node to the true source:
    /// `Ok(Some(0))` on an exact hit, the BFS distance otherwise, and
    /// `Ok(None)` for an empty ranking.
    pub fn distance_error(&self, net: &Network) -> cn::Result<Option<usize>> {
        match self.top() {
            Some((top, _)) if top == self.true_source() => Ok(Some(0)),
            Some((top, _)) => bfs::distance(net, top, self.true_source()),
            None => Ok(None),
        }
    }
}
pub fn pearson(net: &Network, observers: &si::Observers) -> cn::Result<Ranking> {
if !net.is_whole() {
return Err(cn::Err::NotWhole);
}
if let Some(o) = observers.as_map().keys().find(|&o| !net.exists(*o)) {
return Err(cn::Err::NoSuchNode(*o));
}
let avg_t = observers.as_map().values().sum::<usize>() as f64 / observers.as_map().len() as f64;
let times_reduced: Vec<f64> = observers
.as_map()
.values()
.map(|t| (*t as f64 - avg_t))
.collect();
let t_sq_sum: f64 = times_reduced.iter().map(|t| t * t).sum();
let need_distance: Vec<usize> = observers.as_map().keys().copied().collect();
let unobservers: Vec<usize> = net.nodes().collect();
let mut scores: Vec<(usize, f64)> = unobservers
.par_iter()
.map(|unobserver| {
let distances =
bfs::distance_many(net, *unobserver, &need_distance).unwrap_or_default();
let mut avg_d = 0.0;
let mut distances: Vec<f64> = observers
.as_map()
.keys()
.map(|o| {
let d = distances[*o] as f64;
avg_d += d;
d
})
.collect();
avg_d /= distances.len() as f64;
for d in distances.iter_mut() {
let d_red = *d - avg_d;
*d = if d_red.abs() < f64::EPSILON {
f64::EPSILON
} else {
d_red
};
}
let score: f64 = distances
.iter()
.zip(×_reduced)
.map(|(d, t)| d * t)
.sum::<f64>()
/ (distances.into_iter().map(|d| d * d).sum::<f64>() * t_sq_sum).sqrt();
(*unobserver, score)
})
.collect();
scores.sort_unstable_by(|(_, s1), (_, s2)| {
s2.partial_cmp(s1).unwrap_or(std::cmp::Ordering::Less)
});
Ok(Ranking {
ranking: scores.into_iter().collect(),
true_source: observers.true_source(),
})
}
/// Rank every node of `net` as a candidate source by the log-likelihood of
/// the observed relative infection delays under a Gaussian delay model on
/// the candidate's BFS tree (LPTV estimator). Only defined for networks
/// with `Weight::Constant`; `Weight::Uniform` yields an empty ranking.
///
/// # Errors
/// * `cn::Err::NotWhole` if the network is not connected.
/// * `cn::Err::NoTarget` if there are no observers.
pub fn lptv(net: &Network, observers: &si::Observers) -> cn::Result<Ranking> {
    if !net.is_whole() {
        return Err(cn::Err::NotWhole);
    }
    let need_distance: Vec<usize> = observers.as_map().keys().copied().collect();
    // The first observer is the reference; all delays are relative to it.
    let (ref_o, other_o) = need_distance.split_first().ok_or(cn::Err::NoTarget)?;
    // Per-edge delay mean and variance — presumably a geometric transmission
    // time with success probability `c` (mean 1/c, variance (1-c)/c²).
    let (mean, mut variance) = match net.weight() {
        Weight::Constant { c } => (1.0 / c, (1.0 - c) / (c * c)),
        Weight::Uniform => {
            // The model is not defined for uniform weights: empty ranking.
            return Ok(Ranking {
                ranking: cn::IndexMap::default(),
                true_source: observers.true_source(),
            })
        }
    };
    // Guard against a zero variance (c == 1) so the covariance stays usable.
    if variance.abs() < f64::EPSILON {
        variance = f64::EPSILON;
    }
    let suspects: Vec<usize> = net.nodes().collect();
    // Observed infection delays relative to the reference observer.
    let delays: DVector<f64> = DVector::from_vec(
        other_o
            .par_iter()
            .map(|o| observers.as_map()[o] as f64 - observers.as_map()[ref_o] as f64)
            .collect(),
    );
    let mut scores: Vec<(usize, f64)> = suspects
        .par_iter()
        .filter_map(|&suspect| {
            let tree = bfs::tree_active(net, suspect, &need_distance).ok()?;
            let distances = bfs::distance_many(&tree, suspect, &need_distance).ok()?;
            let paths = bfs::path_many(&tree, *ref_o, &need_distance).ok()?;
            // Lambda[(i, j)] = shared length of the ref->o_i and ref->o_j tree
            // paths (full path length on the diagonal). Only the lower
            // triangle is computed here; it is mirrored below (symmetric).
            let mut lambda_matrix: DMatrix<f64> =
                DMatrix::from_fn(other_o.len(), other_o.len(), |i: usize, j: usize| {
                    let o_i = other_o[i];
                    let o_j = other_o[j];
                    match i.cmp(&j) {
                        Ordering::Equal => (paths[o_i].as_vec().unwrap().len()) as f64,
                        Ordering::Greater => {
                            let path_i = &paths[o_i];
                            let path_j = &paths[o_j];
                            (path_i.common_len(path_j).unwrap()) as f64
                        }
                        Ordering::Less => 0.0,
                    }
                });
            for j in 0..other_o.len() {
                for i in 0..j {
                    *lambda_matrix.index_mut((i, j)) = *lambda_matrix.index((j, i))
                }
            }
            // det is taken before scaling by `variance`; the missing
            // variance^n factor of |Σ| is re-added in the score via
            // `other_o.len() as f64 * variance.ln()`.
            let det = lambda_matrix.determinant().abs();
            lambda_matrix *= variance;
            if !lambda_matrix.try_inverse_mut() {
                // Singular covariance: this suspect cannot be scored.
                // Skip it (consistent with the `.ok()?` failures above)
                // instead of panicking in library code.
                return None;
            }
            // Expected delay vector if `suspect` were the true source.
            let mu = DVector::from_vec(
                other_o
                    .iter()
                    .map(|o| mean * (distances[*o] as i64 - distances[*ref_o] as i64) as f64)
                    .collect(),
            );
            // Fixed: `&delays - μ` was a corrupted reference to `&mu`
            // (and was missing its semicolon); it did not compile.
            let d_red = &delays - &mu;
            // Gaussian log-likelihood up to an additive constant:
            // -(d-μ)ᵀ Σ⁻¹ (d-μ) - ln|Σ|.
            let score = -(d_red.transpose() * lambda_matrix * d_red)[0]
                - det.ln()
                - other_o.len() as f64 * variance.ln();
            Some((suspect, score))
        })
        .collect();
    // Sort by descending score; NaN compares as smaller so sorting never
    // panics (consistent with `pearson`).
    scores.sort_unstable_by(|(_, s1), (_, s2)| s2.partial_cmp(s1).unwrap_or(Ordering::Less));
    Ok(Ranking {
        ranking: scores.into_iter().collect(),
        true_source: observers.true_source(),
    })
}