use crate::Tensor;
use super::Metric;
/// Binarize raw scores into 0/1 class labels.
///
/// A prediction counts as positive when it is at or above `threshold`;
/// targets are binarized against a fixed 0.5 cutoff (they are expected to
/// already be 0.0/1.0-ish values).
fn threshold_to_labels(
    predictions: &Tensor,
    targets: &Tensor,
    threshold: f32,
) -> (Vec<usize>, Vec<usize>) {
    let y_pred = predictions
        .data()
        .iter()
        .map(|&score| if score >= threshold { 1 } else { 0 })
        .collect();
    let y_true = targets
        .data()
        .iter()
        .map(|&label| if label >= 0.5 { 1 } else { 0 })
        .collect();
    (y_pred, y_true)
}
/// Classification accuracy: the fraction of thresholded predictions that
/// match the binarized targets.
#[derive(Debug, Clone)]
pub struct Accuracy {
    // Scores at or above this value are classified as positive.
    pub(crate) threshold: f32,
}

impl Accuracy {
    /// Create an accuracy metric with a custom decision threshold.
    pub fn new(threshold: f32) -> Self {
        Self { threshold }
    }

    /// Convenience constructor using the conventional 0.5 threshold.
    pub fn default_threshold() -> Self {
        Self::default()
    }
}

impl Default for Accuracy {
    /// Defaults to a 0.5 decision threshold.
    fn default() -> Self {
        Self { threshold: 0.5 }
    }
}
impl Metric for Accuracy {
    /// Compute accuracy: the fraction of elements whose thresholded
    /// prediction equals the binarized target. Returns 0.0 for empty input.
    ///
    /// # Panics
    /// Panics if `predictions` and `targets` differ in length.
    fn compute(&self, predictions: &Tensor, targets: &Tensor) -> f32 {
        assert_eq!(
            predictions.len(),
            targets.len(),
            "Predictions and targets must have same length"
        );
        if predictions.is_empty() {
            return 0.0;
        }
        let (y_pred, y_true) = threshold_to_labels(predictions, targets, self.threshold);
        // Computed inline — mirroring the Precision/Recall implementations in
        // this module — instead of delegating to an external crate helper.
        let correct = y_pred.iter().zip(y_true.iter()).filter(|(p, t)| p == t).count();
        correct as f32 / y_pred.len() as f32
    }

    fn name(&self) -> &'static str {
        "Accuracy"
    }
}
/// Precision metric: of everything predicted positive, the fraction that is
/// actually positive.
#[derive(Debug, Clone)]
pub struct Precision {
    // Scores at or above this value are classified as positive.
    pub(crate) threshold: f32,
}

impl Precision {
    /// Create a precision metric with a custom decision threshold.
    pub fn new(threshold: f32) -> Self {
        Self { threshold }
    }
}

impl Default for Precision {
    /// Defaults to a 0.5 decision threshold.
    fn default() -> Self {
        Self { threshold: 0.5 }
    }
}
impl Metric for Precision {
    /// Compute precision = TP / (TP + FP).
    ///
    /// Returns 0.0 for empty input, and 0.0 when nothing was predicted
    /// positive (avoiding a 0/0 division).
    ///
    /// # Panics
    /// Panics if `predictions` and `targets` differ in length.
    fn compute(&self, predictions: &Tensor, targets: &Tensor) -> f32 {
        // Message added for parity with the Accuracy metric's assertion.
        assert_eq!(
            predictions.len(),
            targets.len(),
            "Predictions and targets must have same length"
        );
        if predictions.is_empty() {
            return 0.0;
        }
        let (y_pred, y_true) = threshold_to_labels(predictions, targets, self.threshold);
        let predicted_positives = y_pred.iter().filter(|&&p| p == 1).count();
        if predicted_positives == 0 {
            return 0.0;
        }
        let true_positives = y_pred
            .iter()
            .zip(y_true.iter())
            .filter(|&(&p, &t)| p == 1 && t == 1)
            .count();
        true_positives as f32 / predicted_positives as f32
    }

    fn name(&self) -> &'static str {
        "Precision"
    }
}
/// Recall metric: of everything actually positive, the fraction that was
/// predicted positive.
#[derive(Debug, Clone)]
pub struct Recall {
    // Scores at or above this value are classified as positive.
    pub(crate) threshold: f32,
}

impl Recall {
    /// Create a recall metric with a custom decision threshold.
    pub fn new(threshold: f32) -> Self {
        Self { threshold }
    }
}

impl Default for Recall {
    /// Defaults to a 0.5 decision threshold.
    fn default() -> Self {
        Self { threshold: 0.5 }
    }
}
impl Metric for Recall {
    /// Compute recall = TP / (TP + FN).
    ///
    /// Returns 0.0 for empty input, and 0.0 when there are no actual
    /// positives (avoiding a 0/0 division).
    ///
    /// # Panics
    /// Panics if `predictions` and `targets` differ in length.
    fn compute(&self, predictions: &Tensor, targets: &Tensor) -> f32 {
        // Message added for parity with the Accuracy metric's assertion.
        assert_eq!(
            predictions.len(),
            targets.len(),
            "Predictions and targets must have same length"
        );
        if predictions.is_empty() {
            return 0.0;
        }
        let (y_pred, y_true) = threshold_to_labels(predictions, targets, self.threshold);
        let actual_positives = y_true.iter().filter(|&&t| t == 1).count();
        if actual_positives == 0 {
            return 0.0;
        }
        let true_positives = y_pred
            .iter()
            .zip(y_true.iter())
            .filter(|&(&p, &t)| p == 1 && t == 1)
            .count();
        true_positives as f32 / actual_positives as f32
    }

    fn name(&self) -> &'static str {
        "Recall"
    }
}
/// F1 score: the harmonic mean of precision and recall, both evaluated at a
/// shared decision threshold.
#[derive(Debug, Clone)]
pub struct F1Score {
    precision: Precision,
    recall: Recall,
}

impl F1Score {
    /// Build an F1 metric whose precision and recall share `threshold`.
    pub fn new(threshold: f32) -> Self {
        let precision = Precision::new(threshold);
        let recall = Recall::new(threshold);
        Self { precision, recall }
    }
}

impl Default for F1Score {
    /// Defaults to a 0.5 decision threshold.
    fn default() -> Self {
        Self::new(0.5)
    }
}
impl Metric for F1Score {
    /// Compute F1 = 2·P·R / (P + R).
    ///
    /// Returns 0.0 when precision and recall are both zero, so the division
    /// never sees a zero denominator (both sub-metrics are non-negative).
    fn compute(&self, predictions: &Tensor, targets: &Tensor) -> f32 {
        let p = self.precision.compute(predictions, targets);
        let r = self.recall.compute(predictions, targets);
        let denom = p + r;
        if denom == 0.0 {
            0.0
        } else {
            2.0 * p * r / denom
        }
    }

    fn name(&self) -> &'static str {
        "F1"
    }
}