use super::mean_variance::{MeanVariance, MeanVarianceOutput};
use num::ToPrimitive;
use tangram_zip::zip;

/// RegressionMetrics computes metrics used to evaluate regressors.
pub struct RegressionMetrics {
	/// The running mean and variance of the labels.
	mean_variance: MeanVariance,
	/// The running sum of the absolute values of the errors.
	absolute_error: f64,
	/// The running sum of the squared errors.
	squared_error: f64,
}

/// The input to [`RegressionMetrics`].
pub struct RegressionMetricsInput<'a> {
	pub predictions: &'a [f32],
	pub labels: &'a [f32],
}

/// The output from [`RegressionMetrics`].
#[derive(Debug)]
pub struct RegressionMetricsOutput {
	/// The mean squared error is equal to the mean of the squared errors. For a given example, the error is the difference between the true value and the model's predicted value.
	pub mse: f32,
	/// The root mean squared error is equal to the square root of the mean squared error.
	pub rmse: f32,
	/// The mean absolute error is equal to the mean of the absolute values of the errors.
	pub mae: f32,
	/// The r-squared value. See <https://en.wikipedia.org/wiki/Coefficient_of_determination>.
	pub r2: f32,
}
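
// The formulas implemented by `finalize` below, for n examples with
// predictions ŷᵢ and labels yᵢ:
//   mse  = Σᵢ (ŷᵢ - yᵢ)² / n
//   rmse = √mse
//   mae  = Σᵢ |ŷᵢ - yᵢ| / n
//   r2   = 1 - Σᵢ (ŷᵢ - yᵢ)² / (n · variance(y))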

impl Default for RegressionMetrics {
	fn default() -> RegressionMetrics {
		RegressionMetrics {
			mean_variance: MeanVariance::default(),
			absolute_error: 0.0,
			squared_error: 0.0,
		}
	}
}

impl RegressionMetrics {
	pub fn new() -> RegressionMetrics {
		RegressionMetrics::default()
	}

	pub fn update(&mut self, input: RegressionMetricsInput) {
		for (prediction, label) in zip!(input.predictions.iter(), input.labels.iter()) {
			self.mean_variance.update(*label);
			let error = prediction - label;
			let squared_error = error * error;
			// Accumulate |error| so that the mae is the mean of the absolute values
			// of the errors and positive and negative errors do not cancel out.
			self.absolute_error += error.abs() as f64;
			self.squared_error += squared_error as f64;
		}
	}

	pub fn merge(&mut self, other: RegressionMetrics) {
		self.mean_variance.merge(other.mean_variance);
		self.absolute_error += other.absolute_error;
		self.squared_error += other.squared_error;
	}

	pub fn finalize(self) -> RegressionMetricsOutput {
		let MeanVarianceOutput { variance, n, .. } = self.mean_variance.finalize();
		let n = n.to_f64().unwrap();
		let mae = self.absolute_error / n;
		let mse = self.squared_error / n;
		let rmse = mse.sqrt();
		// r2 is one minus the ratio of the sum of the squared errors to the total
		// sum of squares of the labels, which is the labels' variance times n.
		let r2 = 1.0 - self.squared_error / (variance.to_f64().unwrap() * n);
		RegressionMetricsOutput {
			mae: mae.to_f32().unwrap(),
			mse: mse.to_f32().unwrap(),
			r2: r2.to_f32().unwrap(),
			rmse: rmse.to_f32().unwrap(),
		}
	}
}
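
// A minimal usage sketch: metrics are accumulated in batches with `update`,
// optionally combined across workers with `merge`, and turned into final
// values with `finalize`. The expected values below are hand-computed for
// this tiny example; r2 is not asserted because it depends on the variance
// convention used by `MeanVariance`.
#[cfg(test)]
mod test {
	use super::*;

	#[test]
	fn test_regression_metrics() {
		// Accumulate two batches in separate accumulators, then merge them.
		let mut metrics_a = RegressionMetrics::new();
		metrics_a.update(RegressionMetricsInput {
			predictions: &[1.0, 2.0],
			labels: &[2.0, 2.0],
		});
		let mut metrics_b = RegressionMetrics::new();
		metrics_b.update(RegressionMetricsInput {
			predictions: &[3.0],
			labels: &[4.0],
		});
		metrics_a.merge(metrics_b);
		let output = metrics_a.finalize();
		// The errors are -1, 0, and -1, so mae = 2 / 3 and mse = 2 / 3.
		assert!((output.mae - 2.0 / 3.0).abs() < 1e-6);
		assert!((output.mse - 2.0 / 3.0).abs() < 1e-6);
		assert!((output.rmse - (2.0f32 / 3.0).sqrt()).abs() < 1e-6);
	}
}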