use crate::{geonum_mod::Geonum, Angle};
use std::f64::consts::PI;

/// Activation functions applied to a geometric number's magnitude,
/// gated by the cosine of its angle.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Activation {
    ReLU,
    Sigmoid,
    Tanh,
    Identity,
}

/// Machine learning primitives expressed on geometric numbers.
pub trait MachineLearning: Sized {
    /// Builds a regression coefficient from a covariance and a variance.
    fn regression_from(cov_xy: f64, var_x: f64) -> Self;
    /// Applies one perceptron learning step to `self`, treated as the weight.
    fn perceptron_update(&self, learning_rate: f64, error: f64, input: &Self) -> Self;
    /// Computes a neuron pre-activation: the magnitude is
    /// `self.mag * weight.mag + bias.mag`, the angle is `self.angle + weight.angle`.
    fn forward_pass(&self, weight: &Self, bias: &Self) -> Self;
    /// Applies `activation` to the magnitude, leaving the angle unchanged.
    fn activate(&self, activation: Activation) -> Self;
}
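
// Sketch of the intended call pattern (values are illustrative; it assumes
// `Geonum::new(mag, a, b)` constructs a geometric number with angle a*pi/b,
// the convention the tests below rely on):
//
//     let x = Geonum::new(2.0, 1.0, 3.0); // input, angle pi/3
//     let w = Geonum::new(1.5, 1.0, 6.0); // weight, angle pi/6
//     let b = Geonum::new(0.5, 0.0, 1.0); // bias, angle 0
//     let y = x.forward_pass(&w, &b).activate(Activation::ReLU);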
impl MachineLearning for Geonum {
    fn regression_from(cov_xy: f64, var_x: f64) -> Self {
        Geonum {
            mag: (cov_xy.powi(2) / var_x).sqrt(),
            angle: Angle::new(cov_xy.atan2(var_x), PI),
        }
    }
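
    // worked example for `regression_from`: cov_xy = 2 and var_x = 4 give
    // mag = sqrt(2^2 / 4) = 1 and, under the a*pi/b angle convention,
    // angle = atan2(2, 4) ≈ 0.4636 rad (matching the test below)
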
    fn perceptron_update(&self, learning_rate: f64, error: f64, input: &Geonum) -> Self {
        // inputs whose grade exceeds 2 contribute with a flipped sign,
        // so the angle correction is negated for them
        let input_grade = input.angle.grade();
        let sign_x = if input_grade > 2 { -1.0 } else { 1.0 };
        let angle_update = Angle::new(-learning_rate * error * sign_x / PI, 1.0);
        Geonum {
            mag: self.mag + learning_rate * error * input.mag,
            angle: self.angle + angle_update,
        }
    }
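
    // worked example for `perceptron_update`: with learning_rate = 0.1,
    // error = 0.5 and a grade-1 input of magnitude 2 (sign_x = 1.0), the
    // magnitude grows by 0.1 * 0.5 * 2 = 0.1 and the angle shifts by
    // Angle::new(-0.05 / PI, 1.0), i.e. -0.05 rad under the a*pi/b convention
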
    fn forward_pass(&self, weight: &Geonum, bias: &Geonum) -> Self {
        // geometric analogue of w * x + b: magnitudes multiply and the bias
        // magnitude adds, while the input and weight angles compose; the
        // bias angle is not used
        Geonum {
            mag: self.mag * weight.mag + bias.mag,
            angle: self.angle + weight.angle,
        }
    }
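
    // worked example for `forward_pass`, using the values from the test below:
    // |x| = 2 at pi/3, |w| = 1.5 at pi/6, |b| = 0.5 gives
    // mag = 2 * 1.5 + 0.5 = 3.5 at angle pi/3 + pi/6 = pi/2
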
    fn activate(&self, activation: Activation) -> Self {
        match activation {
            // pass the magnitude through only while the angle's cosine is positive
            Activation::ReLU => Geonum {
                mag: if self.angle.grade_angle().cos() > 0.0 {
                    self.mag
                } else {
                    0.0
                },
                angle: self.angle,
            },
            // logistic gate on the cosine keeps the magnitude in (0, mag)
            Activation::Sigmoid => Geonum {
                mag: self.mag / (1.0 + (-self.angle.grade_angle().cos()).exp()),
                angle: self.angle,
            },
            // tanh gate on the cosine bounds the magnitude within ±mag
            Activation::Tanh => Geonum {
                mag: self.mag * self.angle.grade_angle().cos().tanh(),
                angle: self.angle,
            },
            Activation::Identity => *self,
        }
    }
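
    // worked example for `activate`, assuming grade_angle() of pi/4 is pi/4
    // itself (blade 0): the cosine is ~0.707, so ReLU passes the magnitude
    // through, Sigmoid scales it by 1 / (1 + e^(-0.707)) ≈ 0.670, and Tanh
    // scales it by tanh(0.707) ≈ 0.609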
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::geonum_mod::EPSILON;

    #[test]
    fn it_computes_regression_from_covariance() {
        let cov_xy = 2.0;
        let var_x = 4.0;
        let regression = Geonum::regression_from(cov_xy, var_x);
        let expected_mag = (cov_xy.powi(2) / var_x).sqrt();
        assert!((regression.mag - expected_mag).abs() < EPSILON);
        let expected_angle = Angle::new(cov_xy.atan2(var_x), PI);
        assert_eq!(regression.angle, expected_angle);
        assert_eq!(regression.angle.blade(), 0);
    }

    #[test]
    fn it_updates_perceptron_weights() {
        let weight = Geonum::new(1.0, 1.0, 4.0);
        let input = Geonum::new(2.0, 1.0, 6.0);
        let learning_rate = 0.1;
        let error = 0.5;
        let updated_weight = weight.perceptron_update(learning_rate, error, &input);
        let expected_mag = weight.mag + learning_rate * error * input.mag;
        assert!((updated_weight.mag - expected_mag).abs() < EPSILON);
        let input_grade = input.angle.grade();
        let sign_x = if input_grade > 2 { -1.0 } else { 1.0 };
        let angle_update = Angle::new(-learning_rate * error * sign_x / PI, 1.0);
        let expected_angle = weight.angle + angle_update;
        assert_eq!(updated_weight.angle, expected_angle);
        assert_eq!(updated_weight.angle.grade(), weight.angle.grade());
    }

    #[test]
    fn it_performs_neural_network_operations() {
        let input = Geonum::new(2.0, 1.0, 3.0);
        let weight = Geonum::new(1.5, 1.0, 6.0);
        let bias = Geonum::new(0.5, 0.0, 1.0);
        let forward_result = input.forward_pass(&weight, &bias);
        let expected_mag = input.mag * weight.mag + bias.mag;
        assert!((forward_result.mag - expected_mag).abs() < EPSILON);
        let expected_angle = input.angle + weight.angle;
        assert_eq!(forward_result.angle, expected_angle);
        let test_input = Geonum::new(1.0, 1.0, 4.0);
        let relu_result = test_input.activate(Activation::ReLU);
        assert!(relu_result.mag > 0.0);
        let sigmoid_result = test_input.activate(Activation::Sigmoid);
        assert!(sigmoid_result.mag > 0.0 && sigmoid_result.mag < test_input.mag);
        let tanh_result = test_input.activate(Activation::Tanh);
        assert!(tanh_result.mag.abs() <= test_input.mag);
        let identity_result = test_input.activate(Activation::Identity);
        assert_eq!(identity_result.mag, test_input.mag);
        assert_eq!(identity_result.angle, test_input.angle);
        assert_eq!(identity_result.angle.grade(), test_input.angle.grade());
    }
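
    // hedged sketch: chains forward_pass with a sigmoid activation and
    // recomputes the expected values from the same formulas the
    // implementation uses, in the style of the tests above
    #[test]
    fn it_chains_forward_pass_and_activation() {
        let input = Geonum::new(2.0, 1.0, 4.0); // magnitude 2 at pi/4
        let weight = Geonum::new(0.5, 1.0, 4.0); // magnitude 0.5 at pi/4
        let bias = Geonum::new(0.1, 0.0, 1.0); // magnitude 0.1 at angle 0
        let pre = input.forward_pass(&weight, &bias);
        assert!((pre.mag - (input.mag * weight.mag + bias.mag)).abs() < EPSILON);
        let post = pre.activate(Activation::Sigmoid);
        let expected_mag = pre.mag / (1.0 + (-pre.angle.grade_angle().cos()).exp());
        assert!((post.mag - expected_mag).abs() < EPSILON);
        assert_eq!(post.angle, pre.angle);
    }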
}