#![cfg(feature = "neural_network")]
use ndarray::{Array, s};
use rustyml::neural_network::layer::activation_layer::relu::ReLU;
use rustyml::neural_network::layer::activation_layer::sigmoid::Sigmoid;
use rustyml::neural_network::layer::activation_layer::softmax::Softmax;
use rustyml::neural_network::layer::activation_layer::tanh::Tanh;
use rustyml::neural_network::layer::dense::Dense;
use rustyml::neural_network::loss_function::mean_squared_error::MeanSquaredError;
use rustyml::neural_network::neural_network_trait::Layer;
use rustyml::neural_network::optimizer::adam::Adam;
use rustyml::neural_network::optimizer::rms_prop::RMSprop;
use rustyml::neural_network::sequential::Sequential;
#[test]
fn with_activation_test() {
    // Smoke test: a two-layer Dense model (Sigmoid hidden, Softmax output)
    // should compile, print a summary, train for a few epochs, and predict.
    let x = Array::ones((2, 4)).into_dyn();
    let y = Array::ones((2, 1)).into_dyn();
    let mut model = Sequential::new();
    model
        .add(Dense::new(4, 3, Sigmoid::new()).unwrap())
        .add(Dense::new(3, 1, Softmax::new()).unwrap());
    model.compile(
        RMSprop::new(0.001, 0.9, 1e-8).unwrap(),
        MeanSquaredError::new(),
    );
    model.summary();
    model.fit(&x, &y, 3).unwrap();
    // Unwrap the Result so a failing predict() fails the test loudly instead of
    // printing `Err(..)` — consistent with activation_integration_test.
    let prediction = model.predict(&x).unwrap();
    println!("Prediction: {:?}", prediction);
}
#[test]
fn relu_forward_test() {
    // ReLU zeroes out non-positive entries and passes positives through unchanged.
    let values = vec![-2.0, -1.0, 0.0, 1.0, 2.0, 3.0];
    let input = Array::from_shape_vec((2, 3), values).unwrap().into_dyn();
    let mut layer = ReLU::new();
    let output = layer.forward(&input).unwrap();
    let expected = [[0.0, 0.0, 0.0], [1.0, 2.0, 3.0]];
    for r in 0..2 {
        for c in 0..3 {
            assert_eq!(output[[r, c]], expected[r][c]);
        }
    }
}
#[test]
fn relu_backward_test() {
    // The ReLU gradient is blocked (0) where the cached forward input was <= 0
    // and passes the upstream gradient (here all ones) through where it was > 0.
    let input = Array::from_shape_vec((2, 3), vec![-2.0, -1.0, 0.0, 1.0, 2.0, 3.0])
        .unwrap()
        .into_dyn();
    let mut layer = ReLU::new();
    layer.forward(&input).unwrap();
    let upstream = Array::ones((2, 3)).into_dyn();
    let grad = layer.backward(&upstream).unwrap();
    let expected = [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]];
    for r in 0..2 {
        for c in 0..3 {
            assert_eq!(grad[[r, c]], expected[r][c]);
        }
    }
}
#[test]
fn relu_empty_input_test() {
    // Forwarding an empty (0 x 0) array must be rejected with an error.
    let empty = Array::from_shape_vec((0, 0), vec![]).unwrap().into_dyn();
    assert!(ReLU::new().forward(&empty).is_err());
}
#[test]
fn sigmoid_forward_test() {
    // Sigmoid maps large negatives toward 0, zero to exactly 0.5,
    // and large positives toward 1; all outputs stay strictly in (0, 1).
    let input = Array::from_shape_vec((2, 2), vec![-10.0, 0.0, 1.0, 10.0])
        .unwrap()
        .into_dyn();
    let mut layer = Sigmoid::new();
    let output = layer.forward(&input).unwrap();
    assert!(output[[0, 0]] < 0.01);
    assert!((output[[0, 1]] - 0.5).abs() < 1e-5);
    // sigmoid(1) ~= 0.7311
    assert!((output[[1, 0]] - 0.731).abs() < 0.01);
    assert!(output[[1, 1]] > 0.99);
    assert!(output.iter().all(|v| *v > 0.0 && *v < 1.0));
}
#[test]
fn sigmoid_backward_test() {
    // d/dx sigmoid(x) = s(x) * (1 - s(x)), which peaks at exactly 0.25 at x = 0,
    // so with an all-ones upstream gradient every entry lies in (0, 0.25].
    let input = Array::from_shape_vec((2, 2), vec![0.0, 1.0, -1.0, 2.0])
        .unwrap()
        .into_dyn();
    let mut layer = Sigmoid::new();
    layer.forward(&input).unwrap();
    let upstream = Array::ones((2, 2)).into_dyn();
    let grad = layer.backward(&upstream).unwrap();
    assert!((grad[[0, 0]] - 0.25).abs() < 1e-5);
    assert!(grad.iter().all(|v| *v > 0.0 && *v <= 0.25));
}
#[test]
fn sigmoid_empty_input_test() {
    // An empty (0 x 0) input must produce an error, not an empty output.
    let empty = Array::from_shape_vec((0, 0), vec![]).unwrap().into_dyn();
    assert!(Sigmoid::new().forward(&empty).is_err());
}
#[test]
fn tanh_forward_test() {
    // tanh saturates at +/-1, is odd (tanh(-x) = -tanh(x)), and tanh(0) = 0;
    // every output must lie within the closed interval [-1, 1].
    let input = Array::from_shape_vec((2, 3), vec![-10.0, -1.0, 0.0, 1.0, 2.0, 10.0])
        .unwrap()
        .into_dyn();
    let mut layer = Tanh::new();
    let output = layer.forward(&input).unwrap();
    assert!(output[[0, 0]] < -0.99);
    // tanh(-1) ~= -0.7616
    assert!((output[[0, 1]] - (-0.761)).abs() < 0.01);
    assert!(output[[0, 2]].abs() < 1e-5);
    assert!((output[[1, 0]] - 0.761).abs() < 0.01);
    // tanh(2) ~= 0.9640
    assert!((output[[1, 1]] - 0.964).abs() < 0.01);
    assert!(output[[1, 2]] > 0.99);
    assert!(output.iter().all(|v| *v >= -1.0 && *v <= 1.0));
}
#[test]
fn tanh_backward_test() {
    // d/dx tanh(x) = 1 - tanh(x)^2, which equals exactly 1 at x = 0,
    // so with an all-ones upstream gradient every entry lies in (0, 1].
    let input = Array::from_shape_vec((2, 2), vec![0.0, 1.0, -1.0, 2.0])
        .unwrap()
        .into_dyn();
    let mut layer = Tanh::new();
    layer.forward(&input).unwrap();
    let upstream = Array::ones((2, 2)).into_dyn();
    let grad = layer.backward(&upstream).unwrap();
    assert!((grad[[0, 0]] - 1.0).abs() < 1e-5);
    assert!(grad.iter().all(|v| *v > 0.0 && *v <= 1.0));
}
#[test]
fn tanh_empty_input_test() {
    // An empty (0 x 0) input must produce an error, not an empty output.
    let empty = Array::from_shape_vec((0, 0), vec![]).unwrap().into_dyn();
    assert!(Tanh::new().forward(&empty).is_err());
}
#[test]
fn softmax_forward_test() {
    // Each output row is a probability distribution: strictly positive entries
    // summing to 1. A uniform input row ([1, 1, 1]) maps to exactly 1/3 each.
    let input = Array::from_shape_vec((2, 3), vec![1.0, 2.0, 3.0, 1.0, 1.0, 1.0])
        .unwrap()
        .into_dyn();
    let mut layer = Softmax::new();
    let output = layer.forward(&input).unwrap();
    for row in 0..2 {
        let row_sum: f32 = output.slice(s![row, ..]).iter().sum();
        assert!((row_sum - 1.0).abs() < 1e-5);
    }
    assert!(output.iter().all(|v| *v > 0.0 && *v < 1.0));
    for col in 0..3 {
        assert!((output[[1, col]] - 1.0 / 3.0).abs() < 1e-5);
    }
}
#[test]
fn softmax_backward_test() {
    // With a uniform all-ones upstream gradient, each row of the softmax
    // backward pass sums to ~0 (the outputs are constrained to sum to 1).
    let input = Array::from_shape_vec((2, 3), vec![1.0, 2.0, 3.0, 1.0, 1.0, 1.0])
        .unwrap()
        .into_dyn();
    let mut layer = Softmax::new();
    layer.forward(&input).unwrap();
    let upstream = Array::ones((2, 3)).into_dyn();
    let grad = layer.backward(&upstream).unwrap();
    for row in 0..2 {
        let row_sum: f32 = grad.slice(s![row, ..]).iter().sum();
        assert!(row_sum.abs() < 1e-5);
    }
}
#[test]
fn softmax_1d_input_error_test() {
    // Softmax expects a batched (2-D) input; a 1-D array must be rejected.
    let input = Array::from_shape_vec((5,), vec![1.0, 2.0, 3.0, 4.0, 5.0])
        .unwrap()
        .into_dyn();
    assert!(Softmax::new().forward(&input).is_err());
}
#[test]
fn softmax_empty_input_test() {
    // An empty (0 x 0) input must produce an error, not an empty output.
    let empty = Array::from_shape_vec((0, 0), vec![]).unwrap().into_dyn();
    assert!(Softmax::new().forward(&empty).is_err());
}
#[test]
fn activation_numerical_stability_test() {
    // Extreme positive and negative inputs must never yield NaN or infinity
    // from ReLU, Sigmoid, or Tanh (is_finite() == !is_nan() && !is_infinite()).
    let extremes = [
        vec![100.0, 500.0, 1000.0],
        vec![-100.0, -500.0, -1000.0],
    ];
    for values in extremes {
        let input = Array::from_shape_vec((1, 3), values).unwrap().into_dyn();
        let relu_out = ReLU::new().forward(&input).unwrap();
        assert!(relu_out.iter().all(|v| v.is_finite()));
        let sigmoid_out = Sigmoid::new().forward(&input).unwrap();
        assert!(sigmoid_out.iter().all(|v| v.is_finite()));
        let tanh_out = Tanh::new().forward(&input).unwrap();
        assert!(tanh_out.iter().all(|v| v.is_finite()));
    }
}
#[test]
fn activation_layer_properties_test() {
    // layer_type() reports the activation's name; output_shape() is "Unknown"
    // until a forward pass records the actual dimensions.
    let mut relu = ReLU::new();
    assert_eq!(relu.layer_type(), "ReLU");
    assert_eq!(relu.output_shape(), "Unknown");
    let input = Array::ones((2, 3)).into_dyn();
    relu.forward(&input).unwrap();
    assert_eq!(relu.output_shape(), "(2, 3)");
    assert_eq!(Sigmoid::new().layer_type(), "Sigmoid");
    assert_eq!(Tanh::new().layer_type(), "Tanh");
    assert_eq!(Softmax::new().layer_type(), "Softmax");
}
#[test]
fn activation_backward_without_forward_test() {
    // Calling backward() before any forward() means there is no cached
    // activation to differentiate through — every layer must return an error.
    let upstream = Array::ones((2, 3)).into_dyn();
    assert!(ReLU::new().backward(&upstream).is_err());
    assert!(Sigmoid::new().backward(&upstream).is_err());
    assert!(Tanh::new().backward(&upstream).is_err());
    assert!(Softmax::new().backward(&upstream).is_err());
}
#[test]
fn activation_integration_test() {
    // End-to-end: train a three-layer network mixing ReLU, Tanh and Sigmoid,
    // then check the prediction shape and that the final Sigmoid bounds
    // every output to [0, 1].
    let x = Array::from_shape_vec((4, 2), vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])
        .unwrap()
        .into_dyn();
    let y = Array::from_shape_vec((4, 2), vec![0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0])
        .unwrap()
        .into_dyn();
    let mut model = Sequential::new();
    model
        .add(Dense::new(2, 4, ReLU::new()).unwrap())
        .add(Dense::new(4, 4, Tanh::new()).unwrap())
        .add(Dense::new(4, 2, Sigmoid::new()).unwrap());
    model.compile(
        Adam::new(0.01, 0.9, 0.999, 1e-8).unwrap(),
        MeanSquaredError::new(),
    );
    model.fit(&x, &y, 5).unwrap();
    let prediction = model.predict(&x).unwrap();
    assert_eq!(prediction.shape(), &[4, 2]);
    assert!(prediction.iter().all(|v| *v >= 0.0 && *v <= 1.0));
}