//! test_softmax: standalone smoke test for the softmax activation.
use scirs2_core::ndarray::{arr1, arr2};
use scirs2_neural::activations_minimal::{Activation, Softmax};

4#[allow(dead_code)]
5fn main() {
6 println!("Testing softmax implementation...\n");
7 let input = arr1(&[1.0, 2.0, 3.0]);
9 println!("Input: {input:?}");
10 let softmax = Softmax::new(0);
11 let output = softmax.forward(&input.clone().into_dyn()).unwrap();
12 println!("Softmax output: {output:?}");
13 let sum: f64 = output.sum();
15 println!("Sum of softmax: {sum}");
16 assert!((sum - 1.0).abs() < 1e-6, "Softmax should sum to 1");
17 println!("\nTest case 2: 2D batch");
19 let input_2d = arr2(&[[1.0, 2.0, 3.0], [3.0, 2.0, 1.0], [2.0, 2.0, 2.0]]);
20 println!("Input 2D:\n{input_2d:?}");
21 let softmax_2d = Softmax::new(1);
23 let output_2d = softmax_2d.forward(&input_2d.clone().into_dyn()).unwrap();
24 println!("Softmax output 2D:\n{output_2d:?}");
25 for i in 0..output_2d.shape()[0] {
27 let row_sum: f64 = output_2d.slice(scirs2_core::ndarray::s![i, ..]).sum();
28 println!("Row {i} sum: {row_sum}");
29 assert!((row_sum - 1.0).abs() < 1e-6, "Each row should sum to 1");
30 }
31 println!("\nTest case 3: Gradient computation");
33 let grad_output = arr1(&[0.1, 0.2, 0.3]).into_dyn();
34 let forward_output = softmax.forward(&input.clone().into_dyn()).unwrap();
35 let grad_input = softmax.backward(&grad_output, &forward_output).unwrap();
36 println!("Gradient input: {grad_input:?}");
37 println!("\nAll tests passed!");
38}