pub struct Softmax { /* private fields */ }
Softmax activation function.
The softmax function is defined as: f(x_i) = exp(x_i) / sum_j(exp(x_j))
It transforms a vector of real values into a probability distribution.
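For example, softmax([1.0, 2.0, 3.0]) = [e^1, e^2, e^3] / (e^1 + e^2 + e^3) ≈ [2.718, 7.389, 20.086] / 30.193 ≈ [0.090, 0.245, 0.665]: every entry lies in [0, 1] and the entries sum to 1.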
§Examples
use scirs2_neural::activations::{Softmax, Activation};
use ndarray::arr1;
// Create softmax activation for 1D array (axis 0)
let softmax = Softmax::new(0);
let input = arr1(&[1.0f64, 2.0, 3.0]).into_dyn();
let output = softmax.forward(&input).unwrap();
// Check that the output sums to 1.0
let sum: f64 = output.sum();
assert!((sum - 1.0).abs() < 1e-6);
// Check that all values are between 0 and 1
for val in output.iter() {
    assert!(*val >= 0.0 && *val <= 1.0);
}
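Note that a naive exp/sum can overflow for large inputs; softmax implementations commonly subtract the maximum value first, which leaves the result unchanged because the shift cancels in the ratio. A minimal standalone sketch of that technique (illustrative only; not necessarily what this crate does internally):

use ndarray::Array1;

// Numerically stable softmax over a 1-D array. Subtracting the maximum
// keeps exp() from overflowing; the shift cancels in the ratio, so the
// result is identical to the naive formula.
fn stable_softmax(x: &Array1<f64>) -> Array1<f64> {
    let max = x.fold(f64::NEG_INFINITY, |m, &v| m.max(v));
    let exps = x.mapv(|v| (v - max).exp());
    let sum = exps.sum();
    exps / sum
}

fn main() {
    // Naive exp(1000.0) would overflow to infinity; this stays finite.
    let x = Array1::from(vec![1000.0, 1001.0, 1002.0]);
    println!("{:?}", stable_softmax(&x));
}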
Implementations§
impl Softmax
pub fn new(axis: usize) -> Self
Create a new Softmax activation function.
§Arguments
axis - The axis along which to compute the softmax.
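The axis determines which slices are normalized: for a 2-D input, axis 0 makes each column sum to 1, while axis 1 makes each row sum to 1 (the repository example below uses axis 1). A minimal sketch of the column-wise case, assuming forward behaves as in the examples above:

use scirs2_neural::activations::{Activation, Softmax};
use ndarray::{arr2, Axis};

let input = arr2(&[[1.0f64, 4.0], [2.0, 3.0]]).into_dyn();
let col_wise = Softmax::new(0).forward(&input).unwrap();
// With axis 0, each column is an independent distribution.
for s in col_wise.sum_axis(Axis(0)).iter() {
    assert!((s - 1.0).abs() < 1e-6);
}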
Examples found in repository
examples/test_softmax.rs (line 11)
1use ndarray::{arr1, arr2};
2use scirs2_neural::activations::{Activation, Softmax};
3
4fn main() {
5 println!("Testing softmax implementation...\n");
6
7 // Test case 1: Simple 1D array
8 let input = arr1(&[1.0, 2.0, 3.0]);
9 println!("Input: {:?}", input);
10
11 let softmax = Softmax::new(0);
12 let output = softmax.forward(&input.clone().into_dyn()).unwrap();
13 println!("Softmax output: {:?}", output);
14
15 // Verify that output sums to 1
16 let sum: f64 = output.sum();
17 println!("Sum of softmax: {}", sum);
18 assert!((sum - 1.0).abs() < 1e-6, "Softmax should sum to 1");
19
20 // Test case 2: 2D array (batch processing)
21 println!("\nTest case 2: 2D batch");
22 let input_2d = arr2(&[[1.0, 2.0, 3.0], [3.0, 2.0, 1.0], [2.0, 2.0, 2.0]]);
23 println!("Input 2D:\n{:?}", input_2d);
24
25 // Apply softmax along axis 1 (row-wise)
26 let softmax_2d = Softmax::new(1);
27 let output_2d = softmax_2d.forward(&input_2d.clone().into_dyn()).unwrap();
28 println!("Softmax output 2D:\n{:?}", output_2d);
29
30 // Verify each row sums to 1
31 for i in 0..output_2d.shape()[0] {
32 let row_sum: f64 = output_2d.slice(ndarray::s![i, ..]).sum();
33 println!("Row {} sum: {}", i, row_sum);
34 assert!((row_sum - 1.0).abs() < 1e-6, "Each row should sum to 1");
35 }
36
37 // Test case 3: Gradient computation
38 println!("\nTest case 3: Gradient computation");
39 let grad_output = arr1(&[0.1, 0.2, 0.3]).into_dyn();
40 let forward_output = softmax.forward(&input.clone().into_dyn()).unwrap();
41 let grad_input = softmax.backward(&grad_output, &forward_output).unwrap();
42 println!("Gradient input: {:?}", grad_input);
43
44 println!("\nAll tests passed!");
45}
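Test case 3 above exercises backward, which receives the upstream gradient together with the cached forward output. For softmax, the standard Jacobian-vector product is grad_in_i = y_i * (grad_out_i - sum_j(grad_out_j * y_j)), where y is the softmax output. A minimal sketch of that formula (illustrative; the crate's internal implementation may differ):

use ndarray::{arr1, Array1};

// Softmax backward via the standard Jacobian-vector product:
// grad_in[i] = y[i] * (grad_out[i] - dot(grad_out, y)).
fn softmax_jvp(grad_out: &Array1<f64>, y: &Array1<f64>) -> Array1<f64> {
    let dot = grad_out.dot(y);
    y * &(grad_out - dot)
}

fn main() {
    let y = arr1(&[0.0900, 0.2447, 0.6652]); // approx. softmax([1, 2, 3])
    let g = arr1(&[0.1, 0.2, 0.3]); // upstream gradient from test case 3
    println!("{:?}", softmax_jvp(&g, &y));
}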
Trait Implementations§
impl<F: Float + Debug> Activation<F> for Softmax
impl Copy for Softmax
Auto Trait Implementations§
impl Freeze for Softmax
impl RefUnwindSafe for Softmax
impl Send for Softmax
impl Sync for Softmax
impl Unpin for Softmax
impl UnwindSafe for Softmax
Blanket Implementations§
impl<T> BorrowMut<T> for T where T: ?Sized
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> CloneToUninit for T where T: Clone
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.