//! Categorical cross-entropy loss for multi-class classification.
//! NOTE(review): the original lines here were extraction residue (a bare run
//! of line numbers) and carried no content; replaced with module docs.
// NOTE(review): the original `use` lines were garbled (`use crateTensor;`,
// `use crateLossFunction;`). Paths below are reconstructed from the doc
// example (`rustyml::neural_network::neural_network_trait::LossFunction`);
// confirm the `Tensor` module path against the rest of the crate.
use crate::neural_network::Tensor;
use crate::neural_network::neural_network_trait::LossFunction;
/// Categorical Cross Entropy loss function for multi-class classification.
///
/// Expects `y_true` as one-hot encoded labels and `y_pred` as a probability
/// distribution per sample (e.g. softmax output). The type itself is a
/// stateless unit struct; the actual loss/gradient math lives in its
/// [`LossFunction`] implementation elsewhere in this module.
///
/// # Example
///
/// ```rust
/// use rustyml::neural_network::loss_function::*;
/// use rustyml::neural_network::neural_network_trait::LossFunction;
/// use ndarray::array;
///
/// // Create the loss function
/// let loss_fn = CategoricalCrossEntropy::new();
///
/// // Ground truth labels (one-hot encoded)
/// // Class 0: [1, 0, 0], Class 1: [0, 1, 0], Class 2: [0, 0, 1]
/// let y_true = array![
///     [1.0f32, 0.0f32, 0.0f32], // Sample 1: belongs to class 0
///     [0.0f32, 1.0f32, 0.0f32], // Sample 2: belongs to class 1
///     [0.0f32, 0.0f32, 1.0f32]  // Sample 3: belongs to class 2
/// ].into_dyn();
///
/// // Predicted probabilities (softmax output)
/// let y_pred = array![
///     [0.8f32, 0.1f32, 0.1f32], // Sample 1: high confidence for class 0
///     [0.2f32, 0.7f32, 0.1f32], // Sample 2: good confidence for class 1
///     [0.1f32, 0.2f32, 0.7f32]  // Sample 3: good confidence for class 2
/// ].into_dyn();
///
/// // Compute loss
/// let loss = loss_fn.compute_loss(&y_true, &y_pred);
/// println!("Categorical Cross Entropy Loss: {:.4}", loss);
///
/// // Compute gradient for backpropagation
/// let gradient = loss_fn.compute_grad(&y_true, &y_pred);
/// println!("Gradient shape: {:?}", gradient.shape());
/// ```
// NOTE(review): the original source was truncated to a bare `;` after the
// doc comment, which is invalid Rust. Reconstructed as a unit struct — the
// doc example's `CategoricalCrossEntropy::new()` implies a stateless type
// whose constructor/impl is defined elsewhere in this file. Derives are
// backward-compatible additions appropriate for a zero-sized public type.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct CategoricalCrossEntropy;