use crate::neural_network::Tensor;
use crate::neural_network::neural_network_trait::LossFunction;
/// Binary cross-entropy loss for binary classifiers whose predictions
/// lie in (0, 1) (e.g. sigmoid outputs).
///
/// Stateless unit struct; `Default` and `new()` are equivalent.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub struct BinaryCrossEntropy;

impl BinaryCrossEntropy {
    /// Creates a new `BinaryCrossEntropy` loss instance.
    pub fn new() -> Self {
        Self {}
    }
}
impl LossFunction for BinaryCrossEntropy {
    /// Mean binary cross-entropy:
    /// `-(1/n) * Σ [y·ln(p) + (1-y)·ln(1-p)]`.
    ///
    /// Predictions are clamped to `[1e-7, 1 - 1e-7]` so `ln` never sees 0.
    /// Returns `0.0` for an empty tensor instead of NaN from a 0/0 division.
    fn compute_loss(&self, y_true: &Tensor, y_pred: &Tensor) -> f32 {
        // Clamp predictions away from {0, 1} to keep ln finite.
        // Note: unlike max().min(), clamp propagates a NaN prediction
        // instead of silently coercing it to the lower bound.
        let mut y_pred_clipped = y_pred.clone();
        y_pred_clipped.par_mapv_inplace(|x| x.clamp(1e-7, 1.0 - 1e-7));
        // Elementwise y·ln(p) + (1-y)·ln(1-p); the identity mapv / to_owned
        // round-trips of the original were dead allocations and are dropped.
        let losses = y_true * &y_pred_clipped.mapv(f32::ln)
            + (1.0 - y_true) * &(1.0 - &y_pred_clipped).mapv(f32::ln);
        let n = losses.len();
        if n == 0 {
            return 0.0; // empty input: avoid -0.0 / 0.0 = NaN
        }
        -losses.sum() / n as f32
    }

    /// Gradient of the mean BCE with respect to the predictions:
    /// `(1/n) * (-y/p + (1-y)/(1-p))`, with `p` clamped exactly as in
    /// [`compute_loss`] so loss and gradient stay consistent.
    fn compute_grad(&self, y_true: &Tensor, y_pred: &Tensor) -> Tensor {
        // Same clamp as compute_loss to avoid division by zero.
        let mut y_pred_clipped = y_pred.clone();
        y_pred_clipped.par_mapv_inplace(|x| x.clamp(1e-7, 1.0 - 1e-7));
        let grad = -y_true / &y_pred_clipped + (1.0 - y_true) / (1.0 - &y_pred_clipped);
        // Divide by the element count so the gradient matches the *mean* loss.
        // (An empty tensor stays empty; no element is ever divided by zero.)
        let n = grad.len() as f32;
        grad / n
    }
}