pub fn binary_log_loss<S1, S2, D1, D2>(
y_true: &ArrayBase<S1, D1>,
y_prob: &ArrayBase<S2, D2>,
eps: f64,
) -> Result<f64>
Calculate the binary log loss, also known as binary cross-entropy.
This is the loss function used in binary logistic regression and in neural networks with a sigmoid output.
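The quantity computed is the standard binary cross-entropy. Assuming the result is averaged over the N samples (the usual convention; a summed variant also exists), it is

    L = -\frac{1}{N} \sum_{i=1}^{N} \left[ y_i \ln(p_i) + (1 - y_i) \ln(1 - p_i) \right]

where p_i is the predicted probability that sample i belongs to the positive class, clipped to [eps, 1 - eps] so the logarithms stay finite.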
§Arguments
- y_true: Ground truth binary labels
- y_prob: Predicted probabilities for the positive class
- eps: Small value used to avoid log(0); a minimal reference sketch follows the Returns section
§Returns
- The computed log loss (f64)
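For intuition about how eps and the averaging interact, here is a minimal plain-Rust sketch of the same computation. It assumes each probability is clipped to [eps, 1 - eps] and that the result is the mean per-sample loss; these are assumptions about convention, not a description of this crate's actual implementation.

// Hypothetical reference implementation over plain slices (illustration only).
fn reference_binary_log_loss(y_true: &[f64], y_prob: &[f64], eps: f64) -> f64 {
    let total: f64 = y_true
        .iter()
        .zip(y_prob)
        .map(|(&y, &p)| {
            // Clip the probability so ln() never sees exactly 0 or 1.
            let p = p.clamp(eps, 1.0 - eps);
            -(y * p.ln() + (1.0 - y) * (1.0 - p).ln())
        })
        .sum();
    total / y_true.len() as f64
}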
§Examples
use scirs2_core::ndarray::array;
use scirs2_metrics::classification::binary_log_loss;
// Ground truth labels and predicted probabilities for the positive class
let y_true = array![0, 1, 1, 0];
let y_prob = array![0.1, 0.9, 0.8, 0.3];
// eps = 1e-15 guards against log(0) for probabilities at exactly 0 or 1
let loss = binary_log_loss(&y_true, &y_prob, 1e-15).unwrap();
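With these inputs the per-sample losses are -ln(0.9) ≈ 0.105, -ln(0.9) ≈ 0.105, -ln(0.8) ≈ 0.223, and -ln(0.7) ≈ 0.357. If the function returns the mean over samples (an assumption; a summed variant would give ≈ 0.790), loss is approximately 0.198.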