irithyll 10.0.1

Streaming ML in Rust -- gradient boosted trees, neural architectures (TTT/KAN/MoE/Mamba/SNN), AutoML, kernel methods, and composable pipelines
//! Expectile loss for asymmetric regression.
//!
//! L(y, f) = w * (f - y)^2,  where w = tau if f >= y, else w = 1 - tau.
//!
//! Generalizes squared error loss: tau = 0.5 recovers standard MSE (up to a
//! factor of 2). Because w = tau applies to over-predictions (f >= y), values
//! of tau > 0.5 penalize over-prediction more heavily, producing models that
//! target conditional expectiles below the mean.
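//!
//! A minimal sketch of the asymmetric weighting, assuming this module is
//! mounted at `irithyll::loss::expectile` and using the `(target, prediction)`
//! argument order from the tests below:
//!
//! ```
//! use irithyll::loss::expectile::{ExpectileLoss, Loss};
//!
//! let loss = ExpectileLoss::new(0.9);
//! // Over-prediction (pred > target): w = tau = 0.9, residual = 2.
//! assert!((loss.loss(1.0, 3.0) - 3.6).abs() < 1e-12); // 0.9 * 2^2
//! // Under-prediction (pred < target): w = 1 - tau = 0.1, residual = -2.
//! assert!((loss.loss(3.0, 1.0) - 0.4).abs() < 1e-12); // 0.1 * (-2)^2
//! ```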
//!
//! Unlike quantile (pinball) loss, the expectile loss has a well-defined
//! positive Hessian everywhere, making it natively compatible with second-order
//! gradient boosting (Newton leaf weights work directly).
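//!
//! Concretely, the derivatives with respect to the prediction f are
//! g = 2w(f - y) and h = 2w, so for a single sample the Newton step
//! -g/h = y - f lands exactly on the target. An illustrative check, under
//! the same path assumption as the example above:
//!
//! ```
//! use irithyll::loss::expectile::{ExpectileLoss, Loss};
//!
//! let loss = ExpectileLoss::new(0.9);
//! let (target, pred) = (1.0, 3.0);
//! // Newton step: -g / h = -(2w(pred - target)) / (2w) = target - pred.
//! let step = -loss.gradient(target, pred) / loss.hessian(target, pred);
//! assert!((step - (target - pred)).abs() < 1e-12);
//! ```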

pub use super::{Loss, LossType};
pub use irithyll_core::loss::expectile::*;

#[cfg(test)]
mod tests {
    use super::*;
    use crate::loss::squared::SquaredLoss;

    const EPS: f64 = 1e-12;

    #[test]
    fn test_n_outputs() {
        assert_eq!(ExpectileLoss::new(0.5).n_outputs(), 1);
    }

    #[test]
    fn test_gradient_symmetric_at_half() {
        // tau = 0.5 should match the SquaredLoss gradient exactly (no extra scaling)
        let exp = ExpectileLoss::new(0.5);
        let sq = SquaredLoss;
        // SquaredLoss gradient = pred - target
        // ExpectileLoss gradient at tau=0.5 = 2*0.5*(pred-target) = pred-target
        assert!((exp.gradient(3.0, 5.0) - sq.gradient(3.0, 5.0)).abs() < EPS);
        assert!((exp.gradient(5.0, 3.0) - sq.gradient(5.0, 3.0)).abs() < EPS);
        assert!((exp.gradient(4.0, 4.0) - sq.gradient(4.0, 4.0)).abs() < EPS);
    }

    #[test]
    fn test_gradient_asymmetric() {
        let loss = ExpectileLoss::new(0.9);
        // Over-prediction (pred > target): w = tau = 0.9
        let g_over = loss.gradient(1.0, 3.0); // 2*0.9*(3-1) = 3.6
        assert!((g_over - 3.6).abs() < EPS);

        // Under-prediction (pred < target): w = 1-tau = 0.1
        let g_under = loss.gradient(3.0, 1.0); // 2*0.1*(1-3) = -0.4
        assert!((g_under - (-0.4)).abs() < EPS);
    }

    #[test]
    fn test_hessian_positive_definite() {
        let loss = ExpectileLoss::new(0.9);
        // Over-prediction: h = 2*tau = 1.8
        assert!((loss.hessian(1.0, 3.0) - 1.8).abs() < EPS);
        // Under-prediction: h = 2*(1-tau) = 0.2
        assert!((loss.hessian(3.0, 1.0) - 0.2).abs() < EPS);
        // At prediction == target: h = 2*tau = 1.8 (w = tau by the f >= y convention)
        assert!((loss.hessian(2.0, 2.0) - 1.8).abs() < EPS);

        // Always positive for any tau in (0,1)
        for &tau in &[0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99] {
            let l = ExpectileLoss::new(tau);
            assert!(l.hessian(0.0, 1.0) > 0.0);
            assert!(l.hessian(1.0, 0.0) > 0.0);
            assert!(l.hessian(5.0, 5.0) > 0.0);
        }
    }

    #[test]
    fn test_loss_value() {
        let loss = ExpectileLoss::new(0.9);
        // Over-prediction: w=0.9, r=2, loss = 0.9*4 = 3.6
        assert!((loss.loss(1.0, 3.0) - 3.6).abs() < EPS);
        // Under-prediction: w=0.1, r=-2, loss = 0.1*4 = 0.4
        assert!((loss.loss(3.0, 1.0) - 0.4).abs() < EPS);
        // Exact: loss = 0
        assert!((loss.loss(5.0, 5.0)).abs() < EPS);
    }

    #[test]
    fn test_predict_transform_is_identity() {
        let loss = ExpectileLoss::new(0.5);
        assert!((loss.predict_transform(42.0) - 42.0).abs() < EPS);
        assert!((loss.predict_transform(-3.25) - (-3.25)).abs() < EPS);
    }

    #[test]
    fn test_initial_prediction_is_mean() {
        let loss = ExpectileLoss::new(0.9);
        let targets = [1.0, 2.0, 3.0, 4.0, 5.0];
        assert!((loss.initial_prediction(&targets) - 3.0).abs() < EPS);
    }

    #[test]
    fn test_initial_prediction_empty() {
        let loss = ExpectileLoss::new(0.5);
        assert!((loss.initial_prediction(&[])).abs() < EPS);
    }

    #[test]
    fn test_gradient_is_derivative_of_loss() {
        let loss = ExpectileLoss::new(0.75);
        let target = 2.5;
        let pred = 4.0;
        let h = 1e-6;
        let numerical = (loss.loss(target, pred + h) - loss.loss(target, pred - h)) / (2.0 * h);
        let analytical = loss.gradient(target, pred);
        assert!(
            (numerical - analytical).abs() < 1e-5,
            "numerical={numerical}, analytical={analytical}"
        );

        // Also check under-prediction side
        let pred2 = 1.0;
        let numerical2 = (loss.loss(target, pred2 + h) - loss.loss(target, pred2 - h)) / (2.0 * h);
        let analytical2 = loss.gradient(target, pred2);
        assert!(
            (numerical2 - analytical2).abs() < 1e-5,
            "numerical={numerical2}, analytical={analytical2}"
        );
    }

    #[test]
    fn test_loss_type_returns_some() {
        let loss = ExpectileLoss::new(0.75);
        match loss.loss_type() {
            Some(LossType::Expectile { tau }) => assert!((tau - 0.75).abs() < EPS),
            other => panic!("expected Expectile, got {other:?}"),
        }
    }

    #[test]
    #[should_panic(expected = "tau must be in (0, 1)")]
    fn test_invalid_tau_zero() {
        ExpectileLoss::new(0.0);
    }

    #[test]
    #[should_panic(expected = "tau must be in (0, 1)")]
    fn test_invalid_tau_one() {
        ExpectileLoss::new(1.0);
    }
}