// quant-metrics 0.7.0
//
// Pure performance statistics library for trading — Sharpe, Sortino,
// drawdown, VaR, portfolio composition.
//! Portfolio composition — weighted return aggregation.
//!
//! Combines multiple return series with weights into a single portfolio
//! equity curve. Pure math, no I/O.

#[path = "composition_mixed.rs"]
mod composition_mixed;

use std::collections::HashMap;

use chrono::{DateTime, Utc};
use rust_decimal::Decimal;

use crate::MetricsError;

// Re-export analytics and risk_metrics through composition for backward compatibility.
pub use crate::analytics::*;
pub use crate::risk_metrics::*;

// Re-export mixed-frequency composition types and functions.
pub use composition_mixed::{
    compose_mixed, AllocationMethod, ComposeOptions, MixedCompositionResult, RebalanceEvent,
    RebalanceMode, WeightScheduleEntry,
};

// Re-export test-visible internals for composition_tests.rs
#[cfg(test)]
pub(crate) use composition_mixed::{
    compute_hrp_weights, compute_inverse_vol_weights, should_rebalance, RebalanceState,
};

/// Per-leg return lookup: timestamp → return value.
type ReturnLookup = HashMap<DateTime<Utc>, Decimal>;

/// Observation frequency of a return series.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Frequency {
    /// One observation per hour (8760 periods/year).
    Hourly,
    /// One observation per four hours (2190 periods/year).
    FourHour,
    /// One observation per day (365 periods/year — crypto convention).
    Daily,
    /// One observation per week (52 periods/year).
    Weekly,
}

impl Frequency {
    /// Trading periods per year for this frequency.
    ///
    /// Crypto trades 24/7/365, equities ~252 days. We use the crypto
    /// convention (365 days) for daily since the return series doesn't
    /// know asset class. Callers can override via metrics functions.
    pub fn periods_per_year(self) -> u32 {
        match self {
            Self::Hourly => 8760,   // 365 * 24
            Self::FourHour => 2190, // 365 * 6
            Self::Daily => 365,
            Self::Weekly => 52,
        }
    }

    /// Infer frequency from median timestamp spacing.
    ///
    /// Returns `MetricsError::InsufficientData` when fewer than two
    /// timestamps are supplied, or when no pair has positive spacing.
    pub fn infer(timestamps: &[DateTime<Utc>]) -> Result<Self, MetricsError> {
        if timestamps.len() < 2 {
            return Err(MetricsError::InsufficientData {
                required: 2,
                actual: timestamps.len(),
            });
        }

        // Keep positive gaps only: zero/negative spacing (duplicates or
        // out-of-order data) carries no frequency information.
        let mut gap_seconds = Vec::with_capacity(timestamps.len() - 1);
        for pair in timestamps.windows(2) {
            let gap = (pair[1] - pair[0]).num_seconds();
            if gap > 0 {
                gap_seconds.push(gap);
            }
        }

        if gap_seconds.is_empty() {
            // All timestamps coincide: effectively a single distinct point.
            return Err(MetricsError::InsufficientData {
                required: 2,
                actual: 1,
            });
        }

        gap_seconds.sort_unstable();
        let median_gap = gap_seconds[gap_seconds.len() / 2];

        // Bucket the median into the nearest standard frequency.
        let frequency = if median_gap <= 5400 {
            Self::Hourly // <= 1.5h
        } else if median_gap <= 28800 {
            Self::FourHour // <= 8h
        } else if median_gap <= 259_200 {
            Self::Daily // <= 3d
        } else {
            Self::Weekly
        };
        Ok(frequency)
    }
}

/// A single return observation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ReturnPoint {
    /// End of the period this return covers (see `ReturnSeries::from_equity_curve`,
    /// which stamps each return with the later timestamp of the pair).
    pub timestamp: DateTime<Utc>,
    /// Periodic simple return, e.g. `0.01` = +1% for the period.
    pub value: Decimal,
}

/// A labeled time series of periodic returns.
#[derive(Debug, Clone)]
pub struct ReturnSeries {
    /// Human-readable identifier; used in error messages and splice labels.
    pub label: String,
    /// Return observations. NOTE(review): `compose` tolerates unsorted points
    /// (it re-sorts timestamps via a BTreeSet), but ascending order is assumed
    /// by `Frequency::infer` — confirm upstream producers sort.
    pub points: Vec<ReturnPoint>,
    /// Observation frequency shared by all points in this series.
    pub frequency: Frequency,
}

/// A point on the portfolio equity curve.
#[derive(Debug, Clone, Copy)]
pub struct PortfolioEquityPoint {
    /// Observation timestamp. The first point of a curve produced by
    /// `compose` uses a synthetic anchor one second before the real timeline.
    pub timestamp: DateTime<Utc>,
    /// Equity value (portfolio- or leg-level) at this timestamp.
    pub value: Decimal,
}

/// Result of basic (equal-frequency) portfolio composition.
#[derive(Debug, Clone)]
pub struct CompositionResult {
    /// Portfolio-level equity curve; the first point holds the starting capital.
    pub equity_curve: Vec<PortfolioEquityPoint>,
    /// Per-leg equity curves, indexed in the same order as `leg_labels`.
    /// Each starts at `weight * capital` and compounds on that leg's returns alone.
    pub leg_equity_curves: Vec<Vec<PortfolioEquityPoint>>,
    /// Annualization factor implied by the legs' shared frequency.
    pub periods_per_year: u32,
    /// Leg labels, aligned with `leg_equity_curves`.
    pub leg_labels: Vec<String>,
}

impl CompositionResult {
    /// Extract portfolio-level equity values for feeding to metrics functions.
    pub fn equity_values(&self) -> Vec<Decimal> {
        let mut values = Vec::with_capacity(self.equity_curve.len());
        for point in &self.equity_curve {
            values.push(point.value);
        }
        values
    }

    /// Extract per-leg equity values for feeding to metrics functions.
    ///
    /// Returns `None` when `leg_index` is out of range.
    pub fn leg_equity_values(&self, leg_index: usize) -> Option<Vec<Decimal>> {
        let curve = self.leg_equity_curves.get(leg_index)?;
        Some(curve.iter().map(|point| point.value).collect())
    }
}

impl ReturnSeries {
    /// Derive a return series from an equity curve.
    ///
    /// Returns `r(t) = (EC(t) - EC(t-1)) / EC(t-1)` for each consecutive pair.
    /// Frequency is inferred from timestamp spacing.
    ///
    /// Errors when the slices differ in length or hold fewer than two points.
    pub fn from_equity_curve<S: Into<String>>(
        label: S,
        timestamps: &[DateTime<Utc>],
        values: &[Decimal],
    ) -> Result<Self, MetricsError> {
        if timestamps.len() != values.len() {
            return Err(MetricsError::InvalidParameter(
                "timestamps and values must have same length".into(),
            ));
        }
        if timestamps.len() < 2 {
            return Err(MetricsError::InsufficientData {
                required: 2,
                actual: timestamps.len(),
            });
        }

        let frequency = Frequency::infer(timestamps)?;

        // Simple return per consecutive pair, stamped with the period's end.
        // Pairs whose base value is zero are skipped to avoid division by zero.
        let mut points = Vec::with_capacity(timestamps.len() - 1);
        for i in 1..timestamps.len() {
            let base = values[i - 1];
            if base != Decimal::ZERO {
                points.push(ReturnPoint {
                    timestamp: timestamps[i],
                    value: (values[i] - base) / base,
                });
            }
        }

        Ok(Self {
            label: label.into(),
            points,
            frequency,
        })
    }
}

/// Validate that all legs share one frequency and weights sum to 1.0.
///
/// Returns the common frequency on success.
fn validate_compose_inputs(legs: &[(&ReturnSeries, Decimal)]) -> Result<Frequency, MetricsError> {
    let Some((first_series, _)) = legs.first() else {
        return Err(MetricsError::InvalidParameter(
            "at least one leg required".into(),
        ));
    };

    // Every leg must match the first leg's frequency.
    let frequency = first_series.frequency;
    if let Some((mismatched, _)) = legs.iter().find(|(series, _)| series.frequency != frequency) {
        return Err(MetricsError::InvalidParameter(format!(
            "frequency mismatch: leg '{}' is {:?} but leg '{}' is {:?}",
            first_series.label, frequency, mismatched.label, mismatched.frequency,
        )));
    }

    // Weights must sum to 1.0 within a small absolute tolerance.
    let weight_sum: Decimal = legs.iter().map(|(_, w)| w).sum();
    let tolerance = Decimal::new(1, 3); // 0.001
    if (weight_sum - Decimal::ONE).abs() > tolerance {
        return Err(MetricsError::InvalidParameter(format!(
            "weights sum to {weight_sum}, expected 1.0 (tolerance: {tolerance})",
        )));
    }

    Ok(frequency)
}

type Timeline = Vec<DateTime<Utc>>;
type ReturnLookups = Vec<HashMap<DateTime<Utc>, Decimal>>;

/// Build the union timeline and per-leg return lookups for `compose`.
///
/// Errors when no leg contributes any timestamp.
fn build_compose_timeline(
    legs: &[(&ReturnSeries, Decimal)],
) -> Result<(Timeline, ReturnLookups), MetricsError> {
    use std::collections::BTreeSet;

    // Union of every leg's timestamps; BTreeSet keeps them sorted and deduped.
    let timestamp_union: BTreeSet<DateTime<Utc>> = legs
        .iter()
        .flat_map(|(series, _)| series.points.iter().map(|p| p.timestamp))
        .collect();

    if timestamp_union.is_empty() {
        return Err(MetricsError::InsufficientData {
            required: 1,
            actual: 0,
        });
    }
    let timeline: Vec<DateTime<Utc>> = timestamp_union.into_iter().collect();

    // One (timestamp -> return) map per leg for O(1) alignment lookups.
    let mut leg_lookups: Vec<ReturnLookup> = Vec::with_capacity(legs.len());
    for (series, _) in legs {
        let lookup: ReturnLookup = series
            .points
            .iter()
            .map(|p| (p.timestamp, p.value))
            .collect();
        leg_lookups.push(lookup);
    }

    Ok((timeline, leg_lookups))
}

/// Compose multiple return series into a portfolio equity curve.
///
/// `legs` is a slice of (return series, weight) pairs.
/// `capital` is the starting portfolio value.
///
/// All legs must have the same frequency. Weights must sum to 1.0 (within tolerance).
///
/// The portfolio compounds the weighted sum of per-period returns
/// (`R(t) = Σ w_i * r_i(t)`), which implies per-period rebalancing to the
/// fixed weights; each leg curve compounds its own returns independently.
pub fn compose(
    legs: &[(&ReturnSeries, Decimal)],
    capital: Decimal,
) -> Result<CompositionResult, MetricsError> {
    let frequency = validate_compose_inputs(legs)?;
    let (timeline, leg_lookups) = build_compose_timeline(legs)?;

    // Anchor every curve one second before the first observation so the
    // starting capital appears as an explicit point.
    let anchor_ts = timeline[0] - chrono::Duration::seconds(1);

    let mut equity_curve = vec![PortfolioEquityPoint {
        timestamp: anchor_ts,
        value: capital,
    }];
    equity_curve.reserve(timeline.len());

    // Per-leg curves start at weight * capital.
    let mut leg_values: Vec<Decimal> = legs.iter().map(|(_, w)| *w * capital).collect();
    let mut leg_equity_curves: Vec<Vec<PortfolioEquityPoint>> = leg_values
        .iter()
        .map(|&start| {
            let mut curve = Vec::with_capacity(timeline.len() + 1);
            curve.push(PortfolioEquityPoint {
                timestamp: anchor_ts,
                value: start,
            });
            curve
        })
        .collect();

    let mut portfolio_value = capital;
    for &ts in &timeline {
        let mut period_return = Decimal::ZERO;

        for (i, ((_, weight), lookup)) in legs.iter().zip(leg_lookups.iter()).enumerate() {
            // A leg with no observation at `ts` contributes a zero return.
            let leg_return = lookup.get(&ts).copied().unwrap_or(Decimal::ZERO);
            period_return += *weight * leg_return;

            leg_values[i] *= Decimal::ONE + leg_return;
            leg_equity_curves[i].push(PortfolioEquityPoint {
                timestamp: ts,
                value: leg_values[i],
            });
        }

        portfolio_value *= Decimal::ONE + period_return;
        equity_curve.push(PortfolioEquityPoint {
            timestamp: ts,
            value: portfolio_value,
        });
    }

    Ok(CompositionResult {
        equity_curve,
        leg_equity_curves,
        periods_per_year: frequency.periods_per_year(),
        leg_labels: legs.iter().map(|(series, _)| series.label.clone()).collect(),
    })
}

/// Splice two return series at a given date.
///
/// Returns a new series with returns from `old` before `splice_date`
/// and returns from `new_series` from `splice_date` onward.
pub fn splice_returns(
    old: &ReturnSeries,
    new_series: &ReturnSeries,
    splice_date: DateTime<Utc>,
) -> ReturnSeries {
    // Old points strictly before the splice, then new points at or after it.
    let mut points: Vec<ReturnPoint> = Vec::new();
    for point in &old.points {
        if point.timestamp < splice_date {
            points.push(*point);
        }
    }
    for point in &new_series.points {
        if point.timestamp >= splice_date {
            points.push(*point);
        }
    }

    // Re-infer frequency from the combined series; fall back to daily when
    // the splice leaves too few usable timestamps to infer from.
    let timestamps: Vec<DateTime<Utc>> = points.iter().map(|p| p.timestamp).collect();
    let frequency = Frequency::infer(&timestamps).unwrap_or(Frequency::Daily);

    ReturnSeries {
        label: format!("{}+{}", old.label, new_series.label),
        points,
        frequency,
    }
}

#[cfg(test)]
#[path = "composition_tests.rs"]
mod tests;