#[path = "composition_mixed.rs"]
mod composition_mixed;
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use rust_decimal::Decimal;
use crate::MetricsError;
pub use crate::analytics::*;
pub use crate::risk_metrics::*;
pub use composition_mixed::{
compose_mixed, AllocationMethod, ComposeOptions, MixedCompositionResult, RebalanceEvent,
RebalanceMode, WeightScheduleEntry,
};
#[cfg(test)]
pub(crate) use composition_mixed::{
compute_hrp_weights, compute_inverse_vol_weights, should_rebalance, RebalanceState,
};
/// Maps a return timestamp to its return value for one leg.
type ReturnLookup = HashMap<DateTime<Utc>, Decimal>;
/// Sampling cadence of a return series; determines the annualization
/// factor via [`Frequency::periods_per_year`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Frequency {
    /// One observation per hour (8760 periods/year).
    Hourly,
    /// One observation per four hours (2190 periods/year).
    FourHour,
    /// One observation per day (365 periods/year).
    Daily,
    /// One observation per week (52 periods/year).
    Weekly,
}
impl Frequency {
    /// Number of return periods in a 365-day year for this frequency.
    pub fn periods_per_year(self) -> u32 {
        match self {
            Frequency::Hourly => 8760,
            Frequency::FourHour => 2190,
            Frequency::Daily => 365,
            Frequency::Weekly => 52,
        }
    }

    /// Infers the sampling frequency from the median positive gap between
    /// consecutive timestamps.
    ///
    /// # Errors
    /// Returns [`MetricsError::InsufficientData`] when fewer than two
    /// timestamps are supplied, or when no positive gap exists (all
    /// duplicates or a non-increasing sequence).
    pub fn infer(timestamps: &[DateTime<Utc>]) -> Result<Self, MetricsError> {
        if timestamps.len() < 2 {
            return Err(MetricsError::InsufficientData {
                required: 2,
                actual: timestamps.len(),
            });
        }
        // Gaps in seconds between consecutive samples; non-positive gaps
        // (duplicates or out-of-order pairs) are ignored.
        let mut gaps: Vec<i64> = Vec::with_capacity(timestamps.len() - 1);
        for pair in timestamps.windows(2) {
            let gap = (pair[1] - pair[0]).num_seconds();
            if gap > 0 {
                gaps.push(gap);
            }
        }
        if gaps.is_empty() {
            return Err(MetricsError::InsufficientData {
                required: 2,
                actual: 1,
            });
        }
        gaps.sort_unstable();
        // Upper median: robust against occasional missing bars.
        let median_gap = gaps[gaps.len() / 2];
        // Cutoffs sit between nominal bar sizes: 1.5h, 8h, and 3 days.
        let frequency = if median_gap <= 5400 {
            Frequency::Hourly
        } else if median_gap <= 28800 {
            Frequency::FourHour
        } else if median_gap <= 259200 {
            Frequency::Daily
        } else {
            Frequency::Weekly
        };
        Ok(frequency)
    }
}
/// A single return observation, stamped at the end of its period
/// (as produced by [`ReturnSeries::from_equity_curve`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ReturnPoint {
    /// End-of-period timestamp the return applies to.
    pub timestamp: DateTime<Utc>,
    /// Simple return over the period, e.g. 0.01 for +1%.
    pub value: Decimal,
}
/// A labelled sequence of return points sampled at a single frequency.
#[derive(Debug, Clone)]
pub struct ReturnSeries {
    /// Human-readable identifier; used in error messages and spliced labels.
    pub label: String,
    /// Return observations, expected in timestamp order (not enforced here).
    pub points: Vec<ReturnPoint>,
    /// Sampling frequency shared by all points.
    pub frequency: Frequency,
}
/// One sample of an equity curve: portfolio (or leg) value at a timestamp.
#[derive(Debug, Clone, Copy)]
pub struct PortfolioEquityPoint {
    pub timestamp: DateTime<Utc>,
    /// Account value, in the same units as the starting capital.
    pub value: Decimal,
}
/// Output of [`compose`]: the blended portfolio curve plus per-leg curves.
#[derive(Debug, Clone)]
pub struct CompositionResult {
    /// Portfolio equity over time, starting with a synthetic anchor point.
    pub equity_curve: Vec<PortfolioEquityPoint>,
    /// One independently-compounded equity curve per leg, in input order.
    pub leg_equity_curves: Vec<Vec<PortfolioEquityPoint>>,
    /// Annualization factor derived from the shared leg frequency.
    pub periods_per_year: u32,
    /// Leg labels in input order.
    pub leg_labels: Vec<String>,
}
impl CompositionResult {
    /// Portfolio equity values in timeline order, without timestamps.
    pub fn equity_values(&self) -> Vec<Decimal> {
        let mut values = Vec::with_capacity(self.equity_curve.len());
        for point in &self.equity_curve {
            values.push(point.value);
        }
        values
    }

    /// Equity values for the leg at `leg_index`, or `None` when the index
    /// is out of range.
    pub fn leg_equity_values(&self, leg_index: usize) -> Option<Vec<Decimal>> {
        let curve = self.leg_equity_curves.get(leg_index)?;
        Some(curve.iter().map(|point| point.value).collect())
    }
}
impl ReturnSeries {
    /// Builds a return series from an equity curve.
    ///
    /// Each return point is the simple return between consecutive equity
    /// values, timestamped at the later of the two samples. Pairs whose
    /// starting value is zero are skipped to avoid dividing by zero.
    ///
    /// # Errors
    /// `InvalidParameter` when the slices differ in length;
    /// `InsufficientData` when fewer than two samples are supplied or the
    /// frequency cannot be inferred.
    pub fn from_equity_curve<S: Into<String>>(
        label: S,
        timestamps: &[DateTime<Utc>],
        values: &[Decimal],
    ) -> Result<Self, MetricsError> {
        if timestamps.len() != values.len() {
            return Err(MetricsError::InvalidParameter(
                "timestamps and values must have same length".into(),
            ));
        }
        if timestamps.len() < 2 {
            return Err(MetricsError::InsufficientData {
                required: 2,
                actual: timestamps.len(),
            });
        }
        let frequency = Frequency::infer(timestamps)?;
        let mut points = Vec::with_capacity(timestamps.len() - 1);
        for idx in 1..timestamps.len() {
            let previous = values[idx - 1];
            // A zero starting value would divide by zero; drop that pair.
            if previous == Decimal::ZERO {
                continue;
            }
            points.push(ReturnPoint {
                timestamp: timestamps[idx],
                value: (values[idx] - previous) / previous,
            });
        }
        Ok(Self {
            label: label.into(),
            points,
            frequency,
        })
    }
}
/// Checks that composition legs are non-empty, share one frequency, and
/// carry weights summing to 1.0 within a 0.001 tolerance; returns the
/// shared frequency on success.
fn validate_compose_inputs(legs: &[(&ReturnSeries, Decimal)]) -> Result<Frequency, MetricsError> {
    let (first, _) = legs.first().ok_or_else(|| {
        MetricsError::InvalidParameter("at least one leg required".into())
    })?;
    let frequency = first.frequency;
    // Every remaining leg must match the first leg's frequency.
    for (series, _) in &legs[1..] {
        if series.frequency != frequency {
            return Err(MetricsError::InvalidParameter(format!(
                "frequency mismatch: leg '{}' is {:?} but leg '{}' is {:?}",
                first.label, frequency, series.label, series.frequency,
            )));
        }
    }
    let weight_sum: Decimal = legs.iter().map(|(_, weight)| weight).sum();
    let tolerance = Decimal::new(1, 3); // 0.001
    if (weight_sum - Decimal::ONE).abs() > tolerance {
        return Err(MetricsError::InvalidParameter(format!(
            "weights sum to {weight_sum}, expected 1.0 (tolerance: {tolerance})",
        )));
    }
    Ok(frequency)
}
type Timeline = Vec<DateTime<Utc>>;
type ReturnLookups = Vec<HashMap<DateTime<Utc>, Decimal>>;

/// Collects the sorted, deduplicated union of all leg timestamps plus one
/// timestamp-to-return lookup per leg.
///
/// # Errors
/// `InsufficientData` when no leg contributes any return point.
fn build_compose_timeline(
    legs: &[(&ReturnSeries, Decimal)],
) -> Result<(Timeline, ReturnLookups), MetricsError> {
    use std::collections::BTreeSet;
    // BTreeSet gives sorted, deduplicated timestamps for free.
    let timeline: Timeline = legs
        .iter()
        .flat_map(|(series, _)| series.points.iter().map(|point| point.timestamp))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect();
    if timeline.is_empty() {
        return Err(MetricsError::InsufficientData {
            required: 1,
            actual: 0,
        });
    }
    let mut leg_lookups: ReturnLookups = Vec::with_capacity(legs.len());
    for (series, _) in legs {
        let lookup: ReturnLookup = series
            .points
            .iter()
            .map(|point| (point.timestamp, point.value))
            .collect();
        leg_lookups.push(lookup);
    }
    Ok((timeline, leg_lookups))
}
/// Composes weighted return legs into a single portfolio equity curve.
///
/// The portfolio return at every timestamp is the fixed-weight sum of leg
/// returns (weights are re-applied to returns each period), while each
/// leg's own curve compounds independently from its initial allocation of
/// `weight * capital` — so summing leg curves need not reproduce the
/// portfolio curve after the first period. Legs missing a timestamp
/// contribute a zero return for that period. A synthetic anchor point one
/// second before the first return timestamp seeds every curve at its
/// starting value.
///
/// # Errors
/// Propagates validation failures (no legs, frequency mismatch, weights
/// not summing to 1.0) and `InsufficientData` when no leg has any points.
pub fn compose(
    legs: &[(&ReturnSeries, Decimal)],
    capital: Decimal,
) -> Result<CompositionResult, MetricsError> {
    let frequency = validate_compose_inputs(legs)?;
    let (timeline, leg_lookups) = build_compose_timeline(legs)?;

    // Anchor just before the first observed return.
    let synthetic_t0 = timeline[0] - chrono::Duration::seconds(1);

    let mut equity_curve = Vec::with_capacity(timeline.len() + 1);
    equity_curve.push(PortfolioEquityPoint {
        timestamp: synthetic_t0,
        value: capital,
    });

    // Per-leg running values and curves, each seeded at weight * capital.
    let mut leg_values: Vec<Decimal> = Vec::with_capacity(legs.len());
    let mut leg_equity_curves: Vec<Vec<PortfolioEquityPoint>> = Vec::with_capacity(legs.len());
    for (_, weight) in legs {
        let initial = *weight * capital;
        leg_values.push(initial);
        let mut curve = Vec::with_capacity(timeline.len() + 1);
        curve.push(PortfolioEquityPoint {
            timestamp: synthetic_t0,
            value: initial,
        });
        leg_equity_curves.push(curve);
    }

    let mut current_value = capital;
    for &ts in &timeline {
        let mut portfolio_return = Decimal::ZERO;
        for (i, ((_, weight), lookup)) in legs.iter().zip(&leg_lookups).enumerate() {
            // A leg with no point at this timestamp is treated as flat.
            let leg_return = lookup.get(&ts).copied().unwrap_or(Decimal::ZERO);
            portfolio_return += *weight * leg_return;
            leg_values[i] *= Decimal::ONE + leg_return;
            leg_equity_curves[i].push(PortfolioEquityPoint {
                timestamp: ts,
                value: leg_values[i],
            });
        }
        current_value *= Decimal::ONE + portfolio_return;
        equity_curve.push(PortfolioEquityPoint {
            timestamp: ts,
            value: current_value,
        });
    }

    Ok(CompositionResult {
        equity_curve,
        leg_equity_curves,
        periods_per_year: frequency.periods_per_year(),
        leg_labels: legs.iter().map(|(series, _)| series.label.clone()).collect(),
    })
}
/// Splices two return series at `splice_date`: points strictly before the
/// date come from `old`, points at or after it come from `new_series`.
///
/// The result's frequency is re-inferred from the combined timestamps;
/// when inference fails (fewer than two usable points), it falls back to
/// `Frequency::Daily`. The label is `"<old>+<new>"`.
pub fn splice_returns(
    old: &ReturnSeries,
    new_series: &ReturnSeries,
    splice_date: DateTime<Utc>,
) -> ReturnSeries {
    let mut points: Vec<ReturnPoint> = Vec::new();
    for point in &old.points {
        if point.timestamp < splice_date {
            points.push(*point);
        }
    }
    for point in &new_series.points {
        if point.timestamp >= splice_date {
            points.push(*point);
        }
    }
    let timestamps: Vec<DateTime<Utc>> = points.iter().map(|p| p.timestamp).collect();
    // NOTE(review): the Daily fallback silently masks inference failure on
    // sparse splices — confirm callers accept that best-effort behavior.
    let frequency = Frequency::infer(&timestamps).unwrap_or(Frequency::Daily);
    ReturnSeries {
        label: format!("{}+{}", old.label, new_series.label),
        points,
        frequency,
    }
}
#[cfg(test)]
#[path = "composition_tests.rs"]
mod tests;