1#[path = "composition_mixed.rs"]
7mod composition_mixed;
8
9use std::collections::HashMap;
10
11use chrono::{DateTime, Utc};
12use rust_decimal::Decimal;
13
14use crate::MetricsError;
15
16pub use crate::analytics::*;
18pub use crate::risk_metrics::*;
19
20pub use composition_mixed::{
22 compose_mixed, AllocationMethod, ComposeOptions, MixedCompositionResult, RebalanceEvent,
23 RebalanceMode, WeightScheduleEntry,
24};
25
26#[cfg(test)]
28pub(crate) use composition_mixed::{
29 compute_hrp_weights, compute_inverse_vol_weights, should_rebalance, RebalanceState,
30};
31
/// Maps a period-end timestamp to the return realized over that period.
type ReturnLookup = HashMap<DateTime<Utc>, Decimal>;
34
/// Sampling frequency of a return series, used to pick the annualization
/// factor (see `periods_per_year`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Frequency {
    /// One observation per hour.
    Hourly,
    /// One observation every four hours.
    FourHour,
    /// One observation per day.
    Daily,
    /// One observation per week.
    Weekly,
}
43
44impl Frequency {
45 pub fn periods_per_year(self) -> u32 {
51 match self {
52 Frequency::Hourly => 8760, Frequency::FourHour => 2190, Frequency::Daily => 365,
55 Frequency::Weekly => 52,
56 }
57 }
58
59 pub fn infer(timestamps: &[DateTime<Utc>]) -> Result<Self, MetricsError> {
61 if timestamps.len() < 2 {
62 return Err(MetricsError::InsufficientData {
63 required: 2,
64 actual: timestamps.len(),
65 });
66 }
67
68 let mut gaps: Vec<i64> = timestamps
70 .windows(2)
71 .map(|w| (w[1] - w[0]).num_seconds())
72 .filter(|&g| g > 0)
73 .collect();
74
75 if gaps.is_empty() {
76 return Err(MetricsError::InsufficientData {
77 required: 2,
78 actual: 1,
79 });
80 }
81
82 gaps.sort_unstable();
83 let median = gaps[gaps.len() / 2];
84
85 Ok(match median {
87 0..=5400 => Frequency::Hourly, 5401..=28800 => Frequency::FourHour, 28801..=259200 => Frequency::Daily, _ => Frequency::Weekly,
91 })
92 }
93}
94
/// A single period-return observation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ReturnPoint {
    /// End of the period this return covers.
    pub timestamp: DateTime<Utc>,
    /// Simple (arithmetic) return for the period, e.g. 0.01 == +1%.
    pub value: Decimal,
}
101
/// A labelled sequence of period returns sampled at a single frequency.
#[derive(Debug, Clone)]
pub struct ReturnSeries {
    /// Human-readable identifier (appears in error messages and splice labels).
    pub label: String,
    /// Period returns; presumably in ascending timestamp order — callers
    /// downstream merge these into sorted timelines.
    pub points: Vec<ReturnPoint>,
    /// Sampling frequency shared by all points.
    pub frequency: Frequency,
}
109
/// Portfolio (or single-leg) equity value at a point in time.
#[derive(Debug, Clone, Copy)]
pub struct PortfolioEquityPoint {
    /// Timestamp of the observation.
    pub timestamp: DateTime<Utc>,
    /// Equity value, in the same units as the starting capital.
    pub value: Decimal,
}
116
/// Output of composing several weighted return series into one portfolio.
#[derive(Debug, Clone)]
pub struct CompositionResult {
    /// Combined portfolio equity curve (starts with a synthetic point at the
    /// initial capital, one second before the first return).
    pub equity_curve: Vec<PortfolioEquityPoint>,
    /// Standalone per-leg equity curves, parallel to `leg_labels`.
    pub leg_equity_curves: Vec<Vec<PortfolioEquityPoint>>,
    /// Annualization factor derived from the legs' common frequency.
    pub periods_per_year: u32,
    /// Labels of the composed legs, in input order.
    pub leg_labels: Vec<String>,
}
125
126impl CompositionResult {
127 pub fn equity_values(&self) -> Vec<Decimal> {
129 self.equity_curve.iter().map(|p| p.value).collect()
130 }
131
132 pub fn leg_equity_values(&self, leg_index: usize) -> Option<Vec<Decimal>> {
134 self.leg_equity_curves
135 .get(leg_index)
136 .map(|curve| curve.iter().map(|p| p.value).collect())
137 }
138}
139
140impl ReturnSeries {
141 pub fn from_equity_curve<S: Into<String>>(
146 label: S,
147 timestamps: &[DateTime<Utc>],
148 values: &[Decimal],
149 ) -> Result<Self, MetricsError> {
150 if timestamps.len() != values.len() {
151 return Err(MetricsError::InvalidParameter(
152 "timestamps and values must have same length".into(),
153 ));
154 }
155 if timestamps.len() < 2 {
156 return Err(MetricsError::InsufficientData {
157 required: 2,
158 actual: timestamps.len(),
159 });
160 }
161
162 let frequency = Frequency::infer(timestamps)?;
163
164 let points: Vec<ReturnPoint> = timestamps
165 .windows(2)
166 .zip(values.windows(2))
167 .filter_map(|(ts, vs)| {
168 if vs[0] == Decimal::ZERO {
169 None
170 } else {
171 Some(ReturnPoint {
172 timestamp: ts[1],
173 value: (vs[1] - vs[0]) / vs[0],
174 })
175 }
176 })
177 .collect();
178
179 Ok(Self {
180 label: label.into(),
181 points,
182 frequency,
183 })
184 }
185}
186
187fn validate_compose_inputs(legs: &[(&ReturnSeries, Decimal)]) -> Result<Frequency, MetricsError> {
189 if legs.is_empty() {
190 return Err(MetricsError::InvalidParameter(
191 "at least one leg required".into(),
192 ));
193 }
194
195 let frequency = legs[0].0.frequency;
196 for (series, _) in legs.iter().skip(1) {
197 if series.frequency != frequency {
198 return Err(MetricsError::InvalidParameter(format!(
199 "frequency mismatch: leg '{}' is {:?} but leg '{}' is {:?}",
200 legs[0].0.label, frequency, series.label, series.frequency,
201 )));
202 }
203 }
204
205 let weight_sum: Decimal = legs.iter().map(|(_, w)| w).sum();
206 let tolerance = Decimal::new(1, 3); if (weight_sum - Decimal::ONE).abs() > tolerance {
208 return Err(MetricsError::InvalidParameter(format!(
209 "weights sum to {weight_sum}, expected 1.0 (tolerance: {tolerance})",
210 )));
211 }
212
213 Ok(frequency)
214}
215
/// Sorted, de-duplicated union of all leg timestamps.
type Timeline = Vec<DateTime<Utc>>;
/// One timestamp -> return lookup per leg, in leg order.
type ReturnLookups = Vec<HashMap<DateTime<Utc>, Decimal>>;
218
219fn build_compose_timeline(
221 legs: &[(&ReturnSeries, Decimal)],
222) -> Result<(Timeline, ReturnLookups), MetricsError> {
223 use std::collections::BTreeSet;
224
225 let mut all_timestamps = BTreeSet::new();
226 for (series, _) in legs {
227 for point in &series.points {
228 all_timestamps.insert(point.timestamp);
229 }
230 }
231
232 let timeline: Vec<DateTime<Utc>> = all_timestamps.into_iter().collect();
233 if timeline.is_empty() {
234 return Err(MetricsError::InsufficientData {
235 required: 1,
236 actual: 0,
237 });
238 }
239
240 let leg_lookups: Vec<ReturnLookup> = legs
242 .iter()
243 .map(|(series, _)| {
244 series
245 .points
246 .iter()
247 .map(|p| (p.timestamp, p.value))
248 .collect()
249 })
250 .collect();
251
252 Ok((timeline, leg_lookups))
253}
254
/// Composes weighted return series into a single portfolio equity curve.
///
/// Each period the portfolio earns the weighted sum of leg returns — target
/// weights are re-applied every period, so this models per-period
/// rebalancing. Legs with no observation at a timestamp contribute a zero
/// return for that period. Standalone per-leg equity curves (each seeded
/// with `weight * capital` and compounded independently) are tracked
/// alongside the portfolio curve.
///
/// # Errors
/// Propagates validation errors (empty legs, frequency mismatch, weights not
/// summing to 1.0) and `InsufficientData` when no leg has any points.
pub fn compose(
    legs: &[(&ReturnSeries, Decimal)],
    capital: Decimal,
) -> Result<CompositionResult, MetricsError> {
    let frequency = validate_compose_inputs(legs)?;
    let (timeline, leg_lookups) = build_compose_timeline(legs)?;

    // Synthetic starting point one second before the first return so every
    // curve begins at its initial value.
    let synthetic_t0 = timeline[0] - chrono::Duration::seconds(1);

    let mut equity_curve = Vec::with_capacity(timeline.len() + 1);
    equity_curve.push(PortfolioEquityPoint {
        timestamp: synthetic_t0,
        value: capital,
    });

    // Each leg's standalone curve starts at its share of the capital.
    let mut leg_equity_curves: Vec<Vec<PortfolioEquityPoint>> = legs
        .iter()
        .map(|(_, weight)| {
            let mut curve = Vec::with_capacity(timeline.len() + 1);
            curve.push(PortfolioEquityPoint {
                timestamp: synthetic_t0,
                value: *weight * capital,
            });
            curve
        })
        .collect();
    // Running per-leg equity, mutated in lock-step with the curves above.
    let mut leg_values: Vec<Decimal> = legs.iter().map(|(_, w)| *w * capital).collect();

    let mut current_value = capital;
    for &ts in &timeline {
        let mut portfolio_return = Decimal::ZERO;

        for (i, ((_, weight), lookup)) in legs.iter().zip(leg_lookups.iter()).enumerate() {
            // Legs with no point at `ts` are treated as flat (zero return).
            let leg_return = lookup.get(&ts).copied().unwrap_or(Decimal::ZERO);
            portfolio_return += *weight * leg_return;

            leg_values[i] *= Decimal::ONE + leg_return;
            leg_equity_curves[i].push(PortfolioEquityPoint {
                timestamp: ts,
                value: leg_values[i],
            });
        }

        // Compound the portfolio by the weighted period return.
        current_value *= Decimal::ONE + portfolio_return;
        equity_curve.push(PortfolioEquityPoint {
            timestamp: ts,
            value: current_value,
        });
    }

    let leg_labels = legs.iter().map(|(s, _)| s.label.clone()).collect();

    Ok(CompositionResult {
        equity_curve,
        leg_equity_curves,
        periods_per_year: frequency.periods_per_year(),
        leg_labels,
    })
}
322
323pub fn splice_returns(
328 old: &ReturnSeries,
329 new_series: &ReturnSeries,
330 splice_date: DateTime<Utc>,
331) -> ReturnSeries {
332 let points: Vec<ReturnPoint> = old
333 .points
334 .iter()
335 .filter(|p| p.timestamp < splice_date)
336 .copied()
337 .chain(
338 new_series
339 .points
340 .iter()
341 .filter(|p| p.timestamp >= splice_date)
342 .copied(),
343 )
344 .collect();
345
346 let timestamps: Vec<DateTime<Utc>> = points.iter().map(|p| p.timestamp).collect();
348 let frequency = Frequency::infer(×tamps).unwrap_or(Frequency::Daily);
349
350 ReturnSeries {
351 label: format!("{}+{}", old.label, new_series.label),
352 points,
353 frequency,
354 }
355}
356
357#[cfg(test)]
358#[path = "composition_tests.rs"]
359mod tests;