hyperliquid-backtest 0.1.2

Comprehensive Rust library for backtesting trading strategies with Hyperliquid data, funding rates, and perpetual futures mechanics
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
use chrono::{Duration, Utc};
use hyperliquid_backtest::prelude::*;
use rs_backtester::prelude::*;
use std::fs::File;
use std::io::Write;
use std::collections::HashMap;
use serde::{Serialize, Deserialize};
use tokio::time::sleep;

/// # Advanced Portfolio Management Example
///
/// This example demonstrates sophisticated portfolio management techniques for trading
/// across multiple assets on Hyperliquid, including:
///
/// - Dynamic asset allocation based on volatility and correlation
/// - Portfolio rebalancing with custom schedules and thresholds
/// - Risk-adjusted position sizing across multiple assets
/// - Cross-asset correlation management
/// - Sector-based portfolio construction
/// - Performance attribution by asset class

#[derive(Debug, Clone, Serialize, Deserialize)]
// One row of the portfolio plan: per-asset target weight plus the risk
// metrics that were used to derive it. Serialized to CSV for reporting.
struct PortfolioAllocation {
    // Asset ticker, e.g. "BTC".
    symbol: String,
    // Desired fraction of total capital (0.0–1.0) after adjustments.
    target_weight: f64,
    // Weight actually held; set during the rebalancing simulation.
    current_weight: f64,
    // Share of portfolio volatility attributed to this asset
    // (initialized to 0.0; intended to be filled in later).
    volatility_contribution: f64,
    // Average absolute correlation of this asset against the rest of the universe.
    correlation_score: f64,
    // Maximum dollar position allowed under the VaR-based risk limit.
    max_position_size: f64,
    // Average observed funding rate for the asset (per 8h period).
    funding_rate_score: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
// A sector-style grouping of assets with its own target share of the portfolio.
struct AssetClass {
    // Human-readable class name (mirrors the map key), e.g. "Layer1".
    name: String,
    // Fraction of the portfolio this class should hold (e.g. 0.40 = 40%).
    target_allocation: f64,
    // Fraction currently held; starts at 0.0.
    current_allocation: f64,
    // Tickers of the assets belonging to this class.
    assets: Vec<String>,
    // Class-level performance metric; initialized to 0.0.
    performance: f64,
}

#[tokio::main]
// Entry point: builds an asset universe grouped by class, fetches (or mocks)
// price history, derives risk-parity weights adjusted for correlation,
// funding, and class targets, simulates periodic rebalancing, and exports
// the results to two CSV files.
async fn main() -> Result<()> {
    println!("Advanced Portfolio Management Example");
    println!("====================================\n");

    // Define asset universe with asset classes
    let mut asset_classes = HashMap::new();
    
    // Layer 1 assets
    asset_classes.insert("Layer1", AssetClass {
        name: "Layer1".to_string(),
        target_allocation: 0.40, // 40% allocation to Layer 1
        current_allocation: 0.0,
        assets: vec!["BTC".to_string(), "ETH".to_string()],
        performance: 0.0,
    });
    
    // Layer 2 assets
    asset_classes.insert("Layer2", AssetClass {
        name: "Layer2".to_string(),
        target_allocation: 0.30, // 30% allocation to Layer 2
        current_allocation: 0.0,
        assets: vec!["SOL".to_string(), "AVAX".to_string()],
        performance: 0.0,
    });
    
    // DeFi assets
    asset_classes.insert("DeFi", AssetClass {
        name: "DeFi".to_string(),
        target_allocation: 0.20, // 20% allocation to DeFi
        current_allocation: 0.0,
        assets: vec!["UNI".to_string(), "AAVE".to_string()],
        performance: 0.0,
    });
    
    // Meme assets (smaller allocation due to higher risk)
    asset_classes.insert("Meme", AssetClass {
        name: "Meme".to_string(),
        target_allocation: 0.10, // 10% allocation to Meme coins
        current_allocation: 0.0,
        assets: vec!["DOGE".to_string(), "SHIB".to_string()],
        performance: 0.0,
    });
    
    // Collect all assets from all classes
    // NOTE: HashMap iteration order is unspecified, so the order of
    // `all_assets` (and the printed correlation matrix) varies between runs.
    let mut all_assets = Vec::new();
    for asset_class in asset_classes.values() {
        for asset in &asset_class.assets {
            all_assets.push(asset.clone());
        }
    }
    
    println!("Asset universe: {:?}", all_assets);
    
    // Fetch historical data for all assets
    let end_time = Utc::now().timestamp() as u64;
    let start_time = end_time - (90 * 24 * 3600); // 90 days of data
    
    println!("Fetching historical data for {} assets...", all_assets.len());
    
    let mut asset_data = HashMap::new();
    for asset in &all_assets {
        println!("Fetching {} data...", asset);
        
        // For this example, we'll use mock data for assets other than BTC and ETH
        let data = match asset.as_str() {
            "BTC" => HyperliquidData::fetch_btc("1h", start_time, end_time).await?,
            "ETH" => HyperliquidData::fetch_eth("1h", start_time, end_time).await?,
            _ => {
                println!("Using mock data for {}", asset);
                // In a real implementation, you would fetch actual data
                // For this example, we'll clone BTC data and modify it slightly
                // NOTE: this re-fetches BTC once per mocked asset.
                let mut btc_data = HyperliquidData::fetch_btc("1h", start_time, end_time).await?;
                
                // Modify the data slightly to simulate different asset behavior
                for i in 0..btc_data.close.len() {
                    // Deterministic per-symbol scale (not actually random):
                    // derived from the ticker length, so e.g. all 3-letter
                    // symbols get the same factor. Prices are scaled but the
                    // return series stays identical to BTC's.
                    let random_factor = 0.8 + (asset.len() as f64 * 0.1);
                    btc_data.close[i] *= random_factor;
                    btc_data.open[i] *= random_factor;
                    btc_data.high[i] *= random_factor;
                    btc_data.low[i] *= random_factor;
                }
                
                btc_data
            }
        };
        
        println!("  {} data points fetched", data.len());
        asset_data.insert(asset.clone(), data);
    }
    
    // Calculate volatility for each asset
    println!("\nCalculating asset volatilities...");
    let mut asset_volatilities = HashMap::new();
    for (symbol, data) in &asset_data {
        let volatility = calculate_volatility(&data.close);
        asset_volatilities.insert(symbol.clone(), volatility);
        println!("  {}: {:.4}%", symbol, volatility * 100.0);
    }
    
    // Calculate correlation matrix
    println!("\nCalculating correlation matrix...");
    let correlation_matrix = calculate_correlation_matrix(&asset_data);
    
    // Print correlation matrix
    println!("\nCorrelation Matrix:");
    print!("{:10}", "");
    for asset in &all_assets {
        print!("{:10}", asset);
    }
    println!();
    
    for asset1 in &all_assets {
        print!("{:10}", asset1);
        for asset2 in &all_assets {
            // Matrix entries are keyed "ASSET1-ASSET2"; a missing pair
            // falls back to 1.0 (should not happen for assets in the matrix).
            let key = format!("{}-{}", asset1, asset2);
            let correlation = correlation_matrix.get(&key).unwrap_or(&1.0);
            print!("{:10.4}", correlation);
        }
        println!();
    }
    
    // Calculate funding rates for each asset
    println!("\nAnalyzing funding rates...");
    let mut funding_rate_scores = HashMap::new();
    for (symbol, data) in &asset_data {
        let avg_funding_rate = calculate_average_funding_rate(data);
        funding_rate_scores.insert(symbol.clone(), avg_funding_rate);
        println!("  {}: {:.6}% per 8h", symbol, avg_funding_rate * 100.0);
    }
    
    // Calculate optimal portfolio weights using risk parity approach
    println!("\nCalculating optimal portfolio weights using risk parity...");
    let portfolio_weights = calculate_risk_parity_weights(
        &all_assets,
        &asset_volatilities,
        &correlation_matrix,
        &funding_rate_scores
    );
    
    // Print optimal weights
    println!("\nOptimal Portfolio Weights:");
    for (symbol, weight) in &portfolio_weights {
        println!("  {}: {:.2}%", symbol, weight * 100.0);
    }
    
    // Adjust weights to respect asset class allocations
    println!("\nAdjusting weights to respect asset class allocations...");
    let adjusted_weights = adjust_weights_by_asset_class(
        &portfolio_weights,
        &asset_classes
    );
    
    // Print adjusted weights
    println!("\nAdjusted Portfolio Weights:");
    for (symbol, weight) in &adjusted_weights {
        println!("  {}: {:.2}%", symbol, weight * 100.0);
    }
    
    // Calculate maximum position sizes based on risk limits
    println!("\nCalculating maximum position sizes...");
    let max_position_sizes = calculate_max_position_sizes(
        &all_assets,
        &asset_volatilities,
        &correlation_matrix,
        100000.0, // Initial capital
        0.02      // 2% max daily VaR
    );
    
    // Print maximum position sizes
    println!("\nMaximum Position Sizes:");
    for (symbol, max_size) in &max_position_sizes {
        println!("  {}: ${:.2}", symbol, max_size);
    }
    
    // Create portfolio allocations
    let mut portfolio_allocations = Vec::new();
    for asset in &all_assets {
        portfolio_allocations.push(PortfolioAllocation {
            symbol: asset.clone(),
            target_weight: *adjusted_weights.get(asset).unwrap_or(&0.0),
            current_weight: 0.0, // Will be set during rebalancing
            volatility_contribution: 0.0, // Will be calculated
            correlation_score: calculate_correlation_score(asset, &all_assets, &correlation_matrix),
            max_position_size: *max_position_sizes.get(asset).unwrap_or(&0.0),
            funding_rate_score: *funding_rate_scores.get(asset).unwrap_or(&0.0),
        });
    }
    
    // Simulate portfolio rebalancing
    println!("\nSimulating portfolio rebalancing...");
    let rebalanced_portfolio = simulate_portfolio_rebalancing(
        &portfolio_allocations,
        &asset_data,
        100000.0, // Initial capital
        30,       // Rebalance every 30 days
        0.05      // 5% threshold for rebalancing
    ).await?;
    
    // Export portfolio allocation to CSV
    export_portfolio_allocation(&portfolio_allocations)?;
    
    // Export rebalancing results to CSV
    export_rebalancing_results(&rebalanced_portfolio)?;
    
    println!("\nPortfolio management example completed successfully!");
    println!("Portfolio allocation exported to portfolio_allocation.csv");
    println!("Rebalancing results exported to portfolio_rebalancing.csv");
    
    Ok(())
}

/// Sample standard deviation of simple per-bar returns for a price series.
///
/// Returns are computed as `(p[i] - p[i-1]) / p[i-1]` for consecutive bars
/// (hourly in this example, despite the original "daily" wording), and the
/// variance uses the Bessel-corrected divisor `n - 1`.
///
/// Returns 0.0 for fewer than three prices: with exactly two prices there is
/// a single return, so the sample deviation is undefined — the original code
/// divided by zero there and produced NaN.
fn calculate_volatility(prices: &[f64]) -> f64 {
    // Need at least two returns for a defined sample variance.
    if prices.len() < 3 {
        return 0.0;
    }

    // Simple per-bar returns over each consecutive pair of prices.
    let returns: Vec<f64> = prices
        .windows(2)
        .map(|w| (w[1] - w[0]) / w[0])
        .collect();

    // Sample standard deviation (divisor n - 1) of the return series.
    let n = returns.len() as f64;
    let mean = returns.iter().sum::<f64>() / n;
    let variance = returns.iter().map(|r| (r - mean).powi(2)).sum::<f64>() / (n - 1.0);

    variance.sqrt()
}

/// Builds the full pairwise correlation matrix over every asset's
/// close-price return series, keyed by "ASSET1-ASSET2".
///
/// The diagonal is fixed at 1.0. Series of unequal length are compared over
/// their common prefix. Return series are computed once per asset up front.
fn calculate_correlation_matrix(asset_data: &HashMap<String, HyperliquidData>) -> HashMap<String, f64> {
    let assets: Vec<String> = asset_data.keys().cloned().collect();

    // Precompute each asset's simple per-bar return series exactly once.
    let mut returns_by_asset: HashMap<&str, Vec<f64>> = HashMap::new();
    for asset in &assets {
        let closes = &asset_data[asset].close;
        let returns: Vec<f64> = closes
            .windows(2)
            .map(|pair| (pair[1] - pair[0]) / pair[0])
            .collect();
        returns_by_asset.insert(asset.as_str(), returns);
    }

    let mut matrix = HashMap::new();
    for first in &assets {
        for second in &assets {
            let key = format!("{}-{}", first, second);
            let value = if first == second {
                // An asset is perfectly correlated with itself.
                1.0
            } else {
                let ra = &returns_by_asset[first.as_str()];
                let rb = &returns_by_asset[second.as_str()];
                // Align the two series on their shared length.
                let len = ra.len().min(rb.len());
                calculate_correlation(&ra[..len], &rb[..len])
            };
            matrix.insert(key, value);
        }
    }

    matrix
}

/// Pearson correlation coefficient between two equal-length series.
///
/// Returns 0.0 when the slices differ in length, hold fewer than two points,
/// or when either series has zero variance (correlation undefined there).
fn calculate_correlation(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;

    // Accumulate covariance and both sums of squares in one pass.
    let mut cov = 0.0;
    let mut ss_x = 0.0;
    let mut ss_y = 0.0;
    for (xi, yi) in x.iter().zip(y) {
        let dx = xi - mean_x;
        let dy = yi - mean_y;
        cov += dx * dy;
        ss_x += dx * dx;
        ss_y += dy * dy;
    }

    let denom = (ss_x * ss_y).sqrt();
    if denom == 0.0 {
        0.0
    } else {
        cov / denom
    }
}

/// Mean of all non-NaN funding-rate observations in the data set.
/// Returns 0.0 when there are no valid observations.
fn calculate_average_funding_rate(data: &HyperliquidData) -> f64 {
    // Keep only real (non-NaN) observations.
    let valid: Vec<f64> = data
        .funding_rates
        .iter()
        .copied()
        .filter(|rate| !rate.is_nan())
        .collect();

    if valid.is_empty() {
        0.0
    } else {
        valid.iter().sum::<f64>() / valid.len() as f64
    }
}

/// Computes portfolio weights with a simplified risk-parity scheme.
///
/// Starts from inverse-volatility weights, penalizes assets that are highly
/// correlated with the rest of the universe, tilts toward assets with
/// favorable funding rates (capped boost/cut), then renormalizes so the
/// weights sum to 1.
fn calculate_risk_parity_weights(
    assets: &[String],
    volatilities: &HashMap<String, f64>,
    correlation_matrix: &HashMap<String, f64>,
    funding_rate_scores: &HashMap<String, f64>
) -> HashMap<String, f64> {
    // Inverse volatility per asset; missing vols default to 0.01 and all
    // vols are floored at 0.001 to avoid blowing up the reciprocal.
    let inv_vol = |asset: &String| -> f64 {
        let vol = volatilities.get(asset).copied().unwrap_or(0.01);
        1.0 / vol.max(0.001)
    };

    let total_inv_vol: f64 = assets.iter().map(inv_vol).sum();

    let mut adjusted: HashMap<String, f64> = HashMap::new();
    let mut total = 0.0;

    for asset in assets {
        // Inverse-volatility base weight.
        let base = inv_vol(asset) / total_inv_vol;

        // Penalize assets highly correlated with the rest of the universe
        // (score in [0, 1] -> factor in [0.5, 1.0]).
        let corr_score = calculate_correlation_score(asset, assets, correlation_matrix);
        let corr_factor = 1.0 - corr_score * 0.5;

        // Positive funding earns a boost capped at +50%; negative funding a
        // cut capped at -30%.
        let funding = funding_rate_scores.get(asset).copied().unwrap_or(0.0);
        let funding_factor = if funding > 0.0 {
            1.0 + (funding * 10.0).min(0.5)
        } else {
            1.0 + (funding * 5.0).max(-0.3)
        };

        let weight = base * corr_factor * funding_factor;
        adjusted.insert(asset.clone(), weight);
        total += weight;
    }

    // Renormalize so the adjusted weights sum to 1.
    for weight in adjusted.values_mut() {
        *weight /= total;
    }

    adjusted
}

/// Average absolute pairwise correlation of `asset` against every other
/// asset in `all_assets`, looked up via "A-B" keys in the matrix.
/// Pairs missing from the matrix are skipped; returns 0.0 when no pairwise
/// entries are found at all.
fn calculate_correlation_score(
    asset: &str,
    all_assets: &[String],
    correlation_matrix: &HashMap<String, f64>
) -> f64 {
    // Gather |corr| for every other asset whose pair exists in the matrix.
    let correlations: Vec<f64> = all_assets
        .iter()
        .filter(|other| asset != other.as_str())
        .filter_map(|other| correlation_matrix.get(&format!("{}-{}", asset, other)))
        .map(|c| c.abs())
        .collect();

    if correlations.is_empty() {
        0.0
    } else {
        correlations.iter().sum::<f64>() / correlations.len() as f64
    }
}

/// Rescales per-asset weights so that each asset class lands on its target
/// allocation, then renormalizes the result to sum to 1.
///
/// Each asset is scaled by (class target / class current weight); classes
/// that currently hold zero weight use a neutral factor of 1.0. Assets that
/// belong to no class are dropped, matching the original behavior.
fn adjust_weights_by_asset_class(
    weights: &HashMap<String, f64>,
    asset_classes: &HashMap<&str, AssetClass>
) -> HashMap<String, f64> {
    // Resolve the (first) class an asset belongs to, if any.
    let class_of = |asset: &String| {
        asset_classes
            .iter()
            .find(|(_, class)| class.assets.contains(asset))
            .map(|(name, _)| *name)
    };

    // Sum the current weight held by each asset class.
    let mut class_allocations: HashMap<&str, f64> = HashMap::new();
    for (asset, weight) in weights {
        if let Some(class_name) = class_of(asset) {
            *class_allocations.entry(class_name).or_insert(0.0) += weight;
        }
    }

    // Per-class scaling factor toward its target allocation.
    let mut class_factors: HashMap<&str, f64> = HashMap::new();
    for (class_name, asset_class) in asset_classes {
        let current = class_allocations.get(class_name).copied().unwrap_or(0.0);
        let factor = if current > 0.0 {
            asset_class.target_allocation / current
        } else {
            1.0
        };
        class_factors.insert(class_name, factor);
    }

    // Scale every asset by its class factor, tracking the new total.
    let mut adjusted: HashMap<String, f64> = HashMap::new();
    let mut total = 0.0;
    for (asset, weight) in weights {
        if let Some(class_name) = class_of(asset) {
            let factor = class_factors.get(class_name).copied().unwrap_or(1.0);
            let scaled = weight * factor;
            adjusted.insert(asset.clone(), scaled);
            total += scaled;
        }
    }

    // Renormalize to a unit sum.
    for weight in adjusted.values_mut() {
        *weight /= total;
    }

    adjusted
}

/// Derives a maximum dollar position per asset from a simplified 95% VaR
/// model (VaR ≈ position × volatility × 1.65), shrinks it for assets highly
/// correlated with the rest of the universe, and caps every position at 20%
/// of the portfolio value.
fn calculate_max_position_sizes(
    assets: &[String],
    volatilities: &HashMap<String, f64>,
    correlation_matrix: &HashMap<String, f64>,
    portfolio_value: f64,
    max_var: f64
) -> HashMap<String, f64> {
    assets
        .iter()
        .map(|asset| {
            // Missing volatilities default to 1%.
            let vol = volatilities.get(asset).copied().unwrap_or(0.01);

            // Higher average correlation -> smaller allowed position
            // (score in [0, 1] -> factor in [0.5, 1.0]).
            let avg_corr = calculate_correlation_score(asset, assets, correlation_matrix);
            let corr_factor = 1.0 - avg_corr * 0.5;

            // Size the position so VaR = position * vol * 1.65 stays within
            // the max_var share of the portfolio.
            let var_limited = (max_var * portfolio_value) / (vol * 1.65);

            // Apply the correlation haircut, then the 20%-of-portfolio cap.
            let size = (var_limited * corr_factor).min(portfolio_value * 0.2);

            (asset.clone(), size)
        })
        .collect()
}

/// Simulates buy-and-hold with scheduled threshold rebalancing.
///
/// Starting from `initial_capital` split by each allocation's target weight,
/// asset values drift with each bar's close-to-close return. Every
/// `rebalance_days` (converted to hourly bars) the current weights are
/// compared to targets; if the largest deviation exceeds
/// `rebalance_threshold`, all assets are reset to their target weights and
/// the snapshot is appended to the returned history.
///
/// Returns the list of allocation snapshots: the initial allocation plus one
/// per rebalance that actually fired (checked-but-skipped dates are not
/// recorded).
///
/// NOTE(review): declared `async` to match the caller's `.await`, but the
/// body performs no awaiting itself.
async fn simulate_portfolio_rebalancing(
    allocations: &[PortfolioAllocation],
    asset_data: &HashMap<String, HyperliquidData>,
    initial_capital: f64,
    rebalance_days: usize,
    rebalance_threshold: f64
) -> Result<Vec<HashMap<String, f64>>> {
    let mut portfolio_value = initial_capital;
    let mut asset_values = HashMap::new();
    let mut rebalancing_history = Vec::new();
    
    // Initialize asset values based on target weights
    for allocation in allocations {
        let asset_value = initial_capital * allocation.target_weight;
        asset_values.insert(allocation.symbol.clone(), asset_value);
    }
    
    // Record initial allocation
    rebalancing_history.push(asset_values.clone());
    
    // Find the shortest data series to align all assets
    let min_length = asset_data.values().map(|data| data.len()).min().unwrap_or(0);
    
    // Convert rebalance_days to hours (assuming hourly data)
    let rebalance_frequency = rebalance_days * 24;
    let mut last_rebalance = 0;
    
    for i in 1..min_length {
        // Calculate current portfolio value
        // (rebuilt from scratch each bar while asset values drift with returns)
        portfolio_value = 0.0;
        
        for (symbol, data) in asset_data {
            // Skip assets whose series ended before bar i (extra safety on
            // top of the min_length alignment above).
            if i >= data.close.len() {
                continue;
            }
            
            let prev_price = data.close[i - 1];
            let curr_price = data.close[i];
            let return_rate = (curr_price - prev_price) / prev_price;
            
            // Update asset value
            if let Some(value) = asset_values.get_mut(symbol) {
                *value *= (1.0 + return_rate);
                portfolio_value += *value;
            }
        }
        
        // Check if rebalancing is needed
        // (only on the schedule; deviations between scheduled checks are ignored)
        let should_rebalance = i - last_rebalance >= rebalance_frequency;
        
        if should_rebalance {
            println!("Scheduled rebalancing at hour {}", i);
            
            // Calculate current weights
            let mut current_weights = HashMap::new();
            for (symbol, value) in &asset_values {
                current_weights.insert(symbol.clone(), value / portfolio_value);
            }
            
            // Check if any weight deviates from target by more than the threshold
            let mut max_deviation = 0.0;
            for allocation in allocations {
                let current_weight = *current_weights.get(&allocation.symbol).unwrap_or(&0.0);
                let deviation = (current_weight - allocation.target_weight).abs();
                max_deviation = max_deviation.max(deviation);
            }
            
            if max_deviation > rebalance_threshold {
                println!("  Rebalancing triggered: max deviation {:.2}% exceeds threshold {:.2}%", 
                         max_deviation * 100.0, rebalance_threshold * 100.0);
                
                // Rebalance to target weights
                // (assumed frictionless: no trading fees or slippage modeled)
                for allocation in allocations {
                    let target_value = portfolio_value * allocation.target_weight;
                    asset_values.insert(allocation.symbol.clone(), target_value);
                }
                
                // Record rebalanced allocation
                rebalancing_history.push(asset_values.clone());
            } else {
                println!("  No rebalancing needed: max deviation {:.2}% below threshold {:.2}%", 
                         max_deviation * 100.0, rebalance_threshold * 100.0);
            }
            
            // The clock resets whether or not a rebalance fired.
            last_rebalance = i;
        }
    }
    
    Ok(rebalancing_history)
}

/// Writes the portfolio allocations to `portfolio_allocation.csv`, one row
/// per asset, preceded by a fixed header line.
fn export_portfolio_allocation(allocations: &[PortfolioAllocation]) -> Result<()> {
    let header = "symbol,target_weight,volatility_contribution,correlation_score,max_position_size,funding_rate_score\n";

    // Format every allocation as one CSV row.
    let rows: String = allocations
        .iter()
        .map(|a| {
            format!(
                "{},{:.4},{:.4},{:.4},{:.2},{:.6}\n",
                a.symbol,
                a.target_weight,
                a.volatility_contribution,
                a.correlation_score,
                a.max_position_size,
                a.funding_rate_score
            )
        })
        .collect();

    let mut file = File::create("portfolio_allocation.csv")?;
    file.write_all(header.as_bytes())?;
    file.write_all(rows.as_bytes())?;

    Ok(())
}

/// Exports the rebalancing history to `portfolio_rebalancing.csv`.
///
/// Each row is one recorded allocation snapshot (`rebalance_id` is the
/// snapshot index); each column is one asset's dollar value. Columns are
/// sorted alphabetically so the file is deterministic — previously they
/// followed `HashMap` iteration order, which varies between runs. Assets
/// missing from a later snapshot are written as 0.00.
///
/// Writes nothing (and returns Ok) when the history is empty.
fn export_rebalancing_results(history: &[HashMap<String, f64>]) -> Result<()> {
    if history.is_empty() {
        return Ok(());
    }

    // Column set from the first snapshot, in deterministic alphabetical order.
    let mut symbols: Vec<String> = history[0].keys().cloned().collect();
    symbols.sort();

    // Header row.
    let mut csv = String::from("rebalance_id");
    for symbol in &symbols {
        csv.push_str(&format!(",{}", symbol));
    }
    csv.push('\n');

    // One data row per recorded snapshot.
    for (i, allocation) in history.iter().enumerate() {
        csv.push_str(&i.to_string());

        for symbol in &symbols {
            let value = allocation.get(symbol).unwrap_or(&0.0);
            csv.push_str(&format!(",{:.2}", value));
        }

        csv.push('\n');
    }

    let mut file = File::create("portfolio_rebalancing.csv")?;
    file.write_all(csv.as_bytes())?;

    Ok(())
}