use crate::prelude::*;
use chrono::{DateTime, FixedOffset, TimeZone, Utc};
use mockito::{Mock, Server};
use serde_json::json;
use std::collections::HashMap;
use tokio_test;
use memory_stats::memory_stats;
use sysinfo::{System, SystemExt, ProcessExt};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use tempfile::TempDir;
use tracing::{info, warn, error};
use futures::future::join_all;
/// Thin wrapper around a `mockito` HTTP server that impersonates the
/// Hyperliquid `/info` endpoint for the integration tests below.
struct MockHyperliquidServer {
    // Owned mock server instance; dropped (and shut down) with the wrapper.
    server: Server,
}
impl MockHyperliquidServer {
    /// Start a fresh async mock server.
    async fn new() -> Self {
        Self {
            server: Server::new_async().await,
        }
    }
    /// Build a mock for a `candleSnapshot` POST to `/info` returning three
    /// fixed hourly BTC-style candles (timestamps 2022-01-01 00:00–02:00 UTC).
    ///
    /// NOTE(review): the builder is returned without `.create()` /
    /// `.create_async()` being called, so the expectation is presumably never
    /// mounted on the server — confirm against mockito usage at call sites.
    fn mock_candles_snapshot(&mut self, coin: &str, interval: &str) -> Mock {
        let mock_data = json!([
            {
                "T": 1640995200000i64, "c": "47000.5",
                "h": "47500.0",
                "l": "46500.0",
                "n": 1000,
                "o": "47200.0",
                "t": 1640995200000i64,
                "v": "125.5"
            },
            {
                "T": 1640998800000i64, "c": "47100.0",
                "h": "47300.0",
                "l": "46800.0",
                "n": 950,
                "o": "47000.5",
                "t": 1640998800000i64,
                "v": "98.2"
            },
            {
                "T": 1641002400000i64, "c": "46950.0",
                "h": "47200.0",
                "l": "46700.0",
                "n": 1100,
                "o": "47100.0",
                "t": 1641002400000i64,
                "v": "156.8"
            }
        ]);
        self.server
            .mock("POST", "/info")
            // Only match the exact candleSnapshot request body for this coin/interval.
            .match_body(mockito::Matcher::JsonString(format!(
                r#"{{"type":"candleSnapshot","req":{{"coin":"{}","interval":"{}","startTime":1640995200000,"endTime":1641002400000}}}}"#,
                coin, interval
            )))
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(mock_data.to_string())
    }
    /// Build a mock for a `fundingHistory` POST to `/info` returning three
    /// funding-rate samples at the same three hourly timestamps.
    fn mock_funding_history(&mut self, coin: &str) -> Mock {
        let mock_data = json!([
            {
                "coin": coin,
                "fundingRate": "0.0001",
                "premium": "0.00005",
                "time": 1640995200000i64
            },
            {
                "coin": coin,
                "fundingRate": "0.00015",
                "premium": "0.0001",
                "time": 1640998800000i64
            },
            {
                "coin": coin,
                "fundingRate": "0.00008",
                "premium": "0.00003",
                "time": 1641002400000i64
            }
        ]);
        self.server
            .mock("POST", "/info")
            .match_body(mockito::Matcher::JsonString(format!(
                r#"{{"type":"fundingHistory","req":{{"coin":"{}","startTime":1640995200000}}}}"#,
                coin
            )))
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(mock_data.to_string())
    }
    /// Build a mock that answers any `/info` POST with the given HTTP status
    /// and a `{"error": …}` JSON body.
    fn mock_api_error(&mut self, status_code: usize, error_message: &str) -> Mock {
        self.server
            .mock("POST", "/info")
            .with_status(status_code)
            .with_header("content-type", "application/json")
            .with_body(json!({"error": error_message}).to_string())
    }
    /// Base URL of the running mock server.
    fn url(&self) -> String {
        self.server.url()
    }
}
#[tokio::test]
/// End-to-end data path: builds OHLC + funding fixtures, converts through
/// `to_rs_backtester_data`, and checks funding-rate lookup by timestamp.
async fn test_complete_data_fetching_workflow() {
    // Mocks are registered for symmetry with the real workflow, but the test
    // below operates on locally constructed data and never calls the server.
    let mut mock_server = MockHyperliquidServer::new().await;
    let _candles_mock = mock_server.mock_candles_snapshot("BTC", "1h");
    let _funding_mock = mock_server.mock_funding_history("BTC");
    // NOTE(review): these bounds are currently unused by any assertion below.
    let start_time = 1640995200000; let end_time = 1641002400000;
    // (timestamp_ms, open, high, low, close, volume) — mirrors the mock JSON.
    let mock_ohlc_data = vec![
        (1640995200000, 47200.0, 47500.0, 46500.0, 47000.5, 125.5),
        (1640998800000, 47000.5, 47300.0, 46800.0, 47100.0, 98.2),
        (1641002400000, 47100.0, 47200.0, 46700.0, 46950.0, 156.8),
    ];
    // (timestamp_ms, funding_rate) — mirrors the funding-history mock.
    let mock_funding_data = vec![
        (1640995200000, 0.0001),
        (1640998800000, 0.00015),
        (1641002400000, 0.00008),
    ];
    // Millisecond timestamps → UTC `DateTime<FixedOffset>` (offset 0).
    let datetime: Vec<DateTime<FixedOffset>> = mock_ohlc_data
        .iter()
        .map(|(ts, _, _, _, _, _)| {
            DateTime::from_timestamp(*ts / 1000, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let funding_timestamps: Vec<DateTime<FixedOffset>> = mock_funding_data
        .iter()
        .map(|(ts, _)| {
            DateTime::from_timestamp(*ts / 1000, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    // Columnar OHLCV + funding series, unpacked from the tuples above.
    let hyperliquid_data = HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open: mock_ohlc_data.iter().map(|(_, o, _, _, _, _)| *o).collect(),
        high: mock_ohlc_data.iter().map(|(_, _, h, _, _, _)| *h).collect(),
        low: mock_ohlc_data.iter().map(|(_, _, _, l, _, _)| *l).collect(),
        close: mock_ohlc_data.iter().map(|(_, _, _, _, c, _)| *c).collect(),
        volume: mock_ohlc_data.iter().map(|(_, _, _, _, _, v)| *v).collect(),
        funding_rates: mock_funding_data.iter().map(|(_, fr)| *fr).collect(),
        funding_timestamps,
    };
    // Conversion must preserve length and close prices.
    let rs_data = hyperliquid_data.to_rs_backtester_data();
    assert_eq!(rs_data.close.len(), 3);
    assert_eq!(rs_data.close[0], 47000.5);
    assert_eq!(rs_data.close[2], 46950.0);
    // Lookup at the second timestamp should yield the second funding rate.
    let funding_rate = hyperliquid_data.get_funding_rate_at(datetime[1]);
    assert!(funding_rate.is_some());
    assert!((funding_rate.unwrap() - 0.00015).abs() < 1e-6);
}
#[tokio::test]
/// Full backtest pass over 100 synthetic hourly bars: build data, run an
/// enhanced SMA-cross strategy with funding, and sanity-check the reports.
async fn test_end_to_end_backtesting_workflow() {
    // 100 hourly timestamps starting 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..100)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    // Close series: linear trend plus sinusoidal noise around 47000.
    // (The original kept a mutable `base_price` accumulator whose initial
    // value was a dead store — each iteration fully reassigned it — so the
    // price is computed directly as a pure function of `i`.)
    let prices: Vec<f64> = (0..100)
        .map(|i| {
            let noise = (i as f64 * 0.1).sin() * 100.0;
            let trend = i as f64 * 2.0;
            47000.0 + trend + noise
        })
        .collect();
    let hyperliquid_data = HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        // Open/high/low are fixed offsets from close; volume is constant.
        open: prices.iter().map(|p| p - 10.0).collect(),
        high: prices.iter().map(|p| p + 50.0).collect(),
        low: prices.iter().map(|p| p - 50.0).collect(),
        close: prices.clone(),
        volume: vec![100.0; 100],
        // Slowly oscillating funding rate around 1 bp.
        funding_rates: (0..100).map(|i| 0.0001 + (i as f64 * 0.01).sin() * 0.0001).collect(),
        funding_timestamps: datetime.clone(),
    };
    let strategy = enhanced_sma_cross(10, 20, 0.5);
    let mut backtest = HyperliquidBacktest::new(
        hyperliquid_data,
        strategy,
        10000.0,
        HyperliquidCommission::default(),
    );
    backtest.calculate_with_funding();
    // The trending series should produce a non-zero return, and funding
    // totals must be non-negative in at least one direction.
    let report = backtest.enhanced_report();
    assert!(report.total_return != 0.0);
    assert!(backtest.total_funding_paid >= 0.0 || backtest.total_funding_received >= 0.0);
    let funding_report = backtest.funding_report();
    assert!(!funding_report.funding_payments.is_empty());
    assert!(funding_report.total_funding_paid >= 0.0);
    assert!(funding_report.total_funding_received >= 0.0);
}
#[tokio::test]
/// Error-type formatting: conversion and time-range errors render with their
/// expected category prefixes in `Display` output.
async fn test_api_error_handling() {
    // Register a 500 response on the mock server (builder only; the test
    // exercises the error types below, not the HTTP path).
    let mut mock_server = MockHyperliquidServer::new().await;
    let _error_mock = mock_server.mock_api_error(500, "Internal server error");
    // A data-conversion failure carries the "Data conversion error" prefix.
    let conversion_err =
        HyperliquidBacktestError::DataConversion("Invalid JSON response".to_string());
    assert!(conversion_err.to_string().contains("Data conversion error"));
    // An inverted range (start after end) renders as an invalid time range.
    let range_err = HyperliquidBacktestError::InvalidTimeRange {
        start: 1641002400000,
        end: 1640995200000,
    };
    assert!(range_err.to_string().contains("Invalid time range"));
}
#[tokio::test]
/// Spawns one task per coin that builds a 50-bar synthetic dataset, then
/// verifies all tasks complete and results come back in spawn order.
async fn test_concurrent_data_fetching() {
    let coins = vec!["BTC", "ETH", "SOL"];
    let mut handles = Vec::new();
    for coin in coins {
        // Move an owned copy of the symbol into the task.
        let coin = coin.to_string();
        let handle = tokio::spawn(async move {
            // 50 hourly timestamps from 2022-01-01 00:00:00 UTC.
            let datetime: Vec<DateTime<FixedOffset>> = (0..50)
                .map(|i| {
                    DateTime::from_timestamp(1640995200 + i * 3600, 0)
                        .unwrap()
                        .with_timezone(&FixedOffset::east_opt(0).unwrap())
                })
                .collect();
            // Per-asset base price; unknown symbols fall back to 100.
            let base_price = match coin.as_str() {
                "BTC" => 47000.0,
                "ETH" => 3500.0,
                "SOL" => 150.0,
                _ => 100.0,
            };
            // ±2% sinusoidal wiggle around the base price.
            let prices: Vec<f64> = (0..50)
                .map(|i| base_price + (i as f64 * 0.1).sin() * base_price * 0.02)
                .collect();
            HyperliquidData {
                ticker: coin,
                datetime: datetime.clone(),
                open: prices.iter().map(|p| p - 1.0).collect(),
                high: prices.iter().map(|p| p + 10.0).collect(),
                low: prices.iter().map(|p| p - 10.0).collect(),
                close: prices,
                volume: vec![100.0; 50],
                funding_rates: vec![0.0001; 50],
                funding_timestamps: datetime,
            }
        });
        handles.push(handle);
    }
    // try_join_all preserves spawn order, so indices map back to coins.
    let results: Vec<HyperliquidData> = futures::future::try_join_all(handles)
        .await
        .expect("All tasks should complete successfully");
    assert_eq!(results.len(), 3);
    assert_eq!(results[0].ticker, "BTC");
    assert_eq!(results[1].ticker, "ETH");
    assert_eq!(results[2].ticker, "SOL");
}
#[tokio::test]
/// Funding-rate lookup on deliberately mismatched series lengths: ten hourly
/// OHLC bars versus only five two-hourly funding samples.
async fn test_data_validation_and_consistency() {
    // Ten hourly OHLC timestamps.
    let datetime1: Vec<DateTime<FixedOffset>> = (0..10)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    // Five funding timestamps at a two-hour cadence.
    let datetime2: Vec<DateTime<FixedOffset>> = (0..5)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i * 7200, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let hyperliquid_data = HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime1,
        open: vec![47000.0; 10],
        high: vec![47500.0; 10],
        low: vec![46500.0; 10],
        close: vec![47200.0; 10],
        volume: vec![100.0; 10],
        funding_rates: vec![0.0001; 5],
        funding_timestamps: datetime2,
    };
    // Query one hour in — a time that falls between two funding samples.
    let funding_rate = hyperliquid_data.get_funding_rate_at(
        DateTime::from_timestamp(1640995200 + 3600, 0)
            .unwrap()
            .with_timezone(&FixedOffset::east_opt(0).unwrap()),
    );
    // The original asserted `is_some() || is_none()`, a tautology that could
    // never fail. Every funding rate in the fixture is 0.0001, so whenever
    // the lookup resolves it must return exactly that value.
    if let Some(rate) = funding_rate {
        assert!((rate - 0.0001).abs() < 1e-12, "unexpected funding rate {rate}");
    }
}
#[tokio::test]
/// Regression guard: converted data stays column-aligned and remains
/// consumable by the upstream `rs_backtester` API unchanged.
async fn test_regression_api_compatibility() {
    let hyperliquid_data = create_test_hyperliquid_data();
    let rs_data = hyperliquid_data.to_rs_backtester_data();
    // All columns must have equal length after conversion.
    assert_eq!(rs_data.datetime.len(), rs_data.close.len());
    assert_eq!(rs_data.open.len(), rs_data.close.len());
    assert_eq!(rs_data.high.len(), rs_data.close.len());
    assert_eq!(rs_data.low.len(), rs_data.close.len());
    assert_eq!(rs_data.volume.len(), rs_data.close.len());
    // Feeding the converted data straight into rs_backtester must still work.
    use rs_backtester::prelude::*;
    let strategy = strategies::sma_cross(10, 20);
    let backtest = Backtest::new(rs_data, strategy, 10000.0, Commission::default());
    assert!(backtest.data.close.len() > 0);
}
/// Tracks physical-memory usage across a test run: a baseline snapshot plus
/// the highest reading observed via `update_peak`.
struct MemoryTracker {
    // Baseline at construction; `None` when memory stats are unavailable.
    initial_memory: Option<usize>,
    // Highest physical-memory reading seen so far (bytes).
    peak_memory: usize,
}
impl MemoryTracker {
fn new() -> Self {
let initial = memory_stats().map(|stats| stats.physical_mem);
Self {
initial_memory: initial,
peak_memory: initial.unwrap_or(0),
}
}
fn update_peak(&mut self) {
if let Some(stats) = memory_stats() {
self.peak_memory = self.peak_memory.max(stats.physical_mem);
}
}
fn memory_increase(&self) -> Option<usize> {
self.initial_memory.map(|initial| self.peak_memory.saturating_sub(initial))
}
}
#[tokio::test]
/// Runs backtests over increasing dataset sizes and asserts per-point memory
/// cost and overall memory scaling stay within thresholds.
async fn test_memory_usage_large_datasets() {
    let mut tracker = MemoryTracker::new();
    let sizes = vec![1_000, 10_000, 100_000, 500_000];
    // (size, peak memory increase in bytes) per run.
    let mut memory_usage = Vec::new();
    for size in sizes {
        info!("Testing memory usage with {} data points", size);
        let data = create_large_test_data(size);
        tracker.update_peak();
        let rs_data = data.to_rs_backtester_data();
        tracker.update_peak();
        let strategy = enhanced_sma_cross(20, 50, 0.3);
        let mut backtest = HyperliquidBacktest::new(
            data,
            strategy,
            10000.0,
            HyperliquidCommission::default(),
        );
        tracker.update_peak();
        backtest.calculate_with_funding();
        tracker.update_peak();
        if let Some(increase) = tracker.memory_increase() {
            memory_usage.push((size, increase));
            info!("Memory increase for {} points: {} bytes", size, increase);
            // Integer division: average bytes per data point for this run.
            let bytes_per_point = increase / size;
            assert!(bytes_per_point < 10_000,
                "Memory usage per data point ({} bytes) exceeds threshold", bytes_per_point);
        }
        // Explicit drops so each iteration releases its dataset before the next.
        drop(backtest);
        drop(rs_data);
    }
    // Compare smallest vs largest run: memory should grow no more than ~3x
    // faster than the dataset size itself.
    if memory_usage.len() >= 2 {
        let (small_size, small_mem) = memory_usage[0];
        let (large_size, large_mem) = memory_usage[memory_usage.len() - 1];
        let size_ratio = large_size as f64 / small_size as f64;
        let memory_ratio = large_mem as f64 / small_mem as f64;
        assert!(memory_ratio < size_ratio * 3.0,
            "Memory usage scaling ({:.2}x) exceeds size scaling ({:.2}x) by too much",
            memory_ratio, size_ratio);
    }
}
#[tokio::test]
/// Repeats a full backtest/report/export cycle ten times and warns (does not
/// fail) when resident memory keeps growing between iterations.
async fn test_memory_leak_detection() {
    let mut tracker = MemoryTracker::new();
    let initial_memory = tracker.initial_memory.unwrap_or(0);
    for iteration in 0..10 {
        info!("Memory leak test iteration {}", iteration);
        let data = create_large_test_data(10_000);
        let strategy = funding_arbitrage_strategy(0.001, Default::default());
        let mut backtest = HyperliquidBacktest::new(
            data,
            strategy,
            10000.0,
            HyperliquidCommission::default(),
        );
        backtest.calculate_with_funding();
        // Exercise both report paths and the CSV export; results discarded.
        let _report = backtest.funding_report();
        let _enhanced_report = backtest.enhanced_report();
        let mut csv_buffer = Vec::new();
        let _ = backtest.enhanced_csv_export(&mut csv_buffer);
        tracker.update_peak();
        drop(backtest);
        drop(csv_buffer);
        if let Some(current_stats) = memory_stats() {
            let current_memory = current_stats.physical_mem;
            let growth = current_memory.saturating_sub(initial_memory);
            // Allow up to 10% growth over baseline before flagging; this only
            // logs a warning rather than failing the test.
            let max_allowed_growth = initial_memory / 10; if growth > max_allowed_growth {
                warn!("Potential memory leak detected: {} bytes growth", growth);
            }
        }
    }
}
#[tokio::test]
/// Runs five backtests concurrently with slightly different parameters and
/// bounds the combined peak memory increase.
async fn test_concurrent_memory_usage() {
    let mut tracker = MemoryTracker::new();
    // Each task gets a distinct dataset size and SMA-cross parameterization.
    let tasks = (0..5).map(|i| {
        tokio::spawn(async move {
            let data = create_large_test_data(20_000 + i * 1000);
            let strategy = enhanced_sma_cross(10 + i, 30 + i * 2, 0.2 + i as f64 * 0.1);
            let mut backtest = HyperliquidBacktest::new(
                data,
                strategy,
                10000.0,
                HyperliquidCommission::default(),
            );
            backtest.calculate_with_funding();
            let report = backtest.enhanced_report();
            (i, report.total_return, backtest.total_funding_paid)
        })
    });
    let results = join_all(tasks).await;
    // Sample peak only after all tasks have completed.
    tracker.update_peak();
    for result in results {
        // Panics if any task panicked.
        let (task_id, total_return, funding_paid) = result.unwrap();
        info!("Task {} completed: return={:.2}, funding={:.2}",
            task_id, total_return, funding_paid);
    }
    // Hard cap: combined increase must stay under 500 MB.
    if let Some(increase) = tracker.memory_increase() {
        info!("Concurrent memory usage increase: {} bytes", increase);
        assert!(increase < 500_000_000, "Concurrent memory usage too high: {} bytes", increase);
    }
}
impl MockHyperliquidServer {
    /// Mock a candle-snapshot response containing `size` synthetic hourly bars.
    ///
    /// The request body is matched with `Matcher::Any`, so the coin/interval
    /// arguments are kept only for call-site readability. They were previously
    /// unused (triggering `unused_variables` warnings) and are now
    /// underscore-prefixed to make that explicit.
    fn mock_large_candles_snapshot(&mut self, _coin: &str, _interval: &str, size: usize) -> Mock {
        let mut candles = Vec::new();
        let mut timestamp = 1640995200000i64;
        let mut price = 47000.0;
        for i in 0..size {
            // Bounded sin/cos walk so every bar has a distinct price.
            let noise = (i as f64 * 0.1).sin() * 50.0;
            let trend = (i as f64 * 0.01).cos() * 20.0;
            price += noise + trend;
            candles.push(json!({
                "T": timestamp,
                "c": format!("{:.1}", price),
                "h": format!("{:.1}", price + 25.0),
                "l": format!("{:.1}", price - 25.0),
                "n": 1000 + i,
                "o": format!("{:.1}", price - 5.0),
                "t": timestamp,
                "v": format!("{:.1}", 100.0 + (i as f64 * 0.05).sin() * 20.0)
            }));
            timestamp += 3600000; // advance one hour, in milliseconds
        }
        self.server
            .mock("POST", "/info")
            .match_body(mockito::Matcher::Any)
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(json!(candles).to_string())
    }
    /// Mock an HTTP 429 rate-limit response with a `retry-after` header.
    fn mock_rate_limited_response(&mut self) -> Mock {
        self.server
            .mock("POST", "/info")
            .with_status(429)
            .with_header("content-type", "application/json")
            .with_header("retry-after", "1")
            .with_body(json!({"error": "Rate limit exceeded"}).to_string())
    }
    /// Mock an HTTP 408 request-timeout response.
    fn mock_timeout_response(&mut self) -> Mock {
        self.server
            .mock("POST", "/info")
            .with_status(408)
            .with_header("content-type", "application/json")
            .with_body(json!({"error": "Request timeout"}).to_string())
    }
    /// Mock a 200 response with only a single candle, simulating truncated
    /// data. The `_coin` argument is unused (any request body matches).
    fn mock_partial_data_response(&mut self, _coin: &str) -> Mock {
        let partial_data = json!([
            {
                "T": 1640995200000i64,
                "c": "47000.5",
                "h": "47500.0",
                "l": "46500.0",
                "n": 1000,
                "o": "47200.0",
                "t": 1640995200000i64,
                "v": "125.5"
            }
        ]);
        self.server
            .mock("POST", "/info")
            .match_body(mockito::Matcher::Any)
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(partial_data.to_string())
    }
}
#[tokio::test]
/// Registers a battery of failure-mode mocks and checks that conversion
/// errors carry the expected category prefix for each HTTP status.
async fn test_api_resilience_and_error_recovery() {
    let mut mock_server = MockHyperliquidServer::new().await;
    // Builders for rate-limit / timeout / partial-data responses (registered
    // but not exercised by HTTP in this test).
    let _rate_limit_mock = mock_server.mock_rate_limited_response();
    let _timeout_mock = mock_server.mock_timeout_response();
    let _partial_mock = mock_server.mock_partial_data_response("BTC");
    // Representative 4xx/5xx statuses and their messages.
    let error_scenarios = vec![
        (400, "Bad request"),
        (401, "Unauthorized"),
        (403, "Forbidden"),
        (404, "Not found"),
        (500, "Internal server error"),
        (502, "Bad gateway"),
        (503, "Service unavailable"),
    ];
    for (status_code, error_message) in error_scenarios {
        let _error_mock = mock_server.mock_api_error(status_code, error_message);
        // Only the Display prefix of the error type is asserted here.
        let error = HyperliquidBacktestError::DataConversion(
            format!("HTTP {} error: {}", status_code, error_message)
        );
        assert!(error.to_string().contains("Data conversion error"));
    }
}
#[tokio::test]
/// Timing bounds for a 10k-point dataset: conversion under 5 s, full
/// backtest under 30 s.
async fn test_large_dataset_api_simulation() {
    // Register a large-snapshot mock for parity with the real flow; the
    // timings below operate on locally generated data.
    let mut mock_server = MockHyperliquidServer::new().await;
    let _large_mock = mock_server.mock_large_candles_snapshot("BTC", "1h", 10000);
    let large_data = create_large_test_data(10000);
    // Time the rs_backtester conversion. The result itself is not consumed
    // (underscore-prefixed to avoid the unused-variable warning the original
    // `let rs_data` produced) — only the elapsed time matters here.
    let start_time = std::time::Instant::now();
    let _rs_data = large_data.to_rs_backtester_data();
    let conversion_time = start_time.elapsed();
    info!("Large dataset conversion took: {:?}", conversion_time);
    assert!(conversion_time.as_secs() < 5, "Data conversion took too long: {:?}", conversion_time);
    // Time the full backtest with funding over the same dataset.
    let start_time = std::time::Instant::now();
    let strategy = enhanced_sma_cross(20, 50, 0.3);
    let mut backtest = HyperliquidBacktest::new(
        large_data,
        strategy,
        10000.0,
        HyperliquidCommission::default(),
    );
    backtest.calculate_with_funding();
    let backtest_time = start_time.elapsed();
    info!("Large dataset backtesting took: {:?}", backtest_time);
    assert!(backtest_time.as_secs() < 30, "Backtesting took too long: {:?}", backtest_time);
}
#[tokio::test]
/// Full workflow matrix: three assets × two strategies, validating data
/// shape, report sanity, and CSV export for every combination.
async fn test_complete_trading_workflow() {
    // (coin, interval label, number of bars). NOTE(review): `interval` is only
    // logged; the generated data cadence does not depend on it.
    let test_scenarios = vec![
        ("BTC", "1h", 1000),
        ("ETH", "4h", 500),
        ("SOL", "1d", 100),
    ];
    for (coin, interval, size) in test_scenarios {
        info!("Testing complete workflow for {} {} with {} points", coin, interval, size);
        let data = create_realistic_test_data(coin, size);
        // All columns must match the requested size.
        assert_eq!(data.ticker, coin);
        assert_eq!(data.datetime.len(), size);
        assert_eq!(data.open.len(), size);
        assert_eq!(data.high.len(), size);
        assert_eq!(data.low.len(), size);
        assert_eq!(data.close.len(), size);
        assert_eq!(data.volume.len(), size);
        assert_eq!(data.funding_rates.len(), size);
        let strategies = vec![
            ("SMA Cross", enhanced_sma_cross(10, 30, 0.2)),
            ("Funding Arbitrage", funding_arbitrage_strategy(0.001, Default::default())),
        ];
        for (strategy_name, strategy) in strategies {
            info!("Testing strategy: {}", strategy_name);
            // Data is cloned so each strategy runs on an identical dataset.
            let mut backtest = HyperliquidBacktest::new(
                data.clone(),
                strategy,
                10000.0,
                HyperliquidCommission::default(),
            );
            backtest.calculate_with_funding();
            let enhanced_report = backtest.enhanced_report();
            let funding_report = backtest.funding_report();
            assert!(enhanced_report.total_return.is_finite());
            assert!(funding_report.total_funding_paid >= 0.0);
            assert!(funding_report.total_funding_received >= 0.0);
            // CSV export must produce non-empty output.
            let mut csv_buffer = Vec::new();
            backtest.enhanced_csv_export(&mut csv_buffer).unwrap();
            assert!(!csv_buffer.is_empty());
            info!("Strategy {} completed successfully", strategy_name);
        }
    }
}
#[tokio::test]
/// Backtests four assets concurrently with the same strategy and aggregates
/// the results into simple portfolio-level statistics.
async fn test_multi_asset_portfolio_workflow() {
    let assets = vec!["BTC", "ETH", "SOL", "AVAX"];
    // asset -> (total_return, total_funding_paid)
    let mut portfolio_results = HashMap::new();
    let tasks: Vec<_> = assets.iter().map(|&asset| {
        tokio::spawn(async move {
            let data = create_realistic_test_data(asset, 1000);
            let strategy = enhanced_sma_cross(15, 35, 0.3);
            let mut backtest = HyperliquidBacktest::new(
                data,
                strategy,
                10000.0,
                HyperliquidCommission::default(),
            );
            backtest.calculate_with_funding();
            let report = backtest.enhanced_report();
            let funding_report = backtest.funding_report();
            (asset, report.total_return, funding_report.total_funding_paid)
        })
    }).collect();
    let results = join_all(tasks).await;
    for result in results {
        // Panics if any per-asset task panicked.
        let (asset, total_return, funding_paid) = result.unwrap();
        portfolio_results.insert(asset, (total_return, funding_paid));
        info!("Asset {}: return={:.2}%, funding={:.2}",
            asset, total_return * 100.0, funding_paid);
    }
    assert_eq!(portfolio_results.len(), assets.len());
    // Despite the name, this is the equal-weight *average* return across assets.
    let total_portfolio_return: f64 = portfolio_results.values()
        .map(|(ret, _)| ret)
        .sum::<f64>() / portfolio_results.len() as f64;
    let total_funding_paid: f64 = portfolio_results.values()
        .map(|(_, funding)| funding)
        .sum();
    info!("Portfolio average return: {:.2}%", total_portfolio_return * 100.0);
    info!("Total funding paid: {:.2}", total_funding_paid);
    assert!(total_portfolio_return.is_finite());
}
#[tokio::test]
/// Rate-limit scenario: registers the 429 mock and checks the error message
/// survives into the Display output.
/// NOTE(review): an identical function is defined again later in this file —
/// a duplicate definition, which will not compile; one copy should be
/// removed or renamed.
async fn test_api_rate_limiting_simulation() {
    let mut mock_server = MockHyperliquidServer::new().await;
    let _rate_limit_mock = mock_server.mock_rate_limited_response();
    let error = HyperliquidBacktestError::DataConversion("Rate limit exceeded".to_string());
    assert!(error.to_string().contains("Rate limit exceeded"));
}
#[tokio::test]
/// Network-failure scenarios: for each 5xx status, registers an error mock
/// and checks the conversion-error prefix.
/// NOTE(review): an identical function is defined again later in this file —
/// a duplicate definition, which will not compile; one copy should be
/// removed or renamed.
async fn test_network_failure_resilience() {
    let mut mock_server = MockHyperliquidServer::new().await;
    let failure_scenarios = vec![
        (500, "Internal Server Error"),
        (502, "Bad Gateway"),
        (503, "Service Unavailable"),
        (504, "Gateway Timeout"),
    ];
    for (status_code, error_message) in failure_scenarios {
        let _error_mock = mock_server.mock_api_error(status_code, error_message);
        let error = HyperliquidBacktestError::DataConversion(
            format!("Network error {}: {}", status_code, error_message)
        );
        assert!(error.to_string().contains("Data conversion error"));
    }
}
/// Build a 100-bar hourly BTC fixture with sinusoidal prices around 47000;
/// each bar opens at the previous close, and funding rates oscillate
/// around 1 bp.
fn create_test_hyperliquid_data() -> HyperliquidData {
    let size = 100;
    // Hourly timestamps starting 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let prices: Vec<f64> = (0..size)
        .map(|i| 47000.0 + (i as f64 * 0.1).sin() * 100.0)
        .collect();
    // Open at previous close; the first bar opens at its own close.
    let mut open = Vec::with_capacity(size);
    for i in 0..size {
        open.push(if i == 0 { prices[0] } else { prices[i - 1] });
    }
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 25.0).collect(),
        low: prices.iter().map(|p| p - 25.0).collect(),
        close: prices,
        volume: vec![100.0; size],
        funding_rates: (0..size)
            .map(|i| 0.0001 + (i as f64 * 0.01).sin() * 0.0001)
            .collect(),
        funding_timestamps: datetime,
    }
}
/// Build a `size`-bar hourly BTC fixture with a slow sinusoidal price around
/// 47000 (±200), ±50 high/low bands, constant volume, and a slowly
/// oscillating funding rate around 1 bp.
fn create_large_test_data(size: usize) -> HyperliquidData {
    // Hourly timestamps starting 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let prices: Vec<f64> = (0..size)
        .map(|i| 47000.0 + (i as f64 * 0.01).sin() * 200.0)
        .collect();
    // Open at previous close; the first bar opens at its own close.
    let mut open = Vec::with_capacity(size);
    for i in 0..size {
        open.push(if i == 0 { prices[0] } else { prices[i - 1] });
    }
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 50.0).collect(),
        low: prices.iter().map(|p| p - 50.0).collect(),
        close: prices,
        volume: vec![100.0; size],
        funding_rates: (0..size)
            .map(|i| 0.0001 + (i as f64 * 0.001).sin() * 0.0001)
            .collect(),
        funding_timestamps: datetime,
    }
}
#[tokio::test]
/// Rate-limit scenario (second pass): registers the 429 mock and checks the
/// error message survives into the Display output.
///
/// NOTE(review): this was a byte-for-byte duplicate of
/// `test_api_rate_limiting_simulation` defined earlier in this file — two
/// functions with the same name in one module do not compile (E0428). The
/// test is renamed to keep the file building while preserving its body.
async fn test_api_rate_limiting_simulation_repeated() {
    let mut mock_server = MockHyperliquidServer::new().await;
    let _rate_limit_mock = mock_server.mock_rate_limited_response();
    let error = HyperliquidBacktestError::DataConversion("Rate limit exceeded".to_string());
    assert!(error.to_string().contains("Rate limit exceeded"));
}
#[tokio::test]
/// Network-failure scenarios (second pass): for each 5xx status, registers
/// an error mock and checks the conversion-error prefix.
///
/// NOTE(review): this was a byte-for-byte duplicate of
/// `test_network_failure_resilience` defined earlier in this file — two
/// functions with the same name in one module do not compile (E0428). The
/// test is renamed to keep the file building while preserving its body.
async fn test_network_failure_resilience_repeated() {
    let mut mock_server = MockHyperliquidServer::new().await;
    let failure_scenarios = vec![
        (500, "Internal Server Error"),
        (502, "Bad Gateway"),
        (503, "Service Unavailable"),
        (504, "Gateway Timeout"),
    ];
    for (status_code, error_message) in failure_scenarios {
        let _error_mock = mock_server.mock_api_error(status_code, error_message);
        let error = HyperliquidBacktestError::DataConversion(
            format!("Network error {}: {}", status_code, error_message)
        );
        assert!(error.to_string().contains("Data conversion error"));
    }
}
#[tokio::test]
/// OHLC invariants and lossless conversion: for several dataset shapes,
/// every bar must satisfy high >= {open, close} >= low (pairwise), volume
/// must be non-negative, and conversion must preserve close prices exactly.
async fn test_data_integrity_validation() {
    // (coin, bars, interval label). The interval is descriptive only — the
    // generated data does not depend on it — so it is underscore-prefixed
    // to silence the unused-variable warning the original produced.
    let test_cases = vec![
        ("BTC", 1000, "1h"),
        ("ETH", 2000, "4h"),
        ("SOL", 500, "1d"),
    ];
    for (coin, size, _interval) in test_cases {
        let data = create_realistic_test_data(coin, size);
        // Column shape checks.
        assert_eq!(data.ticker, coin);
        assert_eq!(data.datetime.len(), size);
        assert_eq!(data.open.len(), size);
        assert_eq!(data.high.len(), size);
        assert_eq!(data.low.len(), size);
        assert_eq!(data.close.len(), size);
        assert_eq!(data.volume.len(), size);
        // Per-bar OHLC sanity.
        for i in 0..size {
            assert!(data.high[i] >= data.low[i], "Invalid OHLC at index {}", i);
            assert!(data.high[i] >= data.open[i], "High < Open at index {}", i);
            assert!(data.high[i] >= data.close[i], "High < Close at index {}", i);
            assert!(data.low[i] <= data.open[i], "Low > Open at index {}", i);
            assert!(data.low[i] <= data.close[i], "Low > Close at index {}", i);
            assert!(data.volume[i] >= 0.0, "Negative volume at index {}", i);
        }
        // Conversion must keep close prices bit-for-bit (within 1e-10 slack).
        let rs_data = data.to_rs_backtester_data();
        assert_eq!(rs_data.close.len(), data.close.len());
        for i in 0..size {
            assert!((rs_data.close[i] - data.close[i]).abs() < 1e-10,
                "Price conversion error at index {}", i);
        }
    }
}
#[tokio::test]
/// Stress scenarios (crash, volatility, thin liquidity, funding spike): the
/// engine must produce finite return/drawdown numbers for each.
async fn test_extreme_market_conditions() {
    let extreme_scenarios = vec![
        ("Flash Crash", create_flash_crash_data()),
        ("High Volatility", create_high_volatility_data()),
        ("Low Liquidity", create_low_liquidity_data()),
        ("Funding Rate Spike", create_funding_spike_data()),
    ];
    for (scenario_name, data) in extreme_scenarios {
        info!("Testing extreme scenario: {}", scenario_name);
        let strategy = enhanced_sma_cross(10, 20, 0.3);
        let mut backtest = HyperliquidBacktest::new(
            data,
            strategy,
            10000.0,
            HyperliquidCommission::default(),
        );
        backtest.calculate_with_funding();
        // Finite (not NaN/inf) metrics are the pass criterion — the extreme
        // inputs must not poison the arithmetic.
        let report = backtest.enhanced_report();
        assert!(report.total_return.is_finite(),
            "Scenario {} produced invalid return", scenario_name);
        assert!(report.max_drawdown.is_finite(),
            "Scenario {} produced invalid drawdown", scenario_name);
        info!("Scenario {} handled successfully", scenario_name);
    }
}
#[tokio::test]
/// Stability over a 100k-point series: the backtest must complete within
/// five minutes and yield finite, non-negative report values.
async fn test_long_running_backtest_stability() {
    // NOTE(review): `create_long_time_series_test_data` is defined elsewhere
    // in this crate — not visible in this file section.
    let data = create_long_time_series_test_data(100_000);
    let strategy = enhanced_sma_cross(50, 200, 0.2);
    let start_time = std::time::Instant::now();
    let mut backtest = HyperliquidBacktest::new(
        data,
        strategy,
        10000.0,
        HyperliquidCommission::default(),
    );
    backtest.calculate_with_funding();
    let duration = start_time.elapsed();
    let report = backtest.enhanced_report();
    let funding_report = backtest.funding_report();
    assert!(report.total_return.is_finite());
    assert!(funding_report.total_funding_paid >= 0.0);
    assert!(funding_report.total_funding_received >= 0.0);
    info!("Long-running backtest completed in {:?}", duration);
    // Generous wall-clock ceiling so CI variance does not flake the test.
    assert!(duration.as_secs() < 300, "Long backtest took too long: {:?}", duration);
}
#[tokio::test]
/// Tracks peak resident memory across ten growing backtests without the
/// MemoryTracker helper, bounding total growth at 1 GB.
async fn test_memory_usage_monitoring() {
    use memory_stats::memory_stats;
    // Baseline falls back to 0 when stats are unavailable, which makes the
    // growth bound below trivially loose rather than failing.
    let initial_memory = memory_stats().map(|stats| stats.physical_mem).unwrap_or(0);
    let mut peak_memory = initial_memory;
    for i in 0..10 {
        // Each iteration uses a slightly larger dataset and strategy params.
        let data = create_realistic_test_data("BTC", 10_000 + i * 1000);
        let strategy = enhanced_sma_cross(10 + i, 30 + i * 2, 0.1 + i as f64 * 0.05);
        let mut backtest = HyperliquidBacktest::new(
            data,
            strategy,
            10000.0,
            HyperliquidCommission::default(),
        );
        backtest.calculate_with_funding();
        let _report = backtest.enhanced_report();
        if let Some(current_stats) = memory_stats() {
            peak_memory = peak_memory.max(current_stats.physical_mem);
        }
        drop(backtest);
    }
    let memory_growth = peak_memory.saturating_sub(initial_memory);
    info!("Peak memory growth: {} bytes", memory_growth);
    assert!(memory_growth < 1_000_000_000, "Excessive memory growth: {} bytes", memory_growth);
}
#[tokio::test]
/// Ten concurrent backtests on distinct synthetic assets; every task must
/// finish with finite return and drawdown.
async fn test_concurrent_api_simulation() {
    let num_concurrent = 10;
    let tasks: Vec<_> = (0..num_concurrent).map(|i| {
        tokio::spawn(async move {
            // Distinct ticker and parameterization per task.
            let data = create_realistic_test_data(&format!("ASSET{}", i), 5000);
            let strategy = enhanced_sma_cross(10 + i % 5, 30 + i % 10, 0.1 + (i as f64 % 5.0) * 0.1);
            let mut backtest = HyperliquidBacktest::new(
                data,
                strategy,
                10000.0,
                HyperliquidCommission::default(),
            );
            backtest.calculate_with_funding();
            let report = backtest.enhanced_report();
            (i, report.total_return, report.max_drawdown)
        })
    }).collect();
    let results = join_all(tasks).await;
    // Count tasks that both completed (no panic) and produced finite metrics.
    let mut successful_count = 0;
    for result in results {
        if let Ok((task_id, total_return, max_drawdown)) = result {
            if total_return.is_finite() && max_drawdown.is_finite() {
                successful_count += 1;
            }
        }
    }
    assert_eq!(successful_count, num_concurrent,
        "Not all concurrent operations completed successfully");
}
#[tokio::test]
/// CSV export sanity: header columns present, a substantial number of data
/// rows, and parseable close prices in the first few rows.
async fn test_data_export_integrity() {
    let data = create_realistic_test_data("BTC", 5000);
    let strategy = enhanced_sma_cross(20, 50, 0.3);
    let mut backtest = HyperliquidBacktest::new(
        data.clone(),
        strategy,
        10000.0,
        HyperliquidCommission::default(),
    );
    backtest.calculate_with_funding();
    let mut csv_buffer = Vec::new();
    backtest.enhanced_csv_export(&mut csv_buffer).unwrap();
    let csv_string = String::from_utf8(csv_buffer).unwrap();
    let lines: Vec<&str> = csv_string.lines().collect();
    assert!(!lines.is_empty(), "CSV export is empty");
    // Header row must expose the expected columns.
    assert!(lines[0].contains("timestamp"), "Missing timestamp header");
    assert!(lines[0].contains("close"), "Missing close header");
    assert!(lines[0].contains("funding_rate"), "Missing funding_rate header");
    // Loose lower bound: at least half the bars must have been exported.
    assert!(lines.len() > data.datetime.len() / 2, "CSV missing significant data");
    // Spot-check the first ten data rows. Note: `i` is 0-based relative to
    // the first data row (header skipped), so error messages are offset by
    // one from the file's line numbers.
    for (i, line) in lines.iter().skip(1).take(10).enumerate() {
        let fields: Vec<&str> = line.split(',').collect();
        assert!(fields.len() >= 3, "CSV line {} has insufficient fields", i);
        // NOTE(review): assumes the close price is the second CSV column —
        // confirm against the export format.
        if let Some(close_field) = fields.get(1) {
            assert!(close_field.parse::<f64>().is_ok(),
                "Invalid close price in CSV line {}", i);
        }
    }
}
#[tokio::test]
/// Feeds malformed fixtures through the backtest inside `catch_unwind`:
/// each scenario may fail at data creation, panic during the backtest, or
/// produce a (possibly NaN) report — all three outcomes are acceptable.
async fn test_error_recovery_scenarios() {
    // NOTE(review): the create_* helpers here are defined elsewhere in the
    // crate and evidently return Result-like values (matched Ok/Err below).
    let error_scenarios = vec![
        ("Invalid data format", create_invalid_format_data()),
        ("Missing timestamps", create_missing_timestamp_data()),
        ("Inconsistent lengths", create_inconsistent_length_data()),
    ];
    for (scenario_name, data_result) in error_scenarios {
        info!("Testing error recovery scenario: {}", scenario_name);
        match data_result {
            Ok(data) => {
                let strategy = enhanced_sma_cross(10, 20, 0.3);
                // catch_unwind converts a panicking backtest into an Err so
                // one bad scenario cannot abort the whole test.
                let backtest_result = std::panic::catch_unwind(|| {
                    let mut backtest = HyperliquidBacktest::new(
                        data,
                        strategy,
                        10000.0,
                        HyperliquidCommission::default(),
                    );
                    backtest.calculate_with_funding();
                    backtest.enhanced_report()
                });
                match backtest_result {
                    Ok(report) => {
                        // Finite or NaN are both tolerated; only ±inf fails.
                        assert!(report.total_return.is_finite() || report.total_return.is_nan(),
                            "Invalid report for scenario {}", scenario_name);
                    }
                    Err(_) => {
                        info!("Scenario {} failed as expected", scenario_name);
                    }
                }
            }
            Err(_) => {
                info!("Scenario {} failed at data creation as expected", scenario_name);
            }
        }
    }
}
/// Build a `size`-bar hourly fixture for `ticker` with a trend + cycle +
/// noise price model; unknown tickers use a base price of 100.
fn create_realistic_test_data(ticker: &str, size: usize) -> HyperliquidData {
    // Hourly timestamps starting 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let base_price = match ticker {
        "BTC" => 47000.0,
        "ETH" => 3500.0,
        "SOL" => 150.0,
        "AVAX" => 80.0,
        _ => 100.0,
    };
    // Price = base + 10% linear trend over the series + 3% slow cycle
    //       + 0.5% fast noise.
    let prices: Vec<f64> = (0..size)
        .map(|i| {
            let t = i as f64 / size as f64;
            let trend = base_price * 0.1 * t; let cycle = (i as f64 * 0.02).sin() * base_price * 0.03; let noise = (i as f64 * 0.5).sin() * base_price * 0.005; base_price + trend + cycle + noise
        })
        .collect();
    HyperliquidData {
        ticker: ticker.to_string(),
        datetime: datetime.clone(),
        // Open = previous close nudged 10% toward the current close; the
        // first bar opens at its own close.
        open: prices.iter().enumerate().map(|(i, p)| {
            if i == 0 { *p } else { prices[i-1] + (prices[i] - prices[i-1]) * 0.1 }
        }).collect(),
        // High/low are ±0.8% bands around close.
        high: prices.iter().map(|p| p + p * 0.008).collect(),
        low: prices.iter().map(|p| p - p * 0.008).collect(),
        close: prices,
        volume: (0..size).map(|i| {
            100.0 + (i as f64 * 0.1).sin().abs() * 50.0
        }).collect(),
        // Funding oscillates around 1 bp with ±2 bp amplitude.
        funding_rates: (0..size).map(|i| {
            0.0001 + (i as f64 * 0.03).sin() * 0.0002
        }).collect(),
        funding_timestamps: datetime,
    }
}
/// Build a 1000-bar minutely fixture with a 20% crash at bar 500, a partial
/// recovery phase through bar 600, and a volume spike around the crash.
fn create_flash_crash_data() -> HyperliquidData {
    let size = 1000;
    // One-minute timestamps starting 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 60, 0) .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let mut prices = Vec::new();
    let mut price = 47000.0;
    for i in 0..size {
        // Bar 500: instantaneous -20% crash. Bars 501-599: drift toward the
        // crash level (47000 * 0.8). Elsewhere: small sinusoidal wiggle.
        if i == 500 { price *= 0.8; } else if i > 500 && i < 600 { price += (47000.0 * 0.8 - price) * 0.1; } else {
            price += (i as f64 * 0.1).sin() * 10.0; }
        prices.push(price);
    }
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        // Open at previous close; first bar opens at its own close.
        open: prices.iter().enumerate().map(|(i, p)| {
            if i == 0 { *p } else { prices[i-1] }
        }).collect(),
        high: prices.iter().map(|p| p + 20.0).collect(),
        low: prices.iter().map(|p| p - 20.0).collect(),
        close: prices,
        // 10x volume spike in the 20-bar window around the crash.
        volume: (0..size).map(|i| {
            if i >= 490 && i <= 510 { 1000.0 } else { 100.0 } }).collect(),
        funding_rates: vec![0.0001; size],
        funding_timestamps: datetime,
    }
}
/// Build a 2000-bar half-hourly fixture with a compounding ±5% volatility
/// walk, periodic ±2% shocks every 50 bars, and a 1000.0 price floor.
fn create_high_volatility_data() -> HyperliquidData {
    let size = 2000;
    // 30-minute timestamps starting 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 1800, 0) .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    // `price` is mutated by the closure, so each element depends on the
    // accumulated walk, not just on `i`.
    let mut price = 47000.0;
    let prices: Vec<f64> = (0..size)
        .map(|i| {
            let volatility = (i as f64 * 0.2).sin() * price * 0.05; let shock = if i % 50 == 0 { (i as f64).cos() * price * 0.02 } else { 0.0 };
            price += volatility + shock;
            // Clamp so extreme drift cannot push prices to zero/negative.
            price.max(1000.0) })
        .collect();
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        // Open at previous close; first bar opens at its own close.
        open: prices.iter().enumerate().map(|(i, p)| {
            if i == 0 { *p } else { prices[i-1] }
        }).collect(),
        // Wide ±3% high/low bands to match the volatility regime.
        high: prices.iter().map(|p| p + p * 0.03).collect(),
        low: prices.iter().map(|p| p - p * 0.03).collect(),
        close: prices,
        volume: (0..size).map(|i| 100.0 + (i as f64 * 0.3).sin().abs() * 200.0).collect(),
        // Funding swings up to ±10 bp around the 1 bp base.
        funding_rates: (0..size).map(|i| 0.0001 + (i as f64 * 0.1).sin() * 0.001).collect(),
        funding_timestamps: datetime,
    }
}
/// Synthetic 2-hour-bar BTC series with very thin trading: tight 10-point
/// candle ranges, constant low volume, and a flat funding rate.
fn create_low_liquidity_data() -> HyperliquidData {
    let size = 500;
    // 2-hour bars starting at 2022-01-01 00:00:00 UTC.
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 7200, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    // Gentle oscillation around 47k; +/-50 points peak to trough.
    let prices: Vec<f64> = (0..size)
        .map(|i| 47000.0 + (i as f64 * 0.05).sin() * 50.0)
        .collect();
    let mut open = Vec::with_capacity(size);
    for (i, p) in prices.iter().enumerate() {
        // Each bar opens at the previous close (first bar opens at its close).
        open.push(if i == 0 { *p } else { prices[i - 1] });
    }
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 5.0).collect(),
        low: prices.iter().map(|p| p - 5.0).collect(),
        close: prices,
        volume: vec![10.0; size],
        funding_rates: vec![0.0001; size],
        funding_timestamps: datetime,
    }
}
/// Hourly BTC series whose funding rate jumps to 1% (100x the normal level)
/// for bars 400..=450 while price action stays calm, isolating the effect of
/// funding-driven P&L.
fn create_funding_spike_data() -> HyperliquidData {
    let size = 1000;
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    // Quiet price tape: slow +/-100-point oscillation around 47k.
    let prices: Vec<f64> = (0..size)
        .map(|i| 47000.0 + (i as f64 * 0.01).sin() * 100.0)
        .collect();
    let funding_rates: Vec<f64> = (0..size)
        .map(|i| match i {
            // Sustained extreme funding spike.
            400..=450 => 0.01,
            // Normal regime: small oscillation around one basis point.
            _ => 0.0001 + (i as f64 * 0.02).sin() * 0.0001,
        })
        .collect();
    // Each bar opens at the previous close; the first opens at its own close.
    let open: Vec<f64> = prices
        .iter()
        .enumerate()
        .map(|(i, p)| if i == 0 { *p } else { prices[i - 1] })
        .collect();
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 25.0).collect(),
        low: prices.iter().map(|p| p - 25.0).collect(),
        close: prices,
        volume: vec![100.0; size],
        funding_rates,
        funding_timestamps: datetime,
    }
}
/// Hourly BTC series of arbitrary length combining a +50% linear trend with
/// a medium cycle (amplitude 2000) and a short cycle (amplitude 200).
fn create_long_time_series_test_data(size: usize) -> HyperliquidData {
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let prices: Vec<f64> = (0..size)
        .map(|i| {
            // Normalized position in the series, 0.0..1.0.
            let t = i as f64 / size as f64;
            let long_trend = 47000.0 * (1.0 + t * 0.5);
            let medium_cycle = (i as f64 * 0.001).sin() * 2000.0;
            let short_cycle = (i as f64 * 0.01).sin() * 200.0;
            long_trend + medium_cycle + short_cycle
        })
        .collect();
    // Each bar opens at the previous close; the first opens at its own close.
    let open: Vec<f64> = prices
        .iter()
        .enumerate()
        .map(|(i, p)| if i == 0 { *p } else { prices[i - 1] })
        .collect();
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 50.0).collect(),
        low: prices.iter().map(|p| p - 50.0).collect(),
        close: prices,
        volume: vec![100.0; size],
        funding_rates: (0..size)
            .map(|i| 0.0001 + (i as f64 * 0.0001).sin() * 0.0001)
            .collect(),
        funding_timestamps: datetime,
    }
}
/// Stub for a fetch whose payload fails format validation; always returns
/// `Err` so callers can exercise their parse-failure path.
fn create_invalid_format_data() -> Result<HyperliquidData, &'static str> {
Err("Invalid data format")
}
/// Stub for a fetch whose payload lacks timestamp fields; always returns
/// `Err` so callers can exercise their missing-data path.
fn create_missing_timestamp_data() -> Result<HyperliquidData, &'static str> {
Err("Missing timestamp data")
}
/// Stub for a fetch whose series arrays disagree in length; always returns
/// `Err` so callers can exercise their consistency-check path.
fn create_inconsistent_length_data() -> Result<HyperliquidData, &'static str> {
Err("Inconsistent data lengths")
}
/// Regression-guards JSON payload parsing across the two known candle
/// response schemas ("v1" compact keys, "v2" verbose keys): both must remain
/// parseable by `parse_api_response`.
#[tokio::test]
async fn test_api_compatibility_regression() {
    let mut mock_server = MockHyperliquidServer::new().await;
    let api_versions = vec![
        ("v1", create_v1_api_response()),
        ("v2", create_v2_api_response()),
    ];
    for (version, response) in api_versions {
        info!("Testing API compatibility for version: {}", version);
        // Borrow the body (`as_str`) instead of moving the String into the
        // builder — the original moved `response` here and then borrowed it
        // again below, a use-after-move (E0382).
        let _mock = mock_server.server
            .mock("POST", "/info")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(response.as_str());
        let test_result = parse_api_response(&response);
        assert!(test_result.is_ok(), "Failed to parse {} API response", version);
    }
}
#[tokio::test]
async fn test_network_resilience() {
let mut mock_server = MockHyperliquidServer::new().await;
let network_scenarios = vec![
("connection_timeout", 408, "Request timeout"),
("server_error", 500, "Internal server error"),
("bad_gateway", 502, "Bad gateway"),
("service_unavailable", 503, "Service unavailable"),
("rate_limited", 429, "Too many requests"),
];
for (scenario, status_code, error_msg) in network_scenarios {
info!("Testing network resilience scenario: {}", scenario);
let _mock = mock_server.mock_api_error(status_code, error_msg);
let error = HyperliquidBacktestError::DataConversion(
format!("Network error {}: {}", status_code, error_msg)
);
assert!(error.to_string().contains("Data conversion error"));
assert!(error.to_string().contains(&status_code.to_string()));
}
}
/// Feeds deliberately broken datasets through `validate_hyperliquid_data`:
/// hard failures must be rejected; borderline cases may go either way.
#[tokio::test]
async fn test_data_integrity_validation() {
    let test_cases = vec![
        ("missing_ohlc_data", create_missing_ohlc_data()),
        ("inconsistent_timestamps", create_inconsistent_timestamp_data()),
        ("negative_prices", create_negative_price_data()),
        ("extreme_values", create_extreme_value_data()),
    ];
    for (case_name, data) in test_cases {
        info!("Testing data integrity case: {}", case_name);
        let validation_result = validate_hyperliquid_data(&data);
        // Only these two cases have a mandated outcome; the other cases
        // ("inconsistent_timestamps", "extreme_values") are intentionally
        // unconstrained — either accept or reject is acceptable today.
        if matches!(case_name, "missing_ohlc_data" | "negative_prices") {
            assert!(
                validation_result.is_err(),
                "Should reject invalid data for {}",
                case_name
            );
        }
    }
}
#[tokio::test]
async fn test_performance_under_load() {
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
let processed_count = Arc::new(AtomicUsize::new(0));
let error_count = Arc::new(AtomicUsize::new(0));
let tasks: Vec<_> = (0..20).map(|i| {
let processed = Arc::clone(&processed_count);
let errors = Arc::clone(&error_count);
tokio::spawn(async move {
for j in 0..10 {
let data = create_realistic_test_data(&format!("ASSET{}", i), 1000 + j * 100);
let strategy = enhanced_sma_cross(10 + j, 30 + j * 2, 0.1 + j as f64 * 0.05);
match std::panic::catch_unwind(|| {
let mut backtest = HyperliquidBacktest::new(
data,
strategy,
10000.0,
HyperliquidCommission::default(),
);
backtest.calculate_with_funding();
backtest.enhanced_report()
}) {
Ok(_) => {
processed.fetch_add(1, Ordering::Relaxed);
}
Err(_) => {
errors.fetch_add(1, Ordering::Relaxed);
}
}
}
})
}).collect();
join_all(tasks).await;
let total_processed = processed_count.load(Ordering::Relaxed);
let total_errors = error_count.load(Ordering::Relaxed);
info!("Performance under load: {} processed, {} errors", total_processed, total_errors);
assert!(total_processed > total_errors * 10, "Too many errors under load");
assert!(total_processed > 150, "Not enough requests processed successfully");
}
/// Small (100-bar) hourly BTC fixture: gentle sinusoidal price around 47k,
/// constant volume, and a mildly oscillating funding rate.
fn create_test_hyperliquid_data() -> HyperliquidData {
    let size = 100;
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let prices: Vec<f64> = (0..size)
        .map(|i| 47000.0 + (i as f64 * 0.1).sin() * 100.0)
        .collect();
    // Bars open at the prior close; the first bar opens at its own close.
    let mut open = vec![prices[0]];
    open.extend_from_slice(&prices[..size - 1]);
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 25.0).collect(),
        low: prices.iter().map(|p| p - 25.0).collect(),
        close: prices,
        volume: vec![100.0; size],
        funding_rates: (0..size)
            .map(|i| 0.0001 + (i as f64 * 0.01).sin() * 0.0001)
            .collect(),
        funding_timestamps: datetime,
    }
}
/// Hourly BTC series of caller-chosen length with a slow +/-500-point swing,
/// intended for scale/performance-style tests.
fn create_large_test_data(size: usize) -> HyperliquidData {
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let prices: Vec<f64> = (0..size)
        .map(|i| 47000.0 + (i as f64 * 0.01).sin() * 500.0)
        .collect();
    // Each bar opens at the previous close; the first opens at its own close.
    let open: Vec<f64> = (0..size)
        .map(|i| if i == 0 { prices[0] } else { prices[i - 1] })
        .collect();
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + 50.0).collect(),
        low: prices.iter().map(|p| p - 50.0).collect(),
        close: prices,
        volume: vec![100.0; size],
        funding_rates: (0..size)
            .map(|i| 0.0001 + (i as f64 * 0.001).sin() * 0.0001)
            .collect(),
        funding_timestamps: datetime,
    }
}
/// Canned "v1" candle API payload using the compact single-letter keys
/// ("t"/"T" open/close time in ms, "o"/"h"/"l"/"c" prices as decimal strings,
/// "n" trade count, "v" volume string), serialized to JSON text.
fn create_v1_api_response() -> String {
serde_json::json!([
{
"T": 1640995200000i64,
"c": "47000.5",
"h": "47500.0",
"l": "46500.0",
"n": 1000,
"o": "47200.0",
"t": 1640995200000i64,
"v": "125.5"
}
]).to_string()
}
/// Canned "v2" candle API payload with verbose field names plus an
/// unrecognized "extra_field", used to confirm the parser tolerates additive
/// schema changes (forward compatibility).
fn create_v2_api_response() -> String {
serde_json::json!([
{
"timestamp": 1640995200000i64,
"close": "47000.5",
"high": "47500.0",
"low": "46500.0",
"trades": 1000,
"open": "47200.0",
"time": 1640995200000i64,
"volume": "125.5",
"extra_field": "new_data" }
]).to_string()
}
/// Checks that `response` is syntactically valid JSON. The parsed value is
/// discarded — only well-formedness is verified; any parse error propagates.
fn parse_api_response(response: &str) -> Result<(), Box<dyn std::error::Error>> {
    serde_json::from_str::<serde_json::Value>(response)?;
    Ok(())
}
/// Structural sanity checks for a `HyperliquidData` series.
///
/// Rejects: empty OHLC arrays, mismatched OHLCV/time array lengths,
/// non-positive or non-finite close prices, out-of-order timestamps, and
/// funding rate/timestamp arrays of different lengths. Funding arrays are
/// only checked against each other since funding is sampled on its own grid.
fn validate_hyperliquid_data(data: &HyperliquidData) -> Result<(), String> {
    if data.open.is_empty() {
        return Err("Missing OHLC data".to_string());
    }
    // All price/volume/time series must agree in length.
    let len = data.close.len();
    if data.datetime.len() != len
        || data.open.len() != len
        || data.high.len() != len
        || data.low.len() != len
        || data.volume.len() != len
    {
        return Err("Inconsistent data lengths".to_string());
    }
    if data.close.iter().any(|&price| price <= 0.0) {
        return Err("Negative or zero prices detected".to_string());
    }
    if data.close.iter().any(|price| !price.is_finite()) {
        return Err("Non-finite prices detected".to_string());
    }
    // Timestamps must never go backwards.
    if data.datetime.windows(2).any(|w| w[1] < w[0]) {
        return Err("Timestamps are not monotonically non-decreasing".to_string());
    }
    if data.funding_rates.len() != data.funding_timestamps.len() {
        return Err("Inconsistent funding data lengths".to_string());
    }
    Ok(())
}
/// Fixture with entirely empty OHLCV/time arrays but a single funding sample,
/// used to exercise the "missing OHLC data" validation path.
fn create_missing_ohlc_data() -> HyperliquidData {
    let funding_ts = DateTime::from_timestamp(1640995200, 0)
        .unwrap()
        .with_timezone(&FixedOffset::east_opt(0).unwrap());
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: Vec::new(),
        open: Vec::new(),
        high: Vec::new(),
        low: Vec::new(),
        close: Vec::new(),
        volume: Vec::new(),
        funding_rates: vec![0.0001],
        funding_timestamps: vec![funding_ts],
    }
}
fn create_inconsistent_timestamp_data() -> HyperliquidData {
let datetime1 = vec![
DateTime::from_timestamp(1640995200, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
DateTime::from_timestamp(1640995100, 0) .unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
];
HyperliquidData {
ticker: "BTC".to_string(),
datetime: datetime1.clone(),
open: vec![47000.0, 47100.0],
high: vec![47500.0, 47600.0],
low: vec![46500.0, 46600.0],
close: vec![47200.0, 47300.0],
volume: vec![100.0, 110.0],
funding_rates: vec![0.0001, 0.00015],
funding_timestamps: datetime1,
}
}
/// Single-bar fixture whose open price is -100.0 (all other fields
/// plausible), for negative-price rejection tests.
fn create_negative_price_data() -> HyperliquidData {
    let datetime = vec![DateTime::from_timestamp(1640995200, 0)
        .unwrap()
        .with_timezone(&FixedOffset::east_opt(0).unwrap())];
    HyperliquidData {
        ticker: "BTC".to_string(),
        datetime: datetime.clone(),
        // Deliberately invalid open price.
        open: vec![-100.0],
        high: vec![47500.0],
        low: vec![46500.0],
        close: vec![47200.0],
        volume: vec![100.0],
        funding_rates: vec![0.0001],
        funding_timestamps: datetime,
    }
}
/// Single-bar fixture pushing numeric extremes: `high` at `f64::MAX`, `low`
/// at the smallest positive double, and oversized close/volume/funding
/// values — used to probe overflow/precision handling in validation.
fn create_extreme_value_data() -> HyperliquidData {
let datetime = vec![
DateTime::from_timestamp(1640995200, 0)
.unwrap()
.with_timezone(&FixedOffset::east_opt(0).unwrap()),
];
HyperliquidData {
ticker: "BTC".to_string(),
datetime: datetime.clone(),
// Extreme but still finite values throughout.
open: vec![f64::MAX / 2.0], high: vec![f64::MAX],
low: vec![f64::MIN_POSITIVE],
close: vec![1e10], volume: vec![1e15], funding_rates: vec![1.0], funding_timestamps: datetime,
}
}
/// Per-ticker hourly fixture: a base price chosen by symbol (BTC 47k, ETH
/// 3.5k, SOL 150, AVAX 80, otherwise 100) with a mild up-trend plus two
/// sinusoidal components, 1% candle ranges, and oscillating volume/funding.
fn create_realistic_test_data(ticker: &str, size: usize) -> HyperliquidData {
    let datetime: Vec<DateTime<FixedOffset>> = (0..size)
        .map(|i| {
            DateTime::from_timestamp(1640995200 + i as i64 * 3600, 0)
                .unwrap()
                .with_timezone(&FixedOffset::east_opt(0).unwrap())
        })
        .collect();
    let base_price = match ticker {
        "BTC" => 47000.0,
        "ETH" => 3500.0,
        "SOL" => 150.0,
        "AVAX" => 80.0,
        _ => 100.0,
    };
    let prices: Vec<f64> = (0..size)
        .map(|i| {
            // Linear drift of up to +10% across the series, plus slow and
            // fast oscillations scaled to the base price.
            let trend = (i as f64 / size as f64) * base_price * 0.1;
            let cycle = (i as f64 * 0.02).sin() * base_price * 0.05;
            let noise = (i as f64 * 0.5).sin() * base_price * 0.01;
            base_price + trend + cycle + noise
        })
        .collect();
    // Each bar opens at the previous close; the first opens at its own close.
    let open: Vec<f64> = prices
        .iter()
        .enumerate()
        .map(|(i, p)| if i == 0 { *p } else { prices[i - 1] })
        .collect();
    HyperliquidData {
        ticker: ticker.to_string(),
        datetime: datetime.clone(),
        open,
        high: prices.iter().map(|p| p + p * 0.01).collect(),
        low: prices.iter().map(|p| p - p * 0.01).collect(),
        close: prices,
        volume: (0..size).map(|i| 100.0 + (i as f64 * 0.1).sin() * 50.0).collect(),
        funding_rates: (0..size)
            .map(|i| 0.0001 + (i as f64 * 0.01).sin() * 0.0001)
            .collect(),
        funding_timestamps: datetime,
    }
}
// NOTE(review): removed duplicate definition of `test_api_compatibility_regression`.
// An identical copy is defined earlier in this file, and Rust rejects two
// same-named items in one module (error E0428), so the file did not compile.
// NOTE(review): removed duplicate definition of `test_network_resilience`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `test_data_integrity_validation`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `test_performance_under_load`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_v1_api_response`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_v2_api_response`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `parse_api_response`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `validate_hyperliquid_data`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_missing_ohlc_data`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_inconsistent_timestamp_data`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_negative_price_data`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_extreme_value_data`.
// An identical copy is defined earlier in this file (E0428).
// NOTE(review): removed duplicate definition of `create_realistic_test_data`.
// An identical copy is defined earlier in this file, and a third same-named
// variant appears further down; duplicates fail to compile (E0428).
/// Runs three strategy configurations across four adversarial market regimes
/// and asserts the resulting metrics stay finite (no NaN/inf blow-ups).
#[tokio::test]
async fn test_stress_testing_workflow() {
// Each scenario is a 1000-bar synthetic series built by helpers below.
let extreme_scenarios = vec![
("High Volatility", create_high_volatility_data(1000)),
("Market Crash", create_crash_scenario_data(1000)),
("Bull Market", create_bull_market_data(1000)),
("Sideways Market", create_sideways_market_data(1000)),
];
for (scenario_name, data) in extreme_scenarios {
info!("Testing stress scenario: {}", scenario_name);
// Fresh strategy instances per scenario — they are consumed by the backtest.
let strategies = vec![
enhanced_sma_cross(5, 20, 0.1),
enhanced_sma_cross(20, 50, 0.5),
funding_arbitrage_strategy(0.0005, Default::default()),
];
for (i, strategy) in strategies.into_iter().enumerate() {
let mut backtest = HyperliquidBacktest::new(
data.clone(),
strategy,
10000.0,
HyperliquidCommission::default(),
);
backtest.calculate_with_funding();
let report = backtest.enhanced_report();
// Finiteness is the invariant under stress: results may be poor, but
// they must never be NaN or infinite.
assert!(report.total_return.is_finite(),
"Strategy {} in scenario {} produced invalid return", i, scenario_name);
assert!(report.max_drawdown.is_finite(),
"Strategy {} in scenario {} produced invalid drawdown", i, scenario_name);
info!("Scenario {} Strategy {}: return={:.2}%, drawdown={:.2}%",
scenario_name, i, report.total_return * 100.0, report.max_drawdown * 100.0);
}
}
}
/// Confirms Hyperliquid data converts into rs-backtester's data shape and can
/// be paired with stock rs-backtester strategies (SMA cross, RSI, Bollinger)
/// without constructor failures.
#[tokio::test]
async fn test_rs_backtester_compatibility_regression() {
let hyperliquid_data = create_test_hyperliquid_data();
// Bridge into the upstream rs-backtester data representation.
let rs_data = hyperliquid_data.to_rs_backtester_data();
use rs_backtester::prelude::*;
let rs_strategies = vec![
strategies::sma_cross(10, 20),
strategies::rsi_strategy(14, 30.0, 70.0),
strategies::bollinger_bands(20, 2.0),
];
for (i, strategy) in rs_strategies.into_iter().enumerate() {
let backtest = Backtest::new(
rs_data.clone(),
strategy,
10000.0,
Commission::default(),
);
// Construction succeeding with intact data and capital is the whole
// check here; the backtest is not actually run.
assert!(backtest.data.close.len() > 0);
assert_eq!(backtest.initial_capital, 10000.0);
info!("rs-backtester strategy {} compatibility verified", i);
}
}
/// Verifies structural invariants of a generated `HyperliquidData` fixture
/// (non-empty, equal-length arrays) and that conversion to the rs-backtester
/// shape preserves series lengths.
#[tokio::test]
async fn test_data_structure_compatibility() {
let data = create_test_hyperliquid_data();
// Every series must be populated.
assert!(!data.ticker.is_empty());
assert!(!data.datetime.is_empty());
assert!(!data.open.is_empty());
assert!(!data.high.is_empty());
assert!(!data.low.is_empty());
assert!(!data.close.is_empty());
assert!(!data.volume.is_empty());
assert!(!data.funding_rates.is_empty());
assert!(!data.funding_timestamps.is_empty());
// All OHLCV arrays must align with the timestamp axis.
let len = data.datetime.len();
assert_eq!(data.open.len(), len);
assert_eq!(data.high.len(), len);
assert_eq!(data.low.len(), len);
assert_eq!(data.close.len(), len);
assert_eq!(data.volume.len(), len);
// Conversion must not drop or duplicate bars.
let rs_data = data.to_rs_backtester_data();
assert_eq!(rs_data.datetime.len(), len);
assert_eq!(rs_data.close.len(), len);
if !data.funding_timestamps.is_empty() {
let funding_rate = data.get_funding_rate_at(data.datetime[0]);
// NOTE(review): this assertion is a tautology (true for any Option) —
// it only proves `get_funding_rate_at` does not panic. Consider
// asserting a concrete expected value instead.
assert!(funding_rate.is_some() || funding_rate.is_none()); }
}
// NOTE(review): removed duplicate definition of `create_test_hyperliquid_data`.
// A same-named function is defined earlier in this file (the earlier version
// is kept; this copy merely delegated to `create_realistic_test_data`).
// Two same-named items in one module fail to compile (E0428).
// NOTE(review): removed duplicate definition of `create_large_test_data`.
// A same-named function is defined earlier in this file (E0428); the earlier
// version is kept.
// NOTE(review): removed the third definition of `create_realistic_test_data`.
// A same-named function is defined earlier in this file (E0428). This copy
// used a slightly different synthetic-price formula; the earlier definition
// is kept as the canonical fixture generator.
/// Amplifies an otherwise realistic BTC series: each close is displaced by a
/// sinusoidal term scaled by 0.1 * 5x of its prior value, with 2% candle
/// ranges rebuilt around the new closes.
///
/// NOTE(review): a zero-argument `create_high_volatility_data()` is also
/// defined earlier in this file; two same-named functions in one module will
/// not compile (E0428) — one needs renaming or removal.
fn create_high_volatility_data(size: usize) -> HyperliquidData {
let mut data = create_realistic_test_data("BTC", size);
for i in 0..size {
let volatility_multiplier = 5.0;
let base_price = data.close[i];
let high_vol_change = (i as f64 * 0.1).sin() * base_price * 0.1 * volatility_multiplier;
data.close[i] = base_price + high_vol_change;
data.high[i] = data.close[i] + base_price * 0.02;
data.low[i] = data.close[i] - base_price * 0.02;
// Opens chain to close[i-1] AFTER it was rewritten on the prior iteration.
data.open[i] = if i > 0 { data.close[i-1] } else { data.close[i] };
}
data
}
/// Overlays a progressive crash on realistic BTC data: between one third and
/// one half of the series, every OHLC field is scaled down linearly until it
/// reaches 50% of its original level.
fn create_crash_scenario_data(size: usize) -> HyperliquidData {
    let mut data = create_realistic_test_data("BTC", size);
    let crash_start = size / 3;
    let crash_end = crash_start + size / 6;
    for i in crash_start..crash_end {
        // Progress through the crash window, 0.0..1.0.
        let progress = (i - crash_start) as f64 / (crash_end - crash_start) as f64;
        // Scale factor falls linearly from 1.0 toward 0.5.
        let crash_factor = 1.0 - progress * 0.5;
        data.close[i] *= crash_factor;
        data.high[i] *= crash_factor;
        data.low[i] *= crash_factor;
        data.open[i] *= crash_factor;
    }
    data
}
/// Overlays a strong bull run on realistic BTC data: every bar's OHLC values
/// are scaled by a factor growing linearly from 1x toward 3x over the series.
fn create_bull_market_data(size: usize) -> HyperliquidData {
    let mut data = create_realistic_test_data("BTC", size);
    for i in 0..size {
        let bull_factor = 1.0 + (i as f64 / size as f64) * 2.0;
        // Scale all four price series by the same factor.
        for series in [
            &mut data.close,
            &mut data.high,
            &mut data.low,
            &mut data.open,
        ] {
            series[i] *= bull_factor;
        }
    }
    data
}
/// Rewrites realistic BTC data into a range-bound market: close oscillates
/// within +/-5% of the first close, with 1% candle ranges and each bar
/// opening at the previous (rewritten) close.
fn create_sideways_market_data(size: usize) -> HyperliquidData {
    let mut data = create_realistic_test_data("BTC", size);
    for i in 0..size {
        // Anchor on close[0]; at i == 0 the wobble is zero, so close[0]
        // keeps its original value and remains a stable anchor afterwards.
        let anchor = data.close[0];
        let wobble = (i as f64 * 0.2).sin() * anchor * 0.05;
        let new_close = anchor + wobble;
        data.open[i] = if i == 0 { new_close } else { data.close[i - 1] };
        data.close[i] = new_close;
        data.high[i] = new_close + anchor * 0.01;
        data.low[i] = new_close - anchor * 0.01;
    }
    data
}