#![cfg(all(feature = "unstable", feature = "chrono"))]
use chrono::{DateTime, Duration, Utc};
use solar_positioning::spa;
use std::collections::HashMap;
use std::time::Instant;
#[test]
fn test_combined_pattern_realistic_scenario() {
    // Realistic workload: a 10×10 coordinate grid evaluated at 24 hourly time
    // steps (starting at the 2023 summer solstice), comparing the naive
    // full-SPA path against the time-dependent-parts cache for both
    // numerical agreement and speed.
    let base_datetime = "2023-06-21T00:00:00Z".parse::<DateTime<Utc>>().unwrap();
    let coordinates: Vec<(f64, f64)> = (0..10)
        .flat_map(|i| {
            (0..10).map(move |j| {
                let lat = 47.0 + (i as f64) * 0.5;
                let lon = 7.0 + (j as f64) * 0.5;
                (lat, lon)
            })
        })
        .collect();
    let times: Vec<DateTime<Utc>> = (0..24)
        .map(|h| base_datetime + Duration::hours(h))
        .collect();
    println!(
        "Testing {}×{} grid over {} hours = {} calculations",
        10,
        10,
        times.len(),
        coordinates.len() * times.len()
    );

    // Loop-invariant: construct (and validate) the refraction correction once
    // instead of once per calculation, so its cost doesn't pollute either timing.
    let refraction = solar_positioning::RefractionCorrection::new(1013.25, 15.0).unwrap();

    // --- Naive approach: full SPA for every (time, coordinate) pair. ---
    let start = Instant::now();
    let mut naive_results = Vec::with_capacity(coordinates.len() * times.len());
    for &time in &times {
        for &(lat, lon) in &coordinates {
            let result = spa::solar_position(
                time,
                lat,
                lon,
                100.0, // elevation (m)
                69.0,  // delta-T (s)
                Some(refraction.clone()),
            )
            .unwrap();
            naive_results.push(result);
        }
    }
    let naive_duration = start.elapsed();

    // --- Cached approach: time-dependent parts computed once per time step. ---
    let start = Instant::now();
    let mut cached_results = Vec::with_capacity(coordinates.len() * times.len());
    let mut time_cache: HashMap<DateTime<Utc>, spa::SpaTimeDependent> = HashMap::new();
    for &time in &times {
        let time_parts = time_cache
            .entry(time)
            .or_insert_with(|| spa::spa_time_dependent_parts(time, 69.0).unwrap());
        for &(lat, lon) in &coordinates {
            let result = spa::spa_with_time_dependent_parts(
                lat,
                lon,
                100.0,
                Some(refraction.clone()),
                time_parts,
            )
            .unwrap();
            cached_results.push(result);
        }
    }
    let cached_duration = start.elapsed();

    // Both paths must produce numerically identical positions.
    assert_eq!(naive_results.len(), cached_results.len());
    for (naive, cached) in naive_results.iter().zip(cached_results.iter()) {
        assert!((naive.azimuth() - cached.azimuth()).abs() < 1e-10);
        assert!((naive.zenith_angle() - cached.zenith_angle()).abs() < 1e-10);
    }

    let speedup = naive_duration.as_secs_f64() / cached_duration.as_secs_f64();
    println!("Naive approach: {:.3}s", naive_duration.as_secs_f64());
    println!(
        "Time-cached approach: {:.3}s",
        cached_duration.as_secs_f64()
    );
    println!("Speedup: {:.2}x", speedup);
    println!("Time cache entries: {}", time_cache.len());
    // NOTE(review): wall-clock speedup assertions can be flaky on loaded CI
    // machines; 1.5x leaves headroom below the typically much larger speedup.
    assert!(
        speedup > 1.5,
        "Expected speedup > 1.5x, got {:.2}x",
        speedup
    );
}
#[test]
fn test_combined_pattern_varying_time_density() {
    // Exercises the time-parts cache across different coordinate/time mixes,
    // from coordinate-heavy (10C×1T) to time-heavy (1C×10T), and checks that
    // the cache holds exactly one entry per distinct time step.
    let base_datetime = "2023-06-21T00:00:00Z".parse::<DateTime<Utc>>().unwrap();
    // (coordinate count, time count) scenarios.
    let test_scenarios = vec![(5, 2), (5, 10), (2, 10), (10, 1), (1, 10)];

    // Loop-invariant: validate the refraction parameters once, not per calculation.
    let refraction = solar_positioning::RefractionCorrection::new(1013.25, 15.0).unwrap();

    for (coord_count, time_count) in test_scenarios {
        let coordinates: Vec<(f64, f64)> = (0..coord_count)
            .map(|i| (50.0 + i as f64, 10.0 + i as f64))
            .collect();
        let times: Vec<DateTime<Utc>> = (0..time_count)
            .map(|h| base_datetime + Duration::hours(h))
            .collect();

        let start = Instant::now();
        let mut time_cache: HashMap<DateTime<Utc>, spa::SpaTimeDependent> = HashMap::new();
        let mut result_count = 0;
        for &time in &times {
            // One expensive time-dependent computation per distinct time step.
            let time_parts = time_cache
                .entry(time)
                .or_insert_with(|| spa::spa_time_dependent_parts(time, 69.0).unwrap());
            for &(lat, lon) in &coordinates {
                let _result = spa::spa_with_time_dependent_parts(
                    lat,
                    lon,
                    0.0,
                    Some(refraction.clone()),
                    time_parts,
                )
                .unwrap();
                result_count += 1;
            }
        }
        let cached_duration = start.elapsed();

        println!(
            "{}C×{}T ({} calcs): {:.3}s, {} cache entries",
            coord_count,
            time_count,
            result_count,
            cached_duration.as_secs_f64(),
            time_cache.len()
        );
        // Every (coordinate, time) pair was evaluated exactly once, and the
        // cache never grew beyond one entry per time step.
        assert_eq!(result_count, coord_count * time_count);
        assert_eq!(time_cache.len(), time_count as usize);
    }
}
#[test]
fn test_combined_vs_pure_patterns() {
    // Contrasts a pure coordinate sweep (100 coords sharing one precomputed
    // time) with a combined pattern (10 coords × 10 times via the time cache).
    // Both perform 100 total calculations; only the degree of time-parts
    // sharing differs.
    let base_datetime = "2023-06-21T12:00:00Z".parse::<DateTime<Utc>>().unwrap();
    let coordinates: Vec<(f64, f64)> = (0..100)
        .map(|i| (45.0 + (i as f64) * 0.1, 8.0 + (i as f64) * 0.1))
        .collect();

    // Loop-invariant: construct the refraction correction once, outside both
    // timed sections, so the phases are compared on SPA work alone.
    let refraction = solar_positioning::RefractionCorrection::new(1013.25, 15.0).unwrap();

    println!("\n=== Pure Coordinate Sweep (100 coords, 1 time) ===");
    let start = Instant::now();
    let time_parts = spa::spa_time_dependent_parts(base_datetime, 69.0).unwrap();
    for &(lat, lon) in &coordinates {
        let _result = spa::spa_with_time_dependent_parts(
            lat,
            lon,
            0.0,
            Some(refraction.clone()),
            &time_parts,
        )
        .unwrap();
    }
    let coord_sweep_duration = start.elapsed();
    println!("Duration: {:.3}s", coord_sweep_duration.as_secs_f64());

    // Combined pattern: the first 10 coordinates at 10 hourly time steps.
    let coords_small: Vec<(f64, f64)> = coordinates[0..10].to_vec();
    let times: Vec<DateTime<Utc>> = (0..10)
        .map(|h| base_datetime + Duration::hours(h))
        .collect();
    println!("\n=== Combined Pattern (10 coords, 10 times) ===");
    let start = Instant::now();
    let mut time_cache: HashMap<DateTime<Utc>, spa::SpaTimeDependent> = HashMap::new();
    for &time in &times {
        let time_parts = time_cache
            .entry(time)
            .or_insert_with(|| spa::spa_time_dependent_parts(time, 69.0).unwrap());
        for &(lat, lon) in &coords_small {
            let _result = spa::spa_with_time_dependent_parts(
                lat,
                lon,
                0.0,
                Some(refraction.clone()),
                time_parts,
            )
            .unwrap();
        }
    }
    let combined_duration = start.elapsed();
    println!("Duration: {:.3}s", combined_duration.as_secs_f64());
    println!("Time cache entries: {}", time_cache.len());

    println!("\n=== Analysis ===");
    println!("Both scenarios: 100 total calculations");
    println!("Pure coordinate sweep: leverages maximum time sharing");
    println!("Combined pattern: partial time sharing (10 time points)");
    let efficiency_ratio = coord_sweep_duration.as_secs_f64() / combined_duration.as_secs_f64();
    println!(
        "Efficiency ratio (coord_sweep/combined): {:.2}",
        efficiency_ratio
    );
}