quaver_rs/difficulty_processor/
calculations.rs

use crate::rulesets::structs::{StrainSolverData, Hand};

/// Calculation functions for difficulty processing

/// Used to calculate Coefficient for Strain Difficulty
///
/// Interpolates between `LOWEST_DIFFICULTY` and `strain_max` based on where
/// `duration` falls inside the `[x_min, x_max]` window, shaped by exponent
/// `exp`. When the window is exceeded on a low-density map, a value derived
/// from the average note density is returned instead.
pub fn get_coefficient_value(
    duration: f32,
    x_min: f32,
    x_max: f32,
    strain_max: f32,
    exp: f32,
    average_note_density: f32,
) -> f32 {
    const LOWEST_DIFFICULTY: f32 = 1.0;
    const DENSITY_MULTIPLIER: f32 = 0.266;
    const DENSITY_DIFFICULTY_MIN: f32 = 0.4;

    // Position of `duration` inside the [x_min, x_max] window, inverted so
    // shorter durations yield larger ratios; floored at zero.
    let normalized_position = (duration - x_min) / (x_max - x_min);
    let ratio = (1.0 - normalized_position).max(0.0);

    // Window exceeded on a map that is not dense (nps below 4):
    // rate by note density instead of duration.
    if ratio == 0.0 && average_note_density < 4.0 {
        return if average_note_density < 1.0 {
            // Density too low to be meaningful — return the floor value.
            DENSITY_DIFFICULTY_MIN
        } else {
            average_note_density * DENSITY_MULTIPLIER + 0.134
        };
    }

    // Interpolate between the lowest difficulty and the strain ceiling.
    LOWEST_DIFFICULTY + (strain_max - LOWEST_DIFFICULTY) * ratio.powf(exp)
}
34/// Calculate overall difficulty of a map
35pub fn calculate_overall_difficulty(
36    strain_solver_data: &mut [StrainSolverData],
37    map_start: f32,
38    map_end: f32,
39    use_fallback: bool,
40) -> f32 {
41    // When the map has only scratch key notes, StrainSolverData would be empty, so we return 0
42    if strain_solver_data.is_empty() {
43        return 0.0;
44    }
45
46    // Solve strain value of every data point
47    for data in strain_solver_data.iter_mut() {
48        data.calculate_strain_value();
49    }
50
51    let calculated_diff = strain_solver_data
52        .iter()
53        .filter(|s| matches!(s.hand, Hand::Left | Hand::Right))
54        .map(|s| s.total_strain_value)
55        .sum::<f32>()
56        / strain_solver_data
57            .iter()
58            .filter(|s| matches!(s.hand, Hand::Left | Hand::Right))
59            .count() as f32;
60
61    let bins = create_difficulty_bins(strain_solver_data, map_start, map_end, use_fallback);
62
63    if !bins.iter().any(|&strain| strain > 0.0) {
64        return 0.0;
65    }
66
67    let (continuity_adjustment, continuity) = calculate_continuity_adjustment(&bins);
68    let short_map_adjustment = calculate_short_map_adjustment(&bins, continuity);
69
70    calculated_diff * continuity_adjustment * short_map_adjustment
71}
73/// Create difficulty bins for analysis
74fn create_difficulty_bins(
75    strain_solver_data: &[StrainSolverData],
76    map_start: f32,
77    map_end: f32,
78    use_fallback: bool,
79) -> Vec<f32> {
80    let mut bins = Vec::new();
81    const BIN_SIZE: f32 = 1000.0;
82
83    let mut left_index = 0;
84    let mut right_index = 0;
85
86    // Find starting index
87    while left_index < strain_solver_data.len() && strain_solver_data[left_index].start_time < map_start {
88        left_index += 1;
89    }
90
91    for i in (map_start as i32..map_end as i32).step_by(BIN_SIZE as usize) {
92        let bin_start = i as f32;
93        let bin_end = bin_start + BIN_SIZE;
94
95        let values_in_bin: Vec<&StrainSolverData> = if use_fallback {
96            strain_solver_data
97                .iter()
98                .filter(|s| s.start_time >= bin_start && s.start_time < bin_end)
99                .collect()
100        } else {
101            while right_index < strain_solver_data.len() - 1 
102                && strain_solver_data[right_index + 1].start_time < bin_end {
103                right_index += 1;
104            }
105
106            if left_index >= strain_solver_data.len() {
107                bins.push(0.0);
108                continue;
109            }
110
111            strain_solver_data[left_index..=right_index].iter().collect()
112        };
113
114        let average_rating = if values_in_bin.is_empty() {
115            0.0
116        } else {
117            values_in_bin.iter().map(|s| s.total_strain_value).sum::<f32>() / values_in_bin.len() as f32
118        };
119
120        bins.push(average_rating);
121        left_index = right_index + 1;
122    }
123
124    bins
125}
/// Calculate continuity adjustment for difficulty
///
/// Returns `(continuity_adjustment, continuity)`:
/// * `continuity` measures how evenly strain is spread across the map
///   (1.0 = perfectly even), derived from each non-empty bin's strain
///   relative to the average of the hardest 40% of bins.
/// * `continuity_adjustment` is a multiplier in [0.90, 1.05]: very
///   continuous maps are adjusted downward, bursty maps slightly upward.
pub fn calculate_continuity_adjustment(bins: &[f32]) -> (f32, f32) {
    // Average of the hardest 40% of the map
    let cutoff_pos = (bins.len() as f32 * 0.4).floor() as usize;
    let mut sorted_bins = bins.to_vec();
    // total_cmp is a total order over f32 (NaN-safe), unlike the previous
    // partial_cmp().unwrap(), which would panic on a NaN strain value.
    sorted_bins.sort_unstable_by(|a, b| b.total_cmp(a));

    let top_40 = &sorted_bins[..cutoff_pos];
    let easy_rating_cutoff = if top_40.is_empty() {
        0.0
    } else {
        top_40.iter().sum::<f32>() / top_40.len() as f32
    };

    // Calculate continuity - this should match the C# implementation exactly:
    // the mean of sqrt(strain / cutoff) over the non-empty bins.
    let continuity = if easy_rating_cutoff > 0.0 {
        let (sum, count) = bins
            .iter()
            .filter(|&&strain| strain > 0.0)
            .map(|&strain| (strain / easy_rating_cutoff).sqrt())
            .fold((0.0f32, 0usize), |(s, c), v| (s + v, c + 1));

        if count > 0 { sum / count as f32 } else { 0.0 }
    } else {
        0.0
    };

    // Apply continuity adjustment
    const MAX_CONTINUITY: f32 = 1.00;
    const AVG_CONTINUITY: f32 = 0.85;
    const MIN_CONTINUITY: f32 = 0.60;

    const MAX_ADJUSTMENT: f32 = 1.05;
    const AVG_ADJUSTMENT: f32 = 1.00;
    const MIN_ADJUSTMENT: f32 = 0.90;

    // Above-average continuity interpolates down toward MIN_ADJUSTMENT;
    // below-average interpolates up toward MAX_ADJUSTMENT.
    let continuity_adjustment = if continuity > AVG_CONTINUITY {
        let continuity_factor = 1.0 - (continuity - AVG_CONTINUITY) / (MAX_CONTINUITY - AVG_CONTINUITY);
        (continuity_factor * (AVG_ADJUSTMENT - MIN_ADJUSTMENT) + MIN_ADJUSTMENT)
            .clamp(MIN_ADJUSTMENT, AVG_ADJUSTMENT)
    } else {
        let continuity_factor = 1.0 - (continuity - MIN_CONTINUITY) / (AVG_CONTINUITY - MIN_CONTINUITY);
        (continuity_factor * (MAX_ADJUSTMENT - AVG_ADJUSTMENT) + AVG_ADJUSTMENT)
            .clamp(AVG_ADJUSTMENT, MAX_ADJUSTMENT)
    };

    (continuity_adjustment, continuity)
}
/// Calculate short map adjustment for difficulty
///
/// Scales difficulty down (to at most 0.75x) for maps whose effective drain
/// time falls under the 60-second threshold; at or beyond the threshold the
/// multiplier saturates at 1.0.
pub fn calculate_short_map_adjustment(bins: &[f32], continuity: f32) -> f32 {
    const MAX_SHORT_MAP_ADJUSTMENT: f32 = 0.75;
    const SHORT_MAP_THRESHOLD: f32 = 60.0 * 1000.0; // 60 seconds in milliseconds
    const BIN_SIZE: f32 = 1000.0;

    // Effective play time in ms: bin count scaled by how continuous the map is.
    let true_drain_time = bins.len() as f32 * continuity * BIN_SIZE;

    // sqrt curve that reaches exactly 1.0 at the threshold, floored at 0.75.
    let drain_ratio = true_drain_time / SHORT_MAP_THRESHOLD;
    let curve = 0.25 * drain_ratio.sqrt() + 0.75;
    curve.min(1.0).max(MAX_SHORT_MAP_ADJUSTMENT)
}