// osu_map_analyzer/analyze/stream.rs
use crate::utils::bpm;
use rosu_map::{section::hit_objects::HitObject, Beatmap};
use std::collections::VecDeque;
4
/// Detects stream patterns — runs of notes spaced at 1/4-beat intervals —
/// in a parsed osu! beatmap. Construct with [`Stream::new`], then call
/// [`Stream::analyze`].
pub struct Stream {
    // The parsed beatmap under analysis.
    map: Beatmap,
}
8
/// Summary of the stream-pattern analysis produced by [`Stream::analyze`].
// NOTE(review): `Deserialize` is derived unconditionally while `Serialize`
// is gated behind the "serialize" feature — confirm this asymmetry is
// intentional rather than a copy-paste slip.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
pub struct StreamAnalysis {
    /// Weighted blend of density, consistency, variety, long-stream ratio
    /// and average stream length; capped at 1.0.
    pub overall_confidence: f64,
    /// Number of detected runs of length 6..10 (counted in qualifying gaps).
    pub short_streams: usize,
    /// Number of detected runs of length 10..20.
    pub medium_streams: usize,
    /// Number of detected runs of length 20 or more.
    pub long_streams: usize,

    /// Length of the longest detected run (0 if none).
    pub max_stream_length: usize,
    /// Stream notes divided by the map's total hit-object count.
    pub stream_density: f64,
    /// 1.0 = perfectly even spacing inside streams; lower = more jitter.
    pub bpm_consistency: f64,
}
21
22impl Stream {
23 pub fn new(map: Beatmap) -> Self {
29 Self { map }
30 }
31
32 pub fn analyze(&mut self) -> StreamAnalysis {
45 let bpm = bpm(
46 self.map.hit_objects.last_mut(),
47 &self.map.control_points.timing_points,
48 );
49 let beat_length = 60.0 / bpm * 1000.0;
50 let expected_stream_interval = beat_length / 4.0; let hit_objects = &self.map.hit_objects;
53
54 let (consecutive_notes, bpm_variations) =
55 self.calculate_consecutive_notes(hit_objects, expected_stream_interval);
56
57 let bursts_amount = consecutive_notes
58 .iter()
59 .filter(|&&len| len >= 3 && len <= 5)
60 .count();
61
62 let short_streams_amount = consecutive_notes
63 .iter()
64 .filter(|&&len| len >= 6 && len < 10)
65 .count();
66 let medium_streams_amount = consecutive_notes
67 .iter()
68 .filter(|&&len| len >= 10 && len < 20)
69 .count();
70 let long_streams_amount = consecutive_notes.iter().filter(|&&len| len >= 20).count();
71
72 let streams_lengths: Vec<usize> = consecutive_notes
76 .iter()
77 .filter(|&&len| len >= 6)
78 .map(|&len| len)
79 .collect();
80
81 let total_stream_notes: usize = streams_lengths.iter().sum();
82 let max_stream_length = *streams_lengths.iter().max().unwrap_or(&0);
83
84 let total_streams_amount =
85 short_streams_amount + medium_streams_amount + long_streams_amount;
86
87 let stream_density = total_stream_notes as f64 / hit_objects.len() as f64;
88
89 let bpm_consistency = if !bpm_variations.is_empty() {
91 1.0 - (bpm_variations.iter().sum::<f64>() / bpm_variations.len() as f64)
92 / expected_stream_interval
93 } else {
94 0.0
95 };
96
97 let average_stream_length = if total_streams_amount > 0 {
98 total_stream_notes as f64 / total_streams_amount as f64
99 } else {
100 0.0
101 };
102
103 let stream_variety = (medium_streams_amount * 2 + long_streams_amount * 3) as f64
104 / (short_streams_amount + medium_streams_amount + long_streams_amount).max(1) as f64;
105
106 let long_stream_ratio = long_streams_amount as f64 / total_streams_amount.max(1) as f64;
107
108 let overall_confidence = (stream_density * 0.3
109 + bpm_consistency * 0.2
110 + stream_variety * 0.2
111 + long_stream_ratio * 0.2
112 + (average_stream_length / 5.0).min(1.0) * 0.2)
113 .min(1.0);
114
115 StreamAnalysis {
116 overall_confidence,
117 short_streams: short_streams_amount,
118 medium_streams: medium_streams_amount,
119 long_streams: long_streams_amount,
120 max_stream_length,
121 stream_density,
122 bpm_consistency,
123 }
124 }
125
126 fn calculate_consecutive_notes(
127 &self,
128 hit_objects: &[HitObject],
129 expected_interval: f64,
130 ) -> (Vec<usize>, Vec<f64>) {
131 let mut stream_lengths = Vec::new();
132 let mut current_stream = VecDeque::new();
133 let mut bpm_variations = Vec::new();
134 let tolerance = 0.10; for pair in hit_objects.windows(2) {
143 let time_diff = pair[1].start_time - pair[0].start_time;
144
145 if (time_diff - expected_interval).abs() / expected_interval <= tolerance {
147 current_stream.push_back(time_diff);
148 if current_stream.len() > 1 {
149 let prev_diff = current_stream[current_stream.len() - 2];
150 bpm_variations.push((time_diff - prev_diff).abs());
151 }
152 } else if !current_stream.is_empty() {
153 stream_lengths.push(current_stream.len());
154 current_stream.clear();
155 }
156 }
157
158 if !current_stream.is_empty() {
159 stream_lengths.push(current_stream.len());
160 }
161
162 (stream_lengths, bpm_variations)
163 }
164}