1use serde::{Deserialize, Serialize};
8use std::collections::HashMap;
9
/// Persisted usage-statistics cache, serialized as camelCase JSON.
///
/// Every field carries `#[serde(default)]` so cache files written by
/// older versions (missing newer fields) still deserialize cleanly.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StatsCache {
    /// Cache schema version.
    #[serde(default)]
    pub version: u32,

    /// Date the stats were last recomputed, if ever (stored as a string).
    #[serde(default)]
    pub last_computed_date: Option<String>,

    /// Per-day activity entries (messages / sessions / tool calls).
    #[serde(default)]
    pub daily_activity: Vec<DailyActivityEntry>,

    /// Per-day token counts broken down by model.
    #[serde(default)]
    pub daily_model_tokens: Vec<DailyModelTokens>,

    /// Aggregate usage keyed by model name.
    #[serde(default)]
    pub model_usage: HashMap<String, ModelUsage>,

    /// Total number of sessions recorded.
    #[serde(default)]
    pub total_sessions: u64,

    /// Total number of messages recorded.
    #[serde(default)]
    pub total_messages: u64,

    /// The session with the most messages, if any.
    #[serde(default)]
    pub longest_session: Option<LongestSession>,

    /// Date of the earliest recorded session, if any.
    #[serde(default)]
    pub first_session_date: Option<String>,

    /// Message counts bucketed by hour-of-day; keys are hour strings
    /// (e.g. "10", "14") per the serialized format used in tests below.
    #[serde(default)]
    pub hour_counts: HashMap<String, u64>,

    /// Cumulative time saved by speculation, in milliseconds.
    #[serde(default)]
    pub total_speculation_time_saved_ms: u64,
}
58
/// Activity counters for a single calendar day.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DailyActivityEntry {
    /// Day this entry covers (string-encoded date).
    pub date: String,
    /// Messages exchanged on this day.
    #[serde(default)]
    pub message_count: u64,
    /// Sessions active on this day.
    #[serde(default)]
    pub session_count: u64,
    /// Tool calls made on this day.
    #[serde(default)]
    pub tool_call_count: u64,
}
71
/// Token usage for a single day, broken down by model name.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DailyModelTokens {
    /// Day this entry covers (string-encoded date).
    pub date: String,
    /// Token count per model name for this day.
    #[serde(default)]
    pub tokens_by_model: HashMap<String, u64>,
}
80
/// Aggregate token/cost usage for one model.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ModelUsage {
    /// Fresh (non-cached) input tokens.
    #[serde(default)]
    pub input_tokens: u64,
    /// Generated output tokens.
    #[serde(default)]
    pub output_tokens: u64,
    /// Input tokens served from cache reads.
    #[serde(default)]
    pub cache_read_input_tokens: u64,
    /// Input tokens spent creating cache entries.
    #[serde(default)]
    pub cache_creation_input_tokens: u64,
    /// Number of web-search requests attributed to this model.
    #[serde(default)]
    pub web_search_requests: u64,
    /// Accumulated cost in US dollars.
    #[serde(default)]
    pub cost_usd: f64,
    /// Model context-window size in tokens (0 when unknown).
    #[serde(default)]
    pub context_window: u64,
    /// Model maximum output tokens (0 when unknown).
    #[serde(default)]
    pub max_output_tokens: u64,
}
102
103impl ModelUsage {
104 pub fn total_tokens(&self) -> u64 {
105 self.input_tokens + self.output_tokens
106 }
107
108 pub fn total_with_cache(&self) -> u64 {
109 self.input_tokens
110 + self.output_tokens
111 + self.cache_read_input_tokens
112 + self.cache_creation_input_tokens
113 }
114}
115
/// Record of the session with the highest message count.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LongestSession {
    /// Identifier of the longest session, if known.
    #[serde(default)]
    pub session_id: Option<String>,
    /// Number of messages in that session.
    #[serde(default)]
    pub message_count: u64,
    /// Date of that session, if known (string-encoded).
    #[serde(default)]
    pub date: Option<String>,
}
127
/// Aggregate activity totals for a single day.
///
/// NOTE(review): not referenced by any code visible in this file
/// (distinct from `DailyActivityEntry`); presumably consumed elsewhere —
/// confirm before removing.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DailyActivity {
    /// Combined token count for the day.
    #[serde(default)]
    pub tokens: u64,
    /// Input tokens for the day.
    #[serde(default)]
    pub input_tokens: u64,
    /// Output tokens for the day.
    #[serde(default)]
    pub output_tokens: u64,
    /// Messages exchanged during the day.
    #[serde(default)]
    pub messages: u64,
    /// Sessions active during the day.
    #[serde(default)]
    pub sessions: u64,
}
143
144impl StatsCache {
145 pub fn total_input_tokens(&self) -> u64 {
147 self.model_usage.values().map(|m| m.input_tokens).sum()
148 }
149
150 pub fn total_output_tokens(&self) -> u64 {
152 self.model_usage.values().map(|m| m.output_tokens).sum()
153 }
154
155 pub fn total_tokens(&self) -> u64 {
157 self.total_input_tokens() + self.total_output_tokens()
158 }
159
160 pub fn total_cache_read_tokens(&self) -> u64 {
162 self.model_usage
163 .values()
164 .map(|m| m.cache_read_input_tokens)
165 .sum()
166 }
167
168 pub fn total_cache_write_tokens(&self) -> u64 {
170 self.model_usage
171 .values()
172 .map(|m| m.cache_creation_input_tokens)
173 .sum()
174 }
175
176 pub fn session_count(&self) -> u64 {
178 self.total_sessions
179 }
180
181 pub fn message_count(&self) -> u64 {
183 self.total_messages
184 }
185
186 pub fn top_models(&self, n: usize) -> Vec<(&str, &ModelUsage)> {
188 let mut models: Vec<_> = self
189 .model_usage
190 .iter()
191 .filter(|(_, usage)| usage.total_tokens() > 0)
192 .map(|(k, v)| (k.as_str(), v))
193 .collect();
194 models.sort_by(|a, b| b.1.total_tokens().cmp(&a.1.total_tokens()));
195 models.truncate(n);
196 models
197 }
198
199 pub fn recent_daily(&self, n: usize) -> Vec<&DailyActivityEntry> {
201 let len = self.daily_activity.len();
202 if len <= n {
203 self.daily_activity.iter().collect()
204 } else {
205 self.daily_activity[len - n..].iter().collect()
206 }
207 }
208
209 pub fn cache_ratio(&self) -> f64 {
211 let cache_read = self.total_cache_read_tokens();
212 let total_input = self.total_input_tokens() + cache_read;
213 if total_input == 0 {
214 return 0.0;
215 }
216 cache_read as f64 / total_input as f64
217 }
218
219 pub const CONTEXT_WINDOW: u64 = 200_000;
221
222 pub fn calculate_context_saturation(
227 session_metadata: &[&crate::models::SessionMetadata],
228 last_n: usize,
229 ) -> ContextWindowStats {
230 if session_metadata.is_empty() {
231 return ContextWindowStats::default();
232 }
233
234 let mut sorted: Vec<_> = session_metadata
236 .iter()
237 .filter(|s| s.last_timestamp.is_some() && s.total_tokens > 0)
238 .collect();
239 sorted.sort_by(|a, b| b.last_timestamp.cmp(&a.last_timestamp));
240
241 let recent: Vec<_> = sorted.into_iter().take(last_n).collect();
243
244 if recent.is_empty() {
245 return ContextWindowStats::default();
246 }
247
248 let mut total_pct = 0.0;
250 let mut high_load_count = 0;
251 let mut peak_pct = 0.0;
252
253 for session in &recent {
254 let saturation_pct =
255 (session.total_tokens as f64 / Self::CONTEXT_WINDOW as f64) * 100.0;
256 total_pct += saturation_pct;
257
258 if saturation_pct > 85.0 {
259 high_load_count += 1;
260 }
261
262 if saturation_pct > peak_pct {
263 peak_pct = saturation_pct;
264 }
265 }
266
267 ContextWindowStats {
268 avg_saturation_pct: total_pct / recent.len() as f64,
269 high_load_count,
270 peak_saturation_pct: peak_pct,
271 }
272 }
273}
274
/// Result of [`StatsCache::calculate_context_saturation`].
#[derive(Debug, Clone, Default)]
pub struct ContextWindowStats {
    /// Mean context-window saturation across the sampled sessions, in percent.
    pub avg_saturation_pct: f64,

    /// Number of sampled sessions exceeding 85% saturation.
    pub high_load_count: usize,

    /// Highest saturation observed among the sampled sessions, in percent.
    pub peak_saturation_pct: f64,
}
287
#[cfg(test)]
mod tests {
    use super::*;

    // A default cache must be empty with zero token totals.
    #[test]
    fn test_stats_cache_defaults() {
        let stats = StatsCache::default();
        assert_eq!(stats.total_tokens(), 0);
        assert!(stats.model_usage.is_empty());
    }

    // total_tokens is input + output only (cache fields excluded).
    #[test]
    fn test_model_usage_total() {
        let usage = ModelUsage {
            input_tokens: 1000,
            output_tokens: 500,
            ..Default::default()
        };
        assert_eq!(usage.total_tokens(), 1500);
    }

    // 200 cache-read tokens out of (800 fresh + 200 cached) = ratio 0.2.
    #[test]
    fn test_cache_ratio() {
        let mut stats = StatsCache::default();
        stats.model_usage.insert(
            "test".to_string(),
            ModelUsage {
                input_tokens: 800,
                cache_read_input_tokens: 200,
                ..Default::default()
            },
        );
        assert!((stats.cache_ratio() - 0.2).abs() < 0.001);
    }

    // Models must come back sorted by total tokens, descending.
    #[test]
    fn test_top_models() {
        let mut stats = StatsCache::default();
        stats.model_usage.insert(
            "opus".to_string(),
            ModelUsage {
                input_tokens: 1000,
                output_tokens: 500,
                ..Default::default()
            },
        );
        stats.model_usage.insert(
            "sonnet".to_string(),
            ModelUsage {
                input_tokens: 2000,
                output_tokens: 1000,
                ..Default::default()
            },
        );

        let top = stats.top_models(2);
        assert_eq!(top[0].0, "sonnet");
        assert_eq!(top[1].0, "opus");
    }

    // Round-trip a realistic camelCase JSON payload; fields missing from
    // the payload must fall back to their serde defaults.
    #[test]
    fn test_parse_real_format() {
        let json = r#"{
            "version": 2,
            "lastComputedDate": "2026-01-31",
            "dailyActivity": [
                {"date": "2026-01-30", "messageCount": 100, "sessionCount": 5, "toolCallCount": 20}
            ],
            "modelUsage": {
                "claude-opus-4-5": {
                    "inputTokens": 1000,
                    "outputTokens": 500,
                    "cacheReadInputTokens": 200,
                    "cacheCreationInputTokens": 100
                }
            },
            "totalSessions": 10,
            "totalMessages": 1000,
            "hourCounts": {"10": 50, "14": 100}
        }"#;

        let stats: StatsCache = serde_json::from_str(json).unwrap();
        assert_eq!(stats.version, 2);
        assert_eq!(stats.total_sessions, 10);
        assert_eq!(stats.total_messages, 1000);
        assert_eq!(stats.daily_activity.len(), 1);
        assert_eq!(stats.total_input_tokens(), 1000);
        assert_eq!(stats.total_output_tokens(), 500);
    }

    // Five sessions at 25/50/75/85/95 percent saturation:
    // average = 66%, one session above the 85% high-load bar, peak = 95%.
    #[test]
    fn test_context_saturation_calculation() {
        use crate::models::SessionMetadata;
        use chrono::Utc;
        use std::path::PathBuf;

        let mut sessions = vec![];
        let now = Utc::now();

        // Timestamps are staggered one minute apart so ordering is deterministic.
        for (i, tokens) in [50_000u64, 100_000, 150_000, 170_000, 190_000]
            .iter()
            .enumerate()
        {
            let mut meta = SessionMetadata::from_path(
                PathBuf::from(format!("/test{}.jsonl", i)),
                "test".to_string(),
            );
            meta.total_tokens = *tokens;
            meta.last_timestamp = Some(now - chrono::Duration::seconds((4 - i) as i64 * 60));
            sessions.push(meta);
        }

        let refs: Vec<_> = sessions.iter().collect();
        let stats = StatsCache::calculate_context_saturation(&refs, 30);

        assert!((stats.avg_saturation_pct - 66.0).abs() < 1.0);

        // 170k tokens is exactly 85% — the check is strict `>`, so only
        // the 190k session counts as high load.
        assert_eq!(stats.high_load_count, 1);

        assert!((stats.peak_saturation_pct - 95.0).abs() < 1.0);
    }

    // An empty slice must yield default (all-zero) stats, not a panic.
    #[test]
    fn test_context_saturation_empty_sessions() {
        let stats = StatsCache::calculate_context_saturation(&[], 30);
        assert_eq!(stats.avg_saturation_pct, 0.0);
        assert_eq!(stats.high_load_count, 0);
    }

    // Requesting more sessions (30) than exist (3) averages over all of
    // them: (30 + 40 + 60) / 3 = 43.33%.
    #[test]
    fn test_context_saturation_fewer_than_requested() {
        use crate::models::SessionMetadata;
        use chrono::Utc;
        use std::path::PathBuf;

        let mut sessions = vec![];
        let now = Utc::now();

        for (i, tokens) in [60_000u64, 80_000, 120_000].iter().enumerate() {
            let mut meta = SessionMetadata::from_path(
                PathBuf::from(format!("/test{}.jsonl", i)),
                "test".to_string(),
            );
            meta.total_tokens = *tokens;
            meta.last_timestamp = Some(now - chrono::Duration::seconds((2 - i) as i64 * 60));
            sessions.push(meta);
        }

        let refs: Vec<_> = sessions.iter().collect();
        let stats = StatsCache::calculate_context_saturation(&refs, 30);

        assert!((stats.avg_saturation_pct - 43.33).abs() < 0.1);
    }
}