1use serde::{Deserialize, Serialize};
11use std::path::{Path, PathBuf};
12use std::time::{SystemTime, UNIX_EPOCH};
13use tokio::io::AsyncWriteExt;
14use tokio::sync::mpsc;
15
/// One structured metrics record; serialized as a single JSONL line by
/// `MetricsWriter`.
///
/// The `&'static str` fields hold compile-time labels, so borrowed
/// deserialization only works from `'static` input (as the tests do with
/// string literals).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricEvent {
    // Timestamp in milliseconds since the Unix epoch (see `unix_ms`).
    pub ts: u64,
    // Name of the tool that produced this event, e.g. "analyze_directory".
    pub tool: &'static str,
    // Wall-clock duration of the tool call, in milliseconds.
    pub duration_ms: u64,
    // Size of the tool's output, in characters.
    pub output_chars: usize,
    // Component count of the request's path parameter
    // (presumably computed via `path_component_count` — confirm at call site).
    pub param_path_depth: usize,
    // Caller-supplied `max_depth` parameter, when present.
    pub max_depth: Option<u32>,
    // Outcome label; tests use "ok" and "error".
    pub result: &'static str,
    // Error classification when the call failed (see `error_code_to_type`).
    pub error_type: Option<String>,
    // `#[serde(default)]` keeps pre-existing JSONL lines (without this
    // field) parseable as `None`.
    #[serde(default)]
    pub session_id: Option<String>,
    // Per-session sequence number; optional for the same backward-compat
    // reason as `session_id`.
    #[serde(default)]
    pub seq: Option<u32>,
}
32
/// Cheaply cloneable sending half of the metrics channel; hand one to each
/// producer and pair it with a single `MetricsWriter` draining the receiver.
#[derive(Clone)]
pub struct MetricsSender(pub tokio::sync::mpsc::UnboundedSender<MetricEvent>);
36
37impl MetricsSender {
38 pub fn send(&self, event: MetricEvent) {
39 let _ = self.0.send(event);
40 }
41}
42
/// Background writer state: drains `MetricEvent`s from `rx` and appends
/// them to date-stamped JSONL files under `base_dir`.
pub struct MetricsWriter {
    // Receiving half of the channel fed by `MetricsSender`.
    rx: tokio::sync::mpsc::UnboundedReceiver<MetricEvent>,
    // Directory holding the `metrics-YYYY-MM-DD.jsonl` files.
    base_dir: PathBuf,
}
48
49impl MetricsWriter {
50 pub fn new(
51 rx: tokio::sync::mpsc::UnboundedReceiver<MetricEvent>,
52 base_dir: Option<PathBuf>,
53 ) -> Self {
54 let dir = base_dir.unwrap_or_else(xdg_metrics_dir);
55 Self { rx, base_dir: dir }
56 }
57
58 pub async fn run(mut self) {
59 cleanup_old_files(&self.base_dir).await;
60 let mut current_date = current_date_str();
61 let mut current_file: Option<PathBuf> = None;
62
63 loop {
64 let mut batch = Vec::new();
65 if let Some(event) = self.rx.recv().await {
66 batch.push(event);
67 for _ in 0..99 {
68 match self.rx.try_recv() {
69 Ok(e) => batch.push(e),
70 Err(
71 mpsc::error::TryRecvError::Empty
72 | mpsc::error::TryRecvError::Disconnected,
73 ) => break,
74 }
75 }
76 } else {
77 break;
78 }
79
80 let new_date = current_date_str();
81 if new_date != current_date {
82 current_date = new_date;
83 current_file = None;
84 }
85
86 if current_file.is_none() {
87 current_file = Some(rotate_path(&self.base_dir, ¤t_date));
88 }
89
90 let path = current_file.as_ref().unwrap();
91
92 if let Some(parent) = path.parent()
94 && !parent.as_os_str().is_empty()
95 {
96 tokio::fs::create_dir_all(parent).await.ok();
97 }
98
99 let file = tokio::fs::OpenOptions::new()
101 .create(true)
102 .append(true)
103 .open(path)
104 .await;
105
106 if let Ok(mut file) = file {
107 for event in batch {
108 if let Ok(mut json) = serde_json::to_string(&event) {
109 json.push('\n');
110 let _ = file.write_all(json.as_bytes()).await;
111 }
112 }
113 let _ = file.flush().await;
114 }
115 }
116 }
117}
118
/// Current wall-clock time as milliseconds since the Unix epoch.
///
/// A clock set before the epoch yields 0 (via `unwrap_or_default`); a
/// millisecond count too large for `u64` clamps to `u64::MAX`.
#[must_use]
pub fn unix_ms() -> u64 {
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default();
    u64::try_from(since_epoch.as_millis()).unwrap_or(u64::MAX)
}
129
/// Counts the components of `path` as interpreted by [`Path`].
///
/// Note that the root (`/`) and `.`/`..` each count as one component, and
/// an empty string has zero components.
#[must_use]
pub fn path_component_count(path: &str) -> usize {
    let mut count = 0;
    for _component in Path::new(path).components() {
        count += 1;
    }
    count
}
135
136#[must_use]
138pub fn error_code_to_type(code: rmcp::model::ErrorCode) -> &'static str {
139 match code {
140 rmcp::model::ErrorCode::PARSE_ERROR => "parse",
141 rmcp::model::ErrorCode::INVALID_PARAMS => "invalid_params",
142 _ => "unknown",
143 }
144}
145
146fn xdg_metrics_dir() -> PathBuf {
147 if let Ok(xdg_data_home) = std::env::var("XDG_DATA_HOME")
148 && !xdg_data_home.is_empty()
149 {
150 return PathBuf::from(xdg_data_home).join("code-analyze-mcp");
151 }
152
153 if let Ok(home) = std::env::var("HOME") {
154 PathBuf::from(home)
155 .join(".local")
156 .join("share")
157 .join("code-analyze-mcp")
158 } else {
159 PathBuf::from(".")
160 }
161}
162
/// Builds the rotated file path `<base_dir>/metrics-<date_str>.jsonl`.
fn rotate_path(base_dir: &Path, date_str: &str) -> PathBuf {
    let file_name = format!("metrics-{date_str}.jsonl");
    base_dir.join(file_name)
}
166
/// Deletes `metrics-YYYY-MM-DD.jsonl` files in `base_dir` that are more
/// than 30 days old; everything else is left untouched. Entirely
/// best-effort: an unreadable directory or failed delete is ignored.
async fn cleanup_old_files(base_dir: &Path) {
    // Today as whole days since the Unix epoch (86_400_000 ms per day).
    let now_days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);

    let Ok(mut entries) = tokio::fs::read_dir(base_dir).await else {
        return;
    };

    loop {
        match entries.next_entry().await {
            Ok(Some(entry)) => {
                let path = entry.path();
                let file_name = match path.file_name() {
                    Some(n) => n.to_string_lossy().into_owned(),
                    None => continue,
                };

                // Only consider files shaped like our own output:
                // "metrics-" prefix plus a (case-insensitive) .jsonl extension.
                if !file_name.starts_with("metrics-")
                    || std::path::Path::new(&*file_name)
                        .extension()
                        .is_none_or(|e| !e.eq_ignore_ascii_case("jsonl"))
                {
                    continue;
                }
                // Byte slice between the 8-byte "metrics-" prefix and the
                // 6-byte ".jsonl" suffix; both bounds fall on ASCII, so this
                // cannot split a multi-byte character.
                let date_part = &file_name[8..file_name.len() - 6];
                // Require the exact YYYY-MM-DD shape before numeric parsing.
                if date_part.len() != 10
                    || date_part.as_bytes().get(4) != Some(&b'-')
                    || date_part.as_bytes().get(7) != Some(&b'-')
                {
                    continue;
                }
                let Ok(year) = date_part[0..4].parse::<u32>() else {
                    continue;
                };
                let Ok(month) = date_part[5..7].parse::<u32>() else {
                    continue;
                };
                let Ok(day) = date_part[8..10].parse::<u32>() else {
                    continue;
                };
                // Coarse range check; date_to_days_since_epoch assumes a
                // plausible civil date.
                if month == 0 || month > 12 || day == 0 || day > 31 {
                    continue;
                }

                // Strictly-older-than-30-days files are removed; a file
                // exactly 30 days old is kept.
                let file_days = date_to_days_since_epoch(year, month, day);
                if now_days > file_days && (now_days - file_days) > 30 {
                    let _ = tokio::fs::remove_file(&path).await;
                }
            }
            Ok(None) => break,
            Err(e) => {
                // NOTE(review): this arm logs and retries without breaking.
                // Confirm tokio's `next_entry` makes progress after an
                // error; otherwise a persistent failure spins this loop.
                tracing::warn!("error reading metrics directory entry: {e}");
            }
        }
    }
}
223
/// Converts a civil (proleptic Gregorian) date to whole days since
/// 1970-01-01, using the era-based `days_from_civil` algorithm.
/// Dates before the epoch saturate to 0.
fn date_to_days_since_epoch(year: u32, month: u32, day: u32) -> u32 {
    // Shift to a March-based year so the leap day lands at year end.
    let (shifted_year, shifted_month) = if month <= 2 {
        (year - 1, month + 9)
    } else {
        (year, month - 3)
    };
    let era = shifted_year / 400; // 400-year Gregorian cycles
    let year_of_era = shifted_year % 400;
    let day_of_year = (153 * shifted_month + 2) / 5 + day - 1;
    let day_of_era = year_of_era * 365 + year_of_era / 4 - year_of_era / 100 + day_of_year;
    // 719_468 = days from 0000-03-01 to 1970-01-01.
    (era * 146_097 + day_of_era).saturating_sub(719_468)
}
236
237#[must_use]
239pub fn current_date_str() -> String {
240 let days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);
241 let z = days + 719_468;
242 let era = z / 146_097;
243 let doe = z - era * 146_097;
244 let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365;
245 let y = yoe + era * 400;
246 let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
247 let mp = (5 * doy + 2) / 153;
248 let d = doy - (153 * mp + 2) / 5 + 1;
249 let m = if mp < 10 { mp + 3 } else { mp - 9 };
250 let y = if m <= 2 { y + 1 } else { y };
251 format!("{y:04}-{m:02}-{d:02}")
252}
253
#[cfg(test)]
mod tests {
    use super::*;

    /// Test-only inverse of `date_to_days_since_epoch`: renders a day count
    /// as `YYYY-MM-DD` (same era-based algorithm as `current_date_str`),
    /// so fixtures can be derived from "now" instead of hard-coded.
    fn days_to_date_str(days: u32) -> String {
        let z = days + 719_468;
        let era = z / 146_097;
        let doe = z % 146_097;
        let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365;
        let y = yoe + era * 400;
        let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
        let mp = (5 * doy + 2) / 153;
        let d = doy - (153 * mp + 2) / 5 + 1;
        let m = if mp < 10 { mp + 3 } else { mp - 9 };
        let y = if m <= 2 { y + 1 } else { y };
        format!("{y:04}-{m:02}-{d:02}")
    }

    #[test]
    fn test_date_to_days_since_epoch_known_dates() {
        assert_eq!(date_to_days_since_epoch(1970, 1, 1), 0);
        assert_eq!(date_to_days_since_epoch(2020, 1, 1), 18_262);
        assert_eq!(date_to_days_since_epoch(2000, 2, 29), 11_016);
    }

    #[test]
    fn test_current_date_str_format() {
        let s = current_date_str();
        assert_eq!(s.len(), 10, "date string must be 10 chars: {s}");
        assert_eq!(s.as_bytes()[4], b'-', "char at index 4 must be '-': {s}");
        assert_eq!(s.as_bytes()[7], b'-', "char at index 7 must be '-': {s}");
        let year: u32 = s[0..4].parse().expect("year must be numeric");
        assert!(year >= 2020 && year <= 2100, "unexpected year {year}");
    }

    #[tokio::test]
    async fn test_metrics_writer_batching() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<MetricEvent>();
        let writer = MetricsWriter::new(rx, Some(dir.path().to_path_buf()));

        let make_event = || MetricEvent {
            ts: unix_ms(),
            tool: "analyze_directory",
            duration_ms: 1,
            output_chars: 10,
            param_path_depth: 1,
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: None,
            seq: None,
        };

        tx.send(make_event()).unwrap();
        tx.send(make_event()).unwrap();
        tx.send(make_event()).unwrap();
        // Dropping the sender closes the channel so run() terminates.
        drop(tx);

        writer.run().await;

        let entries: Vec<_> = std::fs::read_dir(dir.path())
            .unwrap()
            .filter_map(|e| e.ok())
            .filter(|e| {
                e.path()
                    .extension()
                    .and_then(|x| x.to_str())
                    .map(|x| x.eq_ignore_ascii_case("jsonl"))
                    .unwrap_or(false)
            })
            .collect();
        assert_eq!(entries.len(), 1, "expected exactly 1 .jsonl file");
        let content = std::fs::read_to_string(entries[0].path()).unwrap();
        let lines: Vec<&str> = content.lines().collect();
        assert_eq!(lines.len(), 3, "expected exactly 3 lines; got: {content}");
    }

    #[tokio::test]
    async fn test_cleanup_old_files_deletes_old_keeps_recent() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        // BUG FIX: the fixture dates were hard-coded (2026-02-25 / -27),
        // making the outcome depend on the real current date — the test
        // would start failing once those dates aged past the 30-day
        // retention window. Derive them from "now" so the file ages are
        // always exactly 31 and 29 days.
        let now_days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);
        let old_file = dir.path().join(format!(
            "metrics-{}.jsonl",
            days_to_date_str(now_days.saturating_sub(31))
        ));
        let recent_file = dir.path().join(format!(
            "metrics-{}.jsonl",
            days_to_date_str(now_days.saturating_sub(29))
        ));
        std::fs::write(&old_file, "old\n").unwrap();
        std::fs::write(&recent_file, "recent\n").unwrap();

        cleanup_old_files(dir.path()).await;

        assert!(
            !old_file.exists(),
            "31-day-old file should have been deleted"
        );
        assert!(
            recent_file.exists(),
            "29-day-old file should have been kept"
        );
    }

    #[test]
    fn test_metric_event_serialization() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_directory",
            duration_ms: 42,
            output_chars: 100,
            param_path_depth: 3,
            max_depth: Some(2),
            result: "ok",
            error_type: None,
            session_id: None,
            seq: None,
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("analyze_directory"));
        assert!(json.contains(r#""result":"ok""#));
        assert!(json.contains(r#""output_chars":100"#));
    }

    #[test]
    fn test_metric_event_serialization_error() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_directory",
            duration_ms: 5,
            output_chars: 0,
            param_path_depth: 3,
            max_depth: Some(3),
            result: "error",
            error_type: Some("invalid_params".to_string()),
            session_id: None,
            seq: None,
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains(r#""result":"error""#));
        assert!(json.contains(r#""error_type":"invalid_params""#));
        assert!(json.contains(r#""output_chars":0"#));
        assert!(json.contains(r#""max_depth":3"#));
    }

    #[test]
    fn test_metric_event_new_fields_round_trip() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_file",
            duration_ms: 100,
            output_chars: 500,
            param_path_depth: 2,
            max_depth: Some(3),
            result: "ok",
            error_type: None,
            session_id: Some("1742468880123-42".to_string()),
            seq: Some(5),
        };
        let serialized = serde_json::to_string(&event).unwrap();
        let json_str = r#"{"ts":1700000000000,"tool":"analyze_file","duration_ms":100,"output_chars":500,"param_path_depth":2,"max_depth":3,"result":"ok","error_type":null,"session_id":"1742468880123-42","seq":5}"#;
        assert_eq!(serialized, json_str);
        let parsed: MetricEvent = serde_json::from_str(json_str).unwrap();
        assert_eq!(parsed.session_id, Some("1742468880123-42".to_string()));
        assert_eq!(parsed.seq, Some(5));
    }

    #[test]
    fn test_metric_event_backward_compat_parse() {
        // JSONL written before session_id/seq existed must still parse.
        let old_jsonl = r#"{"ts":1700000000000,"tool":"analyze_directory","duration_ms":42,"output_chars":100,"param_path_depth":3,"max_depth":2,"result":"ok","error_type":null}"#;
        let parsed: MetricEvent = serde_json::from_str(old_jsonl).unwrap();
        assert_eq!(parsed.tool, "analyze_directory");
        assert_eq!(parsed.session_id, None);
        assert_eq!(parsed.seq, None);
    }

    #[test]
    fn test_session_id_format() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_symbol",
            duration_ms: 20,
            output_chars: 50,
            param_path_depth: 1,
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: Some("1742468880123-0".to_string()),
            seq: Some(0),
        };
        let sid = event.session_id.unwrap();
        assert!(sid.contains('-'), "session_id should contain a dash");
        let parts: Vec<&str> = sid.split('-').collect();
        assert_eq!(parts.len(), 2, "session_id should have exactly 2 parts");
        assert!(parts[0].len() == 13, "millis part should be 13 digits");
    }
}
444}