1use serde::{Deserialize, Serialize};
9use std::path::{Path, PathBuf};
10use std::time::{SystemTime, UNIX_EPOCH};
11use tokio::io::AsyncWriteExt;
12use tokio::sync::mpsc;
13
/// One metrics record; each event is serialized as a single JSONL line.
///
/// NOTE(review): `tool` and `result` are `&'static str`, so deserializing
/// only works when the input buffer is itself `'static` (as in the unit
/// tests, which parse string literals) — confirm no caller deserializes
/// from an owned buffer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricEvent {
    // Milliseconds since the Unix epoch (see `unix_ms`).
    pub ts: u64,
    // Name of the tool that produced the event, e.g. "analyze_directory".
    pub tool: &'static str,
    // Wall-clock duration of the tool call.
    pub duration_ms: u64,
    // Size of the tool's output, in characters.
    pub output_chars: usize,
    // Component count of the path parameter (see `path_component_count`).
    pub param_path_depth: usize,
    // The tool's `max_depth` parameter, if one was supplied.
    pub max_depth: Option<u32>,
    // Outcome label; "ok" or "error" per the tests below.
    pub result: &'static str,
    // Low-cardinality error label (see `error_code_to_type`); None on success.
    pub error_type: Option<String>,
    // `default` keeps old JSONL files (written before this field existed)
    // parseable: missing fields deserialize as None.
    #[serde(default)]
    pub session_id: Option<String>,
    // Per-session sequence number; also defaulted for backward compatibility.
    #[serde(default)]
    pub seq: Option<u32>,
}
30
31#[derive(Clone)]
33pub struct MetricsSender(pub tokio::sync::mpsc::UnboundedSender<MetricEvent>);
34
35impl MetricsSender {
36 pub fn send(&self, event: MetricEvent) {
37 let _ = self.0.send(event);
38 }
39}
40
41pub struct MetricsWriter {
43 rx: tokio::sync::mpsc::UnboundedReceiver<MetricEvent>,
44 base_dir: PathBuf,
45}
46
47impl MetricsWriter {
48 pub fn new(
49 rx: tokio::sync::mpsc::UnboundedReceiver<MetricEvent>,
50 base_dir: Option<PathBuf>,
51 ) -> Self {
52 let dir = base_dir.unwrap_or_else(xdg_metrics_dir);
53 Self { rx, base_dir: dir }
54 }
55
56 pub async fn run(mut self) {
57 cleanup_old_files(&self.base_dir).await;
58 let mut current_date = current_date_str();
59 let mut current_file: Option<PathBuf> = None;
60
61 loop {
62 let mut batch = Vec::new();
63 if let Some(event) = self.rx.recv().await {
64 batch.push(event);
65 for _ in 0..99 {
66 match self.rx.try_recv() {
67 Ok(e) => batch.push(e),
68 Err(
69 mpsc::error::TryRecvError::Empty
70 | mpsc::error::TryRecvError::Disconnected,
71 ) => break,
72 }
73 }
74 } else {
75 break;
76 }
77
78 let new_date = current_date_str();
79 if new_date != current_date {
80 current_date = new_date;
81 current_file = None;
82 }
83
84 if current_file.is_none() {
85 current_file = Some(rotate_path(&self.base_dir, ¤t_date));
86 }
87
88 let path = current_file.as_ref().unwrap();
89
90 if let Some(parent) = path.parent()
92 && !parent.as_os_str().is_empty()
93 {
94 tokio::fs::create_dir_all(parent).await.ok();
95 }
96
97 let file = tokio::fs::OpenOptions::new()
99 .create(true)
100 .append(true)
101 .open(path)
102 .await;
103
104 if let Ok(mut file) = file {
105 for event in batch {
106 if let Ok(mut json) = serde_json::to_string(&event) {
107 json.push('\n');
108 let _ = file.write_all(json.as_bytes()).await;
109 }
110 }
111 let _ = file.flush().await;
112 }
113 }
114 }
115}
116
/// Current wall-clock time as milliseconds since the Unix epoch.
///
/// Saturates to 0 if the clock reads before the epoch, and to `u64::MAX`
/// if the millisecond count ever overflows `u64`.
#[must_use]
pub fn unix_ms() -> u64 {
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default();
    u64::try_from(since_epoch.as_millis()).unwrap_or(u64::MAX)
}
127
/// Counts the components of `path` after std's normalization (repeated
/// separators and interior `.` segments are not counted).
#[must_use]
pub fn path_component_count(path: &str) -> usize {
    let components = Path::new(path).components();
    components.count()
}
133
/// Maps an MCP protocol error code to a short, low-cardinality label for the
/// `error_type` metrics field.
#[must_use]
pub fn error_code_to_type(code: rmcp::model::ErrorCode) -> &'static str {
    match code {
        rmcp::model::ErrorCode::PARSE_ERROR => "parse",
        rmcp::model::ErrorCode::INVALID_PARAMS => "invalid_params",
        // Everything else is lumped together to keep metric cardinality low.
        _ => "unknown",
    }
}
143
/// Resolves the metrics directory following the XDG convention:
/// `$XDG_DATA_HOME/code-analyze-mcp`, falling back to
/// `$HOME/.local/share/code-analyze-mcp`, then to the current directory.
fn xdg_metrics_dir() -> PathBuf {
    // A set-but-empty XDG_DATA_HOME is treated the same as unset.
    match std::env::var("XDG_DATA_HOME") {
        Ok(dir) if !dir.is_empty() => {
            return PathBuf::from(dir).join("code-analyze-mcp");
        }
        _ => {}
    }

    match std::env::var("HOME") {
        Ok(home) => [home.as_str(), ".local", "share", "code-analyze-mcp"]
            .iter()
            .collect(),
        // Last resort: write next to wherever the process is running.
        Err(_) => PathBuf::from("."),
    }
}
160
/// Builds the daily log path: `<base_dir>/metrics-<date_str>.jsonl`.
fn rotate_path(base_dir: &Path, date_str: &str) -> PathBuf {
    let mut path = base_dir.to_path_buf();
    path.push(format!("metrics-{date_str}.jsonl"));
    path
}
164
/// Best-effort deletion of `metrics-YYYY-MM-DD.jsonl` files strictly older
/// than 30 days. All I/O failures are ignored or logged; cleanup must never
/// prevent the writer from starting.
async fn cleanup_old_files(base_dir: &Path) {
    // Today's date as whole days since the Unix epoch.
    let now_days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);

    let Ok(mut entries) = tokio::fs::read_dir(base_dir).await else {
        return;
    };

    loop {
        match entries.next_entry().await {
            Ok(Some(entry)) => {
                let path = entry.path();
                let file_name = match path.file_name() {
                    Some(n) => n.to_string_lossy().into_owned(),
                    None => continue,
                };

                // Only touch files matching the writer's naming scheme
                // ("metrics-" prefix, ".jsonl" extension, case-insensitive).
                if !file_name.starts_with("metrics-")
                    || std::path::Path::new(&*file_name)
                        .extension()
                        .is_none_or(|e| !e.eq_ignore_ascii_case("jsonl"))
                {
                    continue;
                }
                // Slice out "YYYY-MM-DD" between the 8-byte ASCII "metrics-"
                // prefix and the 6-byte ASCII ".jsonl" suffix; both boundaries
                // therefore fall on char boundaries even for odd filenames.
                let date_part = &file_name[8..file_name.len() - 6];
                // The dash checks also guarantee the sub-slices below split at
                // ASCII bytes, so they cannot panic on char boundaries.
                if date_part.len() != 10
                    || date_part.as_bytes().get(4) != Some(&b'-')
                    || date_part.as_bytes().get(7) != Some(&b'-')
                {
                    continue;
                }
                let Ok(year) = date_part[0..4].parse::<u32>() else {
                    continue;
                };
                let Ok(month) = date_part[5..7].parse::<u32>() else {
                    continue;
                };
                let Ok(day) = date_part[8..10].parse::<u32>() else {
                    continue;
                };
                // Coarse validity check; exact calendar rules are unnecessary
                // for a retention cutoff.
                if month == 0 || month > 12 || day == 0 || day > 31 {
                    continue;
                }

                let file_days = date_to_days_since_epoch(year, month, day);
                // Strictly older than 30 days: delete, best-effort.
                if now_days > file_days && (now_days - file_days) > 30 {
                    let _ = tokio::fs::remove_file(&path).await;
                }
            }
            Ok(None) => break,
            Err(e) => {
                // NOTE(review): on a persistently failing `next_entry` this
                // loop retries forever; confirm such errors are transient or
                // consider breaking here instead.
                tracing::warn!("error reading metrics directory entry: {e}");
            }
        }
    }
}
221
/// Converts a Gregorian calendar date to whole days since 1970-01-01,
/// using the "days from civil" algorithm (years grouped into 400-year eras,
/// with the year shifted to start on March 1 so leap days fall last).
fn date_to_days_since_epoch(y: u32, m: u32, d: u32) -> u32 {
    // Shift Jan/Feb into the previous "civil" year.
    let year = if m <= 2 { y - 1 } else { y };
    let month = if m <= 2 { m + 9 } else { m - 3 };
    let era = year / 400;
    let year_of_era = year - era * 400;
    // (153*m + 2)/5 is the flattened month-offset table of the shifted year.
    let day_of_year = (153 * month + 2) / 5 + d - 1;
    let day_of_era = year_of_era * 365 + year_of_era / 4 - year_of_era / 100 + day_of_year;
    // 719_468 days separate 0000-03-01 from 1970-01-01; saturate for
    // pre-epoch inputs instead of underflowing.
    (era * 146_097 + day_of_era).saturating_sub(719_468)
}
234
235#[must_use]
237pub fn current_date_str() -> String {
238 let days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);
239 let z = days + 719_468;
240 let era = z / 146_097;
241 let doe = z - era * 146_097;
242 let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365;
243 let y = yoe + era * 400;
244 let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
245 let mp = (5 * doy + 2) / 153;
246 let d = doy - (153 * mp + 2) / 5 + 1;
247 let m = if mp < 10 { mp + 3 } else { mp - 9 };
248 let y = if m <= 2 { y + 1 } else { y };
249 format!("{y:04}-{m:02}-{d:02}")
250}
251
#[cfg(test)]
mod tests {
    use super::*;

    /// Inverse of `date_to_days_since_epoch`: formats a day count as
    /// `YYYY-MM-DD` (same civil-from-days algorithm as `current_date_str`).
    /// Lets fixtures be derived from "today" instead of hardcoded dates.
    fn days_to_date_str(days: u32) -> String {
        let z = days + 719_468;
        let era = z / 146_097;
        let doe = z - era * 146_097;
        let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365;
        let y = yoe + era * 400;
        let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
        let mp = (5 * doy + 2) / 153;
        let d = doy - (153 * mp + 2) / 5 + 1;
        let m = if mp < 10 { mp + 3 } else { mp - 9 };
        let y = if m <= 2 { y + 1 } else { y };
        format!("{y:04}-{m:02}-{d:02}")
    }

    #[test]
    fn test_date_to_days_since_epoch_known_dates() {
        assert_eq!(date_to_days_since_epoch(1970, 1, 1), 0);
        assert_eq!(date_to_days_since_epoch(2020, 1, 1), 18_262);
        assert_eq!(date_to_days_since_epoch(2000, 2, 29), 11_016);
    }

    #[test]
    fn test_days_date_round_trip_today() {
        // The two conversion helpers must be inverses of each other.
        let today = u32::try_from(unix_ms() / 86_400_000).unwrap();
        let s = days_to_date_str(today);
        let y: u32 = s[0..4].parse().unwrap();
        let m: u32 = s[5..7].parse().unwrap();
        let d: u32 = s[8..10].parse().unwrap();
        assert_eq!(date_to_days_since_epoch(y, m, d), today);
    }

    #[test]
    fn test_current_date_str_format() {
        let s = current_date_str();
        assert_eq!(s.len(), 10, "date string must be 10 chars: {s}");
        assert_eq!(s.as_bytes()[4], b'-', "char at index 4 must be '-': {s}");
        assert_eq!(s.as_bytes()[7], b'-', "char at index 7 must be '-': {s}");
        let year: u32 = s[0..4].parse().expect("year must be numeric");
        assert!((2020..=2100).contains(&year), "unexpected year {year}");
    }

    #[tokio::test]
    async fn test_metrics_writer_batching() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<MetricEvent>();
        let writer = MetricsWriter::new(rx, Some(dir.path().to_path_buf()));

        let make_event = || MetricEvent {
            ts: unix_ms(),
            tool: "analyze_directory",
            duration_ms: 1,
            output_chars: 10,
            param_path_depth: 1,
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: None,
            seq: None,
        };

        tx.send(make_event()).unwrap();
        tx.send(make_event()).unwrap();
        tx.send(make_event()).unwrap();
        // Dropping the sender lets `run` drain the queue and exit.
        drop(tx);

        writer.run().await;

        let entries: Vec<_> = std::fs::read_dir(dir.path())
            .unwrap()
            .filter_map(|e| e.ok())
            .filter(|e| {
                e.path()
                    .extension()
                    .and_then(|x| x.to_str())
                    .map(|x| x.eq_ignore_ascii_case("jsonl"))
                    .unwrap_or(false)
            })
            .collect();
        assert_eq!(entries.len(), 1, "expected exactly 1 .jsonl file");
        let content = std::fs::read_to_string(entries[0].path()).unwrap();
        let lines: Vec<&str> = content.lines().collect();
        assert_eq!(lines.len(), 3, "expected exactly 3 lines; got: {content}");
    }

    #[tokio::test]
    async fn test_cleanup_old_files_deletes_old_keeps_recent() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        // Derive fixture dates from "today" so the test does not rot over
        // time (previously hardcoded dates only worked near one calendar
        // day): 31 days crosses the 30-day retention cutoff, 29 does not.
        let today = u32::try_from(unix_ms() / 86_400_000).unwrap();
        let old_file = dir
            .path()
            .join(format!("metrics-{}.jsonl", days_to_date_str(today - 31)));
        let recent_file = dir
            .path()
            .join(format!("metrics-{}.jsonl", days_to_date_str(today - 29)));
        std::fs::write(&old_file, "old\n").unwrap();
        std::fs::write(&recent_file, "recent\n").unwrap();

        cleanup_old_files(dir.path()).await;

        assert!(
            !old_file.exists(),
            "31-day-old file should have been deleted"
        );
        assert!(
            recent_file.exists(),
            "29-day-old file should have been kept"
        );
    }

    #[test]
    fn test_metric_event_serialization() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_directory",
            duration_ms: 42,
            output_chars: 100,
            param_path_depth: 3,
            max_depth: Some(2),
            result: "ok",
            error_type: None,
            session_id: None,
            seq: None,
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("analyze_directory"));
        assert!(json.contains(r#""result":"ok""#));
        assert!(json.contains(r#""output_chars":100"#));
    }

    #[test]
    fn test_metric_event_serialization_error() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_directory",
            duration_ms: 5,
            output_chars: 0,
            param_path_depth: 3,
            max_depth: Some(3),
            result: "error",
            error_type: Some("invalid_params".to_string()),
            session_id: None,
            seq: None,
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains(r#""result":"error""#));
        assert!(json.contains(r#""error_type":"invalid_params""#));
        assert!(json.contains(r#""output_chars":0"#));
        assert!(json.contains(r#""max_depth":3"#));
    }

    #[test]
    fn test_metric_event_new_fields_round_trip() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_file",
            duration_ms: 100,
            output_chars: 500,
            param_path_depth: 2,
            max_depth: Some(3),
            result: "ok",
            error_type: None,
            session_id: Some("1742468880123-42".to_string()),
            seq: Some(5),
        };
        let serialized = serde_json::to_string(&event).unwrap();
        // Field order in the JSON is pinned to the struct declaration order.
        let json_str = r#"{"ts":1700000000000,"tool":"analyze_file","duration_ms":100,"output_chars":500,"param_path_depth":2,"max_depth":3,"result":"ok","error_type":null,"session_id":"1742468880123-42","seq":5}"#;
        assert_eq!(serialized, json_str);
        let parsed: MetricEvent = serde_json::from_str(json_str).unwrap();
        assert_eq!(parsed.session_id, Some("1742468880123-42".to_string()));
        assert_eq!(parsed.seq, Some(5));
    }

    #[test]
    fn test_metric_event_backward_compat_parse() {
        // Lines written before session_id/seq existed must still parse.
        let old_jsonl = r#"{"ts":1700000000000,"tool":"analyze_directory","duration_ms":42,"output_chars":100,"param_path_depth":3,"max_depth":2,"result":"ok","error_type":null}"#;
        let parsed: MetricEvent = serde_json::from_str(old_jsonl).unwrap();
        assert_eq!(parsed.tool, "analyze_directory");
        assert_eq!(parsed.session_id, None);
        assert_eq!(parsed.seq, None);
    }

    #[test]
    fn test_session_id_format() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_symbol",
            duration_ms: 20,
            output_chars: 50,
            param_path_depth: 1,
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: Some("1742468880123-0".to_string()),
            seq: Some(0),
        };
        let sid = event.session_id.unwrap();
        assert!(sid.contains('-'), "session_id should contain a dash");
        let parts: Vec<&str> = sid.split('-').collect();
        assert_eq!(parts.len(), 2, "session_id should have exactly 2 parts");
        assert!(parts[0].len() == 13, "millis part should be 13 digits");
    }
}
442}