1use serde::{Deserialize, Serialize};
11use std::path::{Path, PathBuf};
12use std::time::{SystemTime, UNIX_EPOCH};
13use tokio::io::AsyncWriteExt;
14use tokio::sync::mpsc;
15
/// One tool-invocation measurement, serialized as a single JSONL line.
///
/// `session_id` and `seq` were added after the initial schema;
/// `#[serde(default)]` lets older log lines without those keys still
/// deserialize (see `test_metric_event_backward_compat_parse`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetricEvent {
    /// Timestamp in milliseconds since the Unix epoch (see [`unix_ms`]).
    pub ts: u64,
    /// Tool name; `&'static str` because tool names are compile-time constants.
    pub tool: &'static str,
    /// Wall-clock duration of the invocation, in milliseconds.
    pub duration_ms: u64,
    /// Size of the tool's output, in characters.
    pub output_chars: usize,
    /// Depth of the path parameter — presumably computed with
    /// [`path_component_count`]; confirm at the call site.
    pub param_path_depth: usize,
    /// Caller-supplied `max_depth` parameter, if one was given.
    pub max_depth: Option<u32>,
    /// Outcome tag, e.g. "ok" or "error" (see tests).
    pub result: &'static str,
    /// Error category when `result` is "error"; `None` on success.
    pub error_type: Option<String>,
    /// Session identifier ("<millis>-<counter>" per `test_session_id_format`);
    /// optional for backward compatibility with older log lines.
    #[serde(default)]
    pub session_id: Option<String>,
    /// Per-session sequence number; optional for backward compatibility.
    #[serde(default)]
    pub seq: Option<u32>,
}
32
/// Cheap, cloneable handle for emitting [`MetricEvent`]s to the background
/// writer task over an unbounded channel.
#[derive(Clone)]
pub struct MetricsSender(pub tokio::sync::mpsc::UnboundedSender<MetricEvent>);
36
37impl MetricsSender {
38 pub fn send(&self, event: MetricEvent) {
39 let _ = self.0.send(event);
40 }
41}
42
/// Background-task state: drains the metrics channel and appends events to
/// per-day JSONL files under `base_dir`.
pub struct MetricsWriter {
    /// Receiving end of the unbounded metrics channel.
    rx: tokio::sync::mpsc::UnboundedReceiver<MetricEvent>,
    /// Directory where `metrics-YYYY-MM-DD.jsonl` files are written.
    base_dir: PathBuf,
}
48
49impl MetricsWriter {
50 pub fn new(
51 rx: tokio::sync::mpsc::UnboundedReceiver<MetricEvent>,
52 base_dir: Option<PathBuf>,
53 ) -> Self {
54 let dir = base_dir.unwrap_or_else(xdg_metrics_dir);
55 Self { rx, base_dir: dir }
56 }
57
58 pub async fn run(mut self) {
59 cleanup_old_files(&self.base_dir).await;
60 let mut current_date = current_date_str();
61 let mut current_file: Option<PathBuf> = None;
62
63 loop {
64 let mut batch = Vec::new();
65 if let Some(event) = self.rx.recv().await {
66 batch.push(event);
67 for _ in 0..99 {
68 match self.rx.try_recv() {
69 Ok(e) => batch.push(e),
70 Err(
71 mpsc::error::TryRecvError::Empty
72 | mpsc::error::TryRecvError::Disconnected,
73 ) => break,
74 }
75 }
76 } else {
77 break;
78 }
79
80 let new_date = current_date_str();
81 if new_date != current_date {
82 current_date = new_date;
83 current_file = None;
84 }
85
86 if current_file.is_none() {
87 current_file = Some(rotate_path(&self.base_dir, ¤t_date));
88 }
89
90 let path = current_file.as_ref().unwrap();
91
92 if let Some(parent) = path.parent()
94 && !parent.as_os_str().is_empty()
95 {
96 tokio::fs::create_dir_all(parent).await.ok();
97 }
98
99 let file = tokio::fs::OpenOptions::new()
101 .create(true)
102 .append(true)
103 .open(path)
104 .await;
105
106 if let Ok(mut file) = file {
107 for event in batch {
108 if let Ok(mut json) = serde_json::to_string(&event) {
109 json.push('\n');
110 let _ = file.write_all(json.as_bytes()).await;
111 }
112 }
113 let _ = file.flush().await;
114 }
115 }
116 }
117}
118
/// Returns the current wall-clock time as milliseconds since the Unix epoch.
///
/// A clock set before the epoch yields 0 (via `unwrap_or_default`); a value
/// too large for `u64` (far future) saturates to `u64::MAX`.
#[must_use]
pub fn unix_ms() -> u64 {
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default();
    u64::try_from(since_epoch.as_millis()).unwrap_or(u64::MAX)
}
129
/// Counts the path components of `path` (as defined by `std::path`:
/// root, `.`/`..`, and normal segments each count as one).
#[must_use]
pub fn path_component_count(path: &str) -> usize {
    Path::new(path).components().fold(0, |count, _| count + 1)
}
135
/// Resolves the default metrics directory per the XDG base-dir convention:
/// `$XDG_DATA_HOME/code-analyze-mcp` when set and non-empty, otherwise
/// `$HOME/.local/share/code-analyze-mcp`, otherwise the current directory.
fn xdg_metrics_dir() -> PathBuf {
    match std::env::var("XDG_DATA_HOME") {
        // An empty XDG_DATA_HOME counts as unset, per the XDG spec.
        Ok(xdg) if !xdg.is_empty() => PathBuf::from(xdg).join("code-analyze-mcp"),
        _ => match std::env::var("HOME") {
            Ok(home) => [home.as_str(), ".local", "share", "code-analyze-mcp"]
                .iter()
                .collect(),
            Err(_) => PathBuf::from("."),
        },
    }
}
152
/// Builds the per-day log file path: `<base_dir>/metrics-<date_str>.jsonl`.
fn rotate_path(base_dir: &Path, date_str: &str) -> PathBuf {
    let file_name = ["metrics-", date_str, ".jsonl"].concat();
    base_dir.join(file_name)
}
156
157async fn cleanup_old_files(base_dir: &Path) {
158 let now_days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);
159
160 let Ok(mut entries) = tokio::fs::read_dir(base_dir).await else {
161 return;
162 };
163
164 loop {
165 match entries.next_entry().await {
166 Ok(Some(entry)) => {
167 let path = entry.path();
168 let file_name = match path.file_name() {
169 Some(n) => n.to_string_lossy().into_owned(),
170 None => continue,
171 };
172
173 if !file_name.starts_with("metrics-")
175 || std::path::Path::new(&*file_name)
176 .extension()
177 .is_none_or(|e| !e.eq_ignore_ascii_case("jsonl"))
178 {
179 continue;
180 }
181 let date_part = &file_name[8..file_name.len() - 6];
182 if date_part.len() != 10
183 || date_part.as_bytes().get(4) != Some(&b'-')
184 || date_part.as_bytes().get(7) != Some(&b'-')
185 {
186 continue;
187 }
188 let Ok(year) = date_part[0..4].parse::<u32>() else {
189 continue;
190 };
191 let Ok(month) = date_part[5..7].parse::<u32>() else {
192 continue;
193 };
194 let Ok(day) = date_part[8..10].parse::<u32>() else {
195 continue;
196 };
197 if month == 0 || month > 12 || day == 0 || day > 31 {
198 continue;
199 }
200
201 let file_days = date_to_days_since_epoch(year, month, day);
202 if now_days > file_days && (now_days - file_days) > 30 {
203 let _ = tokio::fs::remove_file(&path).await;
204 }
205 }
206 Ok(None) => break,
207 Err(e) => {
208 tracing::warn!("error reading metrics directory entry: {e}");
209 }
210 }
211 }
212}
213
/// Converts a Gregorian civil date to whole days since 1970-01-01.
///
/// This is the classic days-from-civil algorithm: years are re-based to
/// start in March so leap days land at the end of the year, and dates are
/// counted within 400-year eras (146,097 days each). Dates before the epoch
/// saturate to 0.
fn date_to_days_since_epoch(y: u32, m: u32, d: u32) -> u32 {
    // March-based year/month: Jan and Feb belong to the previous year.
    let (year, month) = if m <= 2 { (y - 1, m + 9) } else { (y, m - 3) };
    let era = year / 400;
    let year_of_era = year - era * 400; // [0, 399]
    // Day within the March-based year; (153*m + 2)/5 encodes month lengths.
    let day_of_year = (153 * month + 2) / 5 + d - 1;
    let day_of_era =
        year_of_era * 365 + year_of_era / 4 - year_of_era / 100 + day_of_year;
    // 719_468 = days from 0000-03-01 to 1970-01-01.
    (era * 146_097 + day_of_era).saturating_sub(719_468)
}
226
/// Returns today's UTC date formatted as `YYYY-MM-DD`.
///
/// Inverse of [`date_to_days_since_epoch`]: the classic civil-from-days
/// algorithm, counting within 400-year Gregorian eras (146,097 days each).
#[must_use]
pub fn current_date_str() -> String {
    // Whole days since 1970-01-01 (UTC).
    let days = u32::try_from(unix_ms() / 86_400_000).unwrap_or(u32::MAX);
    // Re-base the epoch to 0000-03-01 (719_468 days earlier) so leap days
    // fall at the end of each March-based year.
    let z = days + 719_468;
    let era = z / 146_097;
    let doe = z - era * 146_097; // day-of-era, [0, 146_096]
    let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365; // year-of-era, [0, 399]
    let y = yoe + era * 400;
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // day within March-based year
    let mp = (5 * doy + 2) / 153; // March-based month, [0, 11]
    let d = doy - (153 * mp + 2) / 5 + 1; // day of month, [1, 31]
    let m = if mp < 10 { mp + 3 } else { mp - 9 }; // civil month, [1, 12]
    let y = if m <= 2 { y + 1 } else { y }; // Jan/Feb belong to the next civil year
    format!("{y:04}-{m:02}-{d:02}")
}
243
#[cfg(test)]
mod tests {
    use super::*;

    // Known-value anchors for the days-from-civil conversion: the epoch
    // itself, a modern date, and a leap day.
    #[test]
    fn test_date_to_days_since_epoch_known_dates() {
        assert_eq!(date_to_days_since_epoch(1970, 1, 1), 0);
        assert_eq!(date_to_days_since_epoch(2020, 1, 1), 18_262);
        assert_eq!(date_to_days_since_epoch(2000, 2, 29), 11_016);
    }

    // Shape-only check: "YYYY-MM-DD" with a numeric year in a sane range
    // (exact value depends on the wall clock, so it can't be pinned).
    #[test]
    fn test_current_date_str_format() {
        let s = current_date_str();
        assert_eq!(s.len(), 10, "date string must be 10 chars: {s}");
        assert_eq!(s.as_bytes()[4], b'-', "char at index 4 must be '-': {s}");
        assert_eq!(s.as_bytes()[7], b'-', "char at index 7 must be '-': {s}");
        let year: u32 = s[0..4].parse().expect("year must be numeric");
        assert!(year >= 2020 && year <= 2100, "unexpected year {year}");
    }

    // End-to-end: three queued events, channel closed, then run() must
    // drain everything into exactly one daily file with one line per event.
    #[tokio::test]
    async fn test_metrics_writer_batching() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<MetricEvent>();
        let writer = MetricsWriter::new(rx, Some(dir.path().to_path_buf()));

        let make_event = || MetricEvent {
            ts: unix_ms(),
            tool: "analyze_directory",
            duration_ms: 1,
            output_chars: 10,
            param_path_depth: 1,
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: None,
            seq: None,
        };

        tx.send(make_event()).unwrap();
        tx.send(make_event()).unwrap();
        tx.send(make_event()).unwrap();
        // Dropping the only sender lets run() terminate after draining.
        drop(tx);

        writer.run().await;

        let entries: Vec<_> = std::fs::read_dir(dir.path())
            .unwrap()
            .filter_map(|e| e.ok())
            .filter(|e| {
                e.path()
                    .extension()
                    .and_then(|x| x.to_str())
                    .map(|x| x.eq_ignore_ascii_case("jsonl"))
                    .unwrap_or(false)
            })
            .collect();
        assert_eq!(entries.len(), 1, "expected exactly 1 .jsonl file");
        let content = std::fs::read_to_string(entries[0].path()).unwrap();
        let lines: Vec<&str> = content.lines().collect();
        assert_eq!(lines.len(), 3, "expected exactly 3 lines; got: {content}");
    }

    // Retention: a 1970-dated file is far past the 30-day cutoff and must
    // go; a file dated today must survive.
    #[tokio::test]
    async fn test_cleanup_old_files_deletes_old_keeps_recent() {
        use tempfile::TempDir;

        let dir = TempDir::new().unwrap();
        let old_file = dir.path().join("metrics-1970-01-01.jsonl");
        let today = current_date_str();
        let recent_file = dir.path().join(format!("metrics-{}.jsonl", today));
        std::fs::write(&old_file, "old\n").unwrap();
        std::fs::write(&recent_file, "recent\n").unwrap();

        cleanup_old_files(dir.path()).await;

        assert!(!old_file.exists(), "old file should have been deleted");
        assert!(recent_file.exists(), "today's file should have been kept");
    }

    // Serialization smoke test for the success shape.
    #[test]
    fn test_metric_event_serialization() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_directory",
            duration_ms: 42,
            output_chars: 100,
            param_path_depth: 3,
            max_depth: Some(2),
            result: "ok",
            error_type: None,
            session_id: None,
            seq: None,
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("analyze_directory"));
        assert!(json.contains(r#""result":"ok""#));
        assert!(json.contains(r#""output_chars":100"#));
    }

    // Serialization smoke test for the error shape (error_type populated).
    #[test]
    fn test_metric_event_serialization_error() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_directory",
            duration_ms: 5,
            output_chars: 0,
            param_path_depth: 3,
            max_depth: Some(3),
            result: "error",
            error_type: Some("invalid_params".to_string()),
            session_id: None,
            seq: None,
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains(r#""result":"error""#));
        assert!(json.contains(r#""error_type":"invalid_params""#));
        assert!(json.contains(r#""output_chars":0"#));
        assert!(json.contains(r#""max_depth":3"#));
    }

    // Exact round-trip of the full schema, pinning field order and the
    // session_id/seq additions.
    #[test]
    fn test_metric_event_new_fields_round_trip() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_file",
            duration_ms: 100,
            output_chars: 500,
            param_path_depth: 2,
            max_depth: Some(3),
            result: "ok",
            error_type: None,
            session_id: Some("1742468880123-42".to_string()),
            seq: Some(5),
        };
        let serialized = serde_json::to_string(&event).unwrap();
        let json_str = r#"{"ts":1700000000000,"tool":"analyze_file","duration_ms":100,"output_chars":500,"param_path_depth":2,"max_depth":3,"result":"ok","error_type":null,"session_id":"1742468880123-42","seq":5}"#;
        assert_eq!(serialized, json_str);
        let parsed: MetricEvent = serde_json::from_str(json_str).unwrap();
        assert_eq!(parsed.session_id, Some("1742468880123-42".to_string()));
        assert_eq!(parsed.seq, Some(5));
    }

    // Lines written before session_id/seq existed must still parse, with
    // the missing fields defaulting to None (#[serde(default)]).
    #[test]
    fn test_metric_event_backward_compat_parse() {
        let old_jsonl = r#"{"ts":1700000000000,"tool":"analyze_directory","duration_ms":42,"output_chars":100,"param_path_depth":3,"max_depth":2,"result":"ok","error_type":null}"#;
        let parsed: MetricEvent = serde_json::from_str(old_jsonl).unwrap();
        assert_eq!(parsed.tool, "analyze_directory");
        assert_eq!(parsed.session_id, None);
        assert_eq!(parsed.seq, None);
    }

    // session_id convention: "<13-digit millis>-<counter>".
    #[test]
    fn test_session_id_format() {
        let event = MetricEvent {
            ts: 1_700_000_000_000,
            tool: "analyze_symbol",
            duration_ms: 20,
            output_chars: 50,
            param_path_depth: 1,
            max_depth: None,
            result: "ok",
            error_type: None,
            session_id: Some("1742468880123-0".to_string()),
            seq: Some(0),
        };
        let sid = event.session_id.unwrap();
        assert!(sid.contains('-'), "session_id should contain a dash");
        let parts: Vec<&str> = sid.split('-').collect();
        assert_eq!(parts.len(), 2, "session_id should have exactly 2 parts");
        assert!(parts[0].len() == 13, "millis part should be 13 digits");
    }
}