use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader, Write};
use std::path::Path;

use serde::{Deserialize, Serialize};
use thiserror::Error;
use tracing::{info, instrument, warn};
use tracing_subscriber::{
    fmt::{self, format::FmtSpan},
    layer::SubscriberExt,
    util::SubscriberInitExt,
    EnvFilter,
};

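/// Errors that can occur while reading, writing, filtering, or rotating logs.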
#[derive(Error, Debug)]
pub enum LoggingError {
    #[error("Failed to read log file: {0}")]
    FileRead(String),

    #[error("Failed to write log file: {0}")]
    FileWrite(String),

    #[error("Invalid log format: {0}")]
    InvalidFormat(String),

    #[error("Filter compilation failed: {0}")]
    FilterCompilation(String),
}

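/// Convenience alias for `Result` values produced by this module.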
pub type Result<T> = std::result::Result<T, LoggingError>;

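/// A single structured log record, parsed from a JSON or plain-text log line.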
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogEntry {
    pub timestamp: String,
    pub level: String,
    pub target: String,
    pub message: String,
    pub fields: HashMap<String, serde_json::Value>,
    pub span_id: Option<String>,
    pub trace_id: Option<String>,
}

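/// Criteria for selecting log entries; every populated field must match.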
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogFilter {
    pub level: Option<String>,
    pub target: Option<String>,
    pub message_pattern: Option<String>,
    pub time_range: Option<TimeRange>,
    pub fields: HashMap<String, serde_json::Value>,
}

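/// An inclusive timestamp range. Comparisons are lexicographic, which is
/// correct for uniformly formatted RFC 3339 timestamps.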
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimeRange {
    pub start: Option<String>,
    pub end: Option<String>,
}

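/// Loads log entries from a file and supports filtering, statistics, and export.
///
/// A minimal usage sketch (the import path is hypothetical; adjust it to this
/// crate's module layout):
///
/// ```ignore
/// use std::collections::HashMap;
/// // Hypothetical import path for illustration only.
/// use things3_cli::logging::{LogAggregator, LogFilter};
///
/// let mut aggregator = LogAggregator::new("app.log".to_string(), 10_000);
/// aggregator.load_logs().unwrap();
/// let errors = aggregator.filter_logs(&LogFilter {
///     level: Some("ERROR".to_string()),
///     target: None,
///     message_pattern: None,
///     time_range: None,
///     fields: HashMap::new(),
/// });
/// println!("{} error entries", errors.len());
/// ```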
pub struct LogAggregator {
    log_file: String,
    max_entries: usize,
    entries: Vec<LogEntry>,
}

impl LogAggregator {
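    /// Creates an aggregator that reads from `log_file` and keeps at most
    /// `max_entries` entries in memory.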
    #[must_use]
    pub fn new(log_file: String, max_entries: usize) -> Self {
        Self {
            log_file,
            max_entries,
            entries: Vec::new(),
        }
    }

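    /// Loads and parses entries from the configured log file, keeping only the
    /// most recent `max_entries`. Lines that fail to parse are skipped.
    ///
    /// # Errors
    ///
    /// Returns [`LoggingError::FileRead`] if the file cannot be opened or read.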
    #[instrument(skip(self))]
    pub fn load_logs(&mut self) -> Result<()> {
        if !Path::new(&self.log_file).exists() {
            info!("Log file does not exist, starting with empty logs");
            return Ok(());
        }

        let file = File::open(&self.log_file)
            .map_err(|e| LoggingError::FileRead(format!("Failed to open log file: {e}")))?;

        let reader = BufReader::new(file);
        let mut line_count = 0;

        for line in reader.lines() {
            let line =
                line.map_err(|e| LoggingError::FileRead(format!("Failed to read line: {e}")))?;

            if let Ok(entry) = Self::parse_log_line(&line) {
                self.entries.push(entry);
                line_count += 1;
            } else {
                warn!("Skipping unparseable log line: {}", line);
            }
        }

        // Keep only the most recent `max_entries` entries.
        if self.entries.len() > self.max_entries {
            let start = self.entries.len() - self.max_entries;
            self.entries.drain(0..start);
        }

        info!("Loaded {} log entries from file", line_count);
        Ok(())
    }

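    /// Parses a single log line, trying JSON first and falling back to the
    /// whitespace-delimited text format.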
    fn parse_log_line(line: &str) -> Result<LogEntry> {
        // Prefer structured JSON entries.
        if let Ok(entry) = serde_json::from_str::<LogEntry>(line) {
            return Ok(entry);
        }

        // Fall back to plain-text parsing.
        Self::parse_text_log_line(line)
    }

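    /// Parses a text log line of the form `TIMESTAMP LEVEL TARGET MESSAGE`.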
    fn parse_text_log_line(line: &str) -> Result<LogEntry> {
        let parts: Vec<&str> = line.splitn(4, ' ').collect();

        if parts.len() < 4 {
            return Err(LoggingError::InvalidFormat(
                "Insufficient log line parts".to_string(),
            ));
        }

        let timestamp = parts[0].to_string();
        let level = parts[1].to_string();
        let target = parts[2].to_string();
        // `splitn(4, ..)` leaves the remainder of the line in the final part.
        let message = parts[3].to_string();

        Ok(LogEntry {
            timestamp,
            level,
            target,
            message,
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        })
    }

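    /// Returns clones of all entries that match `filter`.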
    #[instrument(skip(self))]
    pub fn filter_logs(&self, filter: &LogFilter) -> Vec<LogEntry> {
        self.entries
            .iter()
            .filter(|entry| Self::matches_filter(entry, filter))
            .cloned()
            .collect()
    }

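    /// Returns `true` when `entry` satisfies every populated field of `filter`.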
    fn matches_filter(entry: &LogEntry, filter: &LogFilter) -> bool {
        // Level must match exactly (case-insensitive).
        if let Some(ref level) = filter.level {
            if !entry.level.eq_ignore_ascii_case(level) {
                return false;
            }
        }

        // Target matches on substring.
        if let Some(ref target) = filter.target {
            if !entry.target.contains(target) {
                return false;
            }
        }

        // Message matches on substring.
        if let Some(ref pattern) = filter.message_pattern {
            if !entry.message.contains(pattern) {
                return false;
            }
        }

        if let Some(ref time_range) = filter.time_range {
            if !Self::matches_time_range(entry, time_range) {
                return false;
            }
        }

        // Every requested field must be present with an equal value.
        for (key, value) in &filter.fields {
            if let Some(entry_value) = entry.fields.get(key) {
                if entry_value != value {
                    return false;
                }
            } else {
                return false;
            }
        }

        true
    }

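    /// Checks `entry.timestamp` against an inclusive range using lexicographic
    /// comparison, which is valid for uniformly formatted RFC 3339 timestamps.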
    fn matches_time_range(entry: &LogEntry, time_range: &TimeRange) -> bool {
        if let Some(ref start) = time_range.start {
            if entry.timestamp < *start {
                return false;
            }
        }

        if let Some(ref end) = time_range.end {
            if entry.timestamp > *end {
                return false;
            }
        }

        true
    }

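    /// Computes per-level and per-target counts over the loaded entries.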
    #[instrument(skip(self))]
    pub fn get_statistics(&self) -> LogStatistics {
        let mut level_counts = HashMap::new();
        let mut target_counts = HashMap::new();

        for entry in &self.entries {
            *level_counts.entry(entry.level.clone()).or_insert(0) += 1;
            *target_counts.entry(entry.target.clone()).or_insert(0) += 1;
        }

        LogStatistics {
            total_entries: self.entries.len(),
            level_counts,
            target_counts,
            // Entries retain file order, so first/last are oldest/newest,
            // assuming the log file itself is chronological.
            oldest_entry: self.entries.first().map(|e| e.timestamp.clone()),
            newest_entry: self.entries.last().map(|e| e.timestamp.clone()),
        }
    }

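    /// Exports entries matching `filter` to `output_file` as JSON Lines
    /// (one JSON object per line).
    ///
    /// # Errors
    ///
    /// Returns [`LoggingError::FileWrite`] if the output file cannot be created
    /// or an entry cannot be serialized or written.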
    #[instrument(skip(self))]
    pub fn export_logs(&self, filter: &LogFilter, output_file: &str) -> Result<()> {
        let filtered_logs = self.filter_logs(filter);

        let mut file = File::create(output_file)
            .map_err(|e| LoggingError::FileWrite(format!("Failed to create output file: {e}")))?;

        let count = filtered_logs.len();
        for entry in filtered_logs {
            let json = serde_json::to_string(&entry)
                .map_err(|e| LoggingError::FileWrite(format!("Failed to serialize entry: {e}")))?;
            writeln!(file, "{json}")
                .map_err(|e| LoggingError::FileWrite(format!("Failed to write entry: {e}")))?;
        }

        info!("Exported {} log entries to {}", count, output_file);
        Ok(())
    }
}

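/// Summary statistics over a set of loaded log entries.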
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogStatistics {
    pub total_entries: usize,
    pub level_counts: HashMap<String, usize>,
    pub target_counts: HashMap<String, usize>,
    pub oldest_entry: Option<String>,
    pub newest_entry: Option<String>,
}

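/// Rotates a log file once it exceeds a size threshold, keeping a bounded
/// number of numbered backups (`<file>.1` is the most recent).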
pub struct LogRotator {
    log_file: String,
    max_size: u64,
    max_files: usize,
}

impl LogRotator {
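    /// Creates a rotator for `log_file` with a size threshold of `max_size`
    /// bytes and at most `max_files` rotated backups.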
    #[must_use]
    pub fn new(log_file: String, max_size: u64, max_files: usize) -> Self {
        Self {
            log_file,
            max_size,
            max_files,
        }
    }

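    /// Returns `true` if the log file exists and exceeds `max_size` bytes.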
    #[instrument(skip(self))]
    pub fn should_rotate(&self) -> bool {
        if let Ok(metadata) = std::fs::metadata(&self.log_file) {
            metadata.len() > self.max_size
        } else {
            false
        }
    }

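    /// Rotates the log file if needed: shifts existing backups up by one index,
    /// renames the current file to `<file>.1`, and creates a fresh empty file.
    ///
    /// # Errors
    ///
    /// Returns [`LoggingError::FileWrite`] if any rename or file creation fails.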
    #[instrument(skip(self))]
    pub fn rotate(&self) -> Result<()> {
        if !self.should_rotate() {
            return Ok(());
        }

        info!("Rotating log file: {}", self.log_file);

        // Shift existing backups: `.1` -> `.2`, and so on; the rename over
        // `.{max_files}` implicitly drops the oldest backup.
        for i in (1..self.max_files).rev() {
            let old_file = format!("{}.{}", self.log_file, i);
            let new_file = format!("{}.{}", self.log_file, i + 1);

            if Path::new(&old_file).exists() {
                std::fs::rename(&old_file, &new_file)
                    .map_err(|e| LoggingError::FileWrite(format!("Failed to rotate file: {e}")))?;
            }
        }

        // Move the current log to `.1` and start a fresh file.
        let rotated_file = format!("{}.1", self.log_file);
        std::fs::rename(&self.log_file, &rotated_file)
            .map_err(|e| LoggingError::FileWrite(format!("Failed to rotate current log: {e}")))?;

        File::create(&self.log_file)
            .map_err(|e| LoggingError::FileWrite(format!("Failed to create new log file: {e}")))?;

        info!("Log rotation completed");
        Ok(())
    }
}

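/// Initializes global `tracing` output to `log_file` at the given default
/// `level`, in either JSON or human-readable format. When set, the `RUST_LOG`
/// environment variable overrides `level`.
///
/// A hypothetical call site for illustration:
///
/// ```ignore
/// init_file_logging("app.log", "info", true).expect("logging init failed");
/// ```
///
/// # Errors
///
/// Returns [`LoggingError::FileWrite`] if the log file cannot be opened, or
/// [`LoggingError::FilterCompilation`] if `level` is not a valid filter directive.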
pub fn init_file_logging(log_file: &str, level: &str, json_format: bool) -> Result<()> {
    // `RUST_LOG` takes precedence; otherwise fall back to the provided level.
    let filter = EnvFilter::try_from_default_env().or_else(|_| {
        EnvFilter::try_new(level)
            .map_err(|e| LoggingError::FilterCompilation(format!("Invalid log level: {e}")))
    })?;

    let file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(log_file)
        .map_err(|e| LoggingError::FileWrite(format!("Failed to open log file: {e}")))?;

    // `File` does not implement `MakeWriter` directly; `Arc<File>` does,
    // writing through `&File` as documented by tracing-subscriber.
    let writer = std::sync::Arc::new(file);

    let registry = tracing_subscriber::registry().with(filter);

    if json_format {
        let json_layer = fmt::layer()
            .json()
            .with_writer(writer)
            .with_current_span(true)
            .with_span_list(true)
            .with_target(true)
            .with_thread_ids(true)
            .with_thread_names(true)
            .with_file(true)
            .with_line_number(true);

        registry.with(json_layer).init();
    } else {
        let fmt_layer = fmt::layer()
            .with_writer(writer)
            .with_target(true)
            .with_thread_ids(true)
            .with_thread_names(true)
            .with_file(true)
            .with_line_number(true)
            .with_span_events(FmtSpan::CLOSE);

        registry.with(fmt_layer).init();
    }

    info!("File logging initialized: {}", log_file);
    Ok(())
}

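/// Convenience wrapper over [`LogAggregator`] for common single-field searches.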
pub struct LogSearcher {
    aggregator: LogAggregator,
}

impl LogSearcher {
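    /// Wraps an existing aggregator.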
    #[must_use]
    pub fn new(aggregator: LogAggregator) -> Self {
        Self { aggregator }
    }

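    /// Finds entries whose message contains `query`.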
    #[instrument(skip(self))]
    pub fn search(&self, query: &str) -> Vec<LogEntry> {
        let filter = LogFilter {
            level: None,
            target: None,
            message_pattern: Some(query.to_string()),
            time_range: None,
            fields: HashMap::new(),
        };

        self.aggregator.filter_logs(&filter)
    }

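    /// Finds entries whose level equals `level` (case-insensitive).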
    #[instrument(skip(self))]
    pub fn search_by_level(&self, level: &str) -> Vec<LogEntry> {
        let filter = LogFilter {
            level: Some(level.to_string()),
            target: None,
            message_pattern: None,
            time_range: None,
            fields: HashMap::new(),
        };

        self.aggregator.filter_logs(&filter)
    }

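    /// Finds entries whose target contains `target`.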
    #[instrument(skip(self))]
    pub fn search_by_target(&self, target: &str) -> Vec<LogEntry> {
        let filter = LogFilter {
            level: None,
            target: Some(target.to_string()),
            message_pattern: None,
            time_range: None,
            fields: HashMap::new(),
        };

        self.aggregator.filter_logs(&filter)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    #[test]
    fn test_log_entry_creation() {
        let entry = LogEntry {
            timestamp: "2023-01-01T00:00:00Z".to_string(),
            level: "INFO".to_string(),
            target: "things3_cli".to_string(),
            message: "Test message".to_string(),
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        };

        assert_eq!(entry.level, "INFO");
        assert_eq!(entry.message, "Test message");
    }

    #[test]
    fn test_log_entry_with_fields() {
        let mut fields = HashMap::new();
        fields.insert(
            "user_id".to_string(),
            serde_json::Value::String("123".to_string()),
        );
        fields.insert(
            "action".to_string(),
            serde_json::Value::String("login".to_string()),
        );

        let entry = LogEntry {
            timestamp: "2023-01-01T00:00:00Z".to_string(),
            level: "INFO".to_string(),
            target: "things3_cli".to_string(),
            message: "User logged in".to_string(),
            fields,
            span_id: Some("span-123".to_string()),
            trace_id: Some("trace-456".to_string()),
        };

        assert_eq!(entry.fields.len(), 2);
        assert_eq!(entry.span_id, Some("span-123".to_string()));
        assert_eq!(entry.trace_id, Some("trace-456".to_string()));
    }

    #[test]
    fn test_log_filter_creation() {
        let filter = LogFilter {
            level: Some("ERROR".to_string()),
            target: None,
            message_pattern: None,
            time_range: None,
            fields: HashMap::new(),
        };

        assert_eq!(filter.level, Some("ERROR".to_string()));
    }

    #[test]
    fn test_log_filter_with_all_fields() {
        let mut fields = HashMap::new();
        fields.insert(
            "module".to_string(),
            serde_json::Value::String("auth".to_string()),
        );

        let time_range = TimeRange {
            start: Some("2023-01-01T00:00:00Z".to_string()),
            end: Some("2023-01-01T23:59:59Z".to_string()),
        };

        let filter = LogFilter {
            level: Some("WARN".to_string()),
            target: Some("things3_cli::auth".to_string()),
            message_pattern: Some("failed".to_string()),
            time_range: Some(time_range),
            fields,
        };

        assert_eq!(filter.level, Some("WARN".to_string()));
        assert_eq!(filter.target, Some("things3_cli::auth".to_string()));
        assert_eq!(filter.message_pattern, Some("failed".to_string()));
        assert!(filter.time_range.is_some());
        assert_eq!(filter.fields.len(), 1);
    }

    #[test]
    fn test_time_range_creation() {
        let time_range = TimeRange {
            start: Some("2023-01-01T00:00:00Z".to_string()),
            end: Some("2023-01-01T23:59:59Z".to_string()),
        };

        assert_eq!(time_range.start, Some("2023-01-01T00:00:00Z".to_string()));
        assert_eq!(time_range.end, Some("2023-01-01T23:59:59Z".to_string()));
    }

    #[test]
    fn test_log_aggregator_creation() {
        let aggregator = LogAggregator::new("test.log".to_string(), 1000);
        assert_eq!(aggregator.max_entries, 1000);
        assert_eq!(aggregator.entries.len(), 0);
    }

    #[test]
    fn test_log_aggregator_entries_access() {
        let aggregator = LogAggregator::new("test.log".to_string(), 1000);
        assert_eq!(aggregator.entries.len(), 0);
    }

    #[test]
    fn test_log_aggregator_filter_logs() {
        let mut aggregator = LogAggregator::new("test.log".to_string(), 1000);

        let entry1 = LogEntry {
            timestamp: "2023-01-01T00:00:00Z".to_string(),
            level: "INFO".to_string(),
            target: "things3_cli".to_string(),
            message: "Info message".to_string(),
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        };

        let entry2 = LogEntry {
            timestamp: "2023-01-01T00:00:01Z".to_string(),
            level: "ERROR".to_string(),
            target: "things3_cli".to_string(),
            message: "Error message".to_string(),
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        };

        aggregator.entries.push(entry1);
        aggregator.entries.push(entry2);

        let filter = LogFilter {
            level: Some("ERROR".to_string()),
            target: None,
            message_pattern: None,
            time_range: None,
            fields: HashMap::new(),
        };

        let filtered = aggregator.filter_logs(&filter);
        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].level, "ERROR");
    }

    #[test]
    fn test_log_aggregator_filter_by_message_pattern() {
        let mut aggregator = LogAggregator::new("test.log".to_string(), 1000);

        let entry1 = LogEntry {
            timestamp: "2023-01-01T00:00:00Z".to_string(),
            level: "INFO".to_string(),
            target: "things3_cli".to_string(),
            message: "User login successful".to_string(),
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        };

        let entry2 = LogEntry {
            timestamp: "2023-01-01T00:00:01Z".to_string(),
            level: "INFO".to_string(),
            target: "things3_cli".to_string(),
            message: "Database connection failed".to_string(),
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        };

        aggregator.entries.push(entry1);
        aggregator.entries.push(entry2);

        let filter = LogFilter {
            level: None,
            target: None,
            message_pattern: Some("failed".to_string()),
            time_range: None,
            fields: HashMap::new(),
        };

        let filtered = aggregator.filter_logs(&filter);
        assert_eq!(filtered.len(), 1);
        assert!(filtered[0].message.contains("failed"));
    }

    #[test]
    fn test_log_aggregator_get_statistics() {
        let mut aggregator = LogAggregator::new("test.log".to_string(), 1000);

        for i in 0..5 {
            let level = if i % 2 == 0 { "INFO" } else { "ERROR" };
            let entry = LogEntry {
                timestamp: format!("2023-01-01T00:00:0{i}Z"),
                level: level.to_string(),
                target: "things3_cli".to_string(),
                message: format!("Message {i}"),
                fields: HashMap::new(),
                span_id: None,
                trace_id: None,
            };
            aggregator.entries.push(entry);
        }

        let stats = aggregator.get_statistics();
        assert_eq!(stats.total_entries, 5);
        assert_eq!(stats.level_counts.get("INFO"), Some(&3));
        assert_eq!(stats.level_counts.get("ERROR"), Some(&2));
    }

    #[test]
    fn test_log_rotator_creation() {
        let rotator = LogRotator::new("test.log".to_string(), 1024 * 1024, 5);
        assert_eq!(rotator.max_size, 1024 * 1024);
        assert_eq!(rotator.max_files, 5);
    }

    #[test]
    fn test_log_rotator_should_rotate() {
        let temp_dir = TempDir::new().unwrap();
        let log_file = temp_dir.path().join("test.log");
        let log_file_str = log_file.to_string_lossy().to_string();

        // Below the 100-byte threshold: no rotation needed.
        fs::write(&log_file, "small content").unwrap();

        let rotator = LogRotator::new(log_file_str.clone(), 100, 5);

        assert!(!rotator.should_rotate());

        // Above the threshold: rotation required.
        let large_content = "x".repeat(200);
        fs::write(&log_file, large_content).unwrap();

        let rotator_large = LogRotator::new(log_file_str, 100, 5);

        assert!(rotator_large.should_rotate());
    }

    #[test]
    fn test_log_rotator_rotate() {
        let temp_dir = TempDir::new().unwrap();
        let log_file = temp_dir.path().join("test.log");
        let log_file_str = log_file.to_string_lossy().to_string();

        let large_content = "x".repeat(200);
        fs::write(&log_file, large_content).unwrap();

        let rotator = LogRotator::new(log_file_str, 100, 5);

        let result = rotator.rotate();
        assert!(result.is_ok());

        // The old contents should now live in `test.log.1`, with a fresh,
        // empty `test.log` created in its place.
        assert!(temp_dir.path().join("test.log.1").exists());
        assert!(log_file.exists());
        assert_eq!(fs::metadata(&log_file).unwrap().len(), 0);
    }

    #[test]
    fn test_logging_error_display() {
        let error = LoggingError::FileRead("test error".to_string());
        assert!(error.to_string().contains("Failed to read log file"));
        assert!(error.to_string().contains("test error"));
    }

    #[test]
    fn test_logging_error_variants() {
        let file_read_error = LoggingError::FileRead("read error".to_string());
        let file_write_error = LoggingError::FileWrite("write error".to_string());
        let invalid_format_error = LoggingError::InvalidFormat("format error".to_string());
        let filter_compilation_error = LoggingError::FilterCompilation("filter error".to_string());

        assert!(matches!(file_read_error, LoggingError::FileRead(_)));
        assert!(matches!(file_write_error, LoggingError::FileWrite(_)));
        assert!(matches!(
            invalid_format_error,
            LoggingError::InvalidFormat(_)
        ));
        assert!(matches!(
            filter_compilation_error,
            LoggingError::FilterCompilation(_)
        ));
    }

    #[test]
    fn test_log_aggregator_load_logs_nonexistent_file() {
        let mut aggregator = LogAggregator::new("nonexistent.log".to_string(), 1000);
        let result = aggregator.load_logs();
        assert!(result.is_ok());
        assert_eq!(aggregator.entries.len(), 0);
    }

    #[test]
    fn test_log_aggregator_export_logs() {
        let temp_dir = TempDir::new().unwrap();
        let log_file = temp_dir.path().join("test.log");
        let log_file_str = log_file.to_string_lossy().to_string();
        let output_file = temp_dir.path().join("exported.log");
        let output_file_str = output_file.to_string_lossy().to_string();

        let mut aggregator = LogAggregator::new(log_file_str, 1000);

        let entry = LogEntry {
            timestamp: "2023-01-01T00:00:00Z".to_string(),
            level: "INFO".to_string(),
            target: "things3_cli".to_string(),
            message: "Test message".to_string(),
            fields: HashMap::new(),
            span_id: None,
            trace_id: None,
        };

        aggregator.entries.push(entry);

        let filter = LogFilter {
            level: None,
            target: None,
            message_pattern: None,
            time_range: None,
            fields: HashMap::new(),
        };

        let result = aggregator.export_logs(&filter, &output_file_str);
        assert!(result.is_ok());

        assert!(output_file.exists());
    }
}