1use serde_json::{json, Value};
7use std::collections::HashMap;
8use std::fmt::{self, Display};
9use std::fs::{File, OpenOptions};
10use std::io::{BufWriter, Write};
11use std::path::Path;
12use std::str::FromStr;
13use std::sync::{Arc, Mutex, RwLock};
14use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
15
/// Log severity. Lower discriminant = more severe, so the derived ordering
/// gives `Error < Warn < Info < Debug < Trace`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum LogLevel {
    Error = 0,
    Warn = 1,
    Info = 2,
    Debug = 3,
    Trace = 4,
}

impl Display for LogLevel {
    /// Renders the level as its upper-case name (e.g. "ERROR").
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            LogLevel::Error => "ERROR",
            LogLevel::Warn => "WARN",
            LogLevel::Info => "INFO",
            LogLevel::Debug => "DEBUG",
            LogLevel::Trace => "TRACE",
        };
        f.write_str(name)
    }
}

impl FromStr for LogLevel {
    type Err = String;

    /// Parses a level name case-insensitively ("warn", "WARN", "Warn", ...).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let upper = s.to_uppercase();
        match upper.as_str() {
            "ERROR" => Ok(LogLevel::Error),
            "WARN" => Ok(LogLevel::Warn),
            "INFO" => Ok(LogLevel::Info),
            "DEBUG" => Ok(LogLevel::Debug),
            "TRACE" => Ok(LogLevel::Trace),
            _ => Err(format!("Invalid log level: {s}")),
        }
    }
}
52
/// One structured log record plus arbitrary key/value fields.
#[derive(Debug, Clone)]
pub struct LogEntry {
    /// Wall-clock capture time (set in `LogEntry::new`).
    pub timestamp: SystemTime,
    pub level: LogLevel,
    pub message: String,
    /// Originating module name (free-form string, not a Rust module path).
    pub module: String,
    /// Source file name as supplied by the caller.
    pub file: String,
    pub line: u32,
    /// Debug-formatted `std::thread::ThreadId` of the logging thread.
    pub thread_id: String,
    /// Extra structured fields; merged into the top level of the JSON form.
    pub fields: HashMap<String, Value>,
}
65
66impl LogEntry {
67 pub fn new(level: LogLevel, message: String, module: String, file: String, line: u32) -> Self {
68 Self {
69 timestamp: SystemTime::now(),
70 level,
71 message,
72 module,
73 file,
74 line,
75 thread_id: format!("{:?}", std::thread::current().id()),
76 fields: HashMap::new(),
77 }
78 }
79
80 pub fn with_field<V: Into<Value>>(mut self, key: String, value: V) -> Self {
81 self.fields.insert(key, value.into());
82 self
83 }
84
85 pub fn to_json(&self) -> Value {
86 let timestamp_ms = self
87 .timestamp
88 .duration_since(UNIX_EPOCH)
89 .unwrap_or_default()
90 .as_millis();
91
92 let mut json = json!({
93 "timestamp": timestamp_ms,
94 "level": self.level.to_string(),
95 "message": self.message,
96 "module": self.module,
97 "file": self.file,
98 "line": self.line,
99 "thread_id": self.thread_id,
100 });
101
102 if let Value::Object(ref mut map) = json {
103 for (key, value) in &self.fields {
104 map.insert(key.clone(), value.clone());
105 }
106 }
107
108 json
109 }
110
111 pub fn to_text(&self) -> String {
112 let timestamp = self
113 .timestamp
114 .duration_since(UNIX_EPOCH)
115 .unwrap_or_default()
116 .as_millis();
117
118 format!(
119 "[{}] {} [{}:{}] [{}] {} {}",
120 timestamp, self.level, self.file, self.line, self.thread_id, self.module, self.message
121 )
122 }
123}
124
/// Converts a [`LogEntry`] into its final string representation.
pub trait LogFormatter: Send + Sync {
    fn format(&self, entry: &LogEntry) -> String;
}

/// Emits each entry as a compact single-line JSON object.
pub struct JsonFormatter;

impl LogFormatter for JsonFormatter {
    fn format(&self, entry: &LogEntry) -> String {
        entry.to_json().to_string()
    }
}

/// Emits each entry in the bracketed plain-text form of [`LogEntry::to_text`].
pub struct TextFormatter;

impl LogFormatter for TextFormatter {
    fn format(&self, entry: &LogEntry) -> String {
        entry.to_text()
    }
}
147
/// Sink that receives formatted log lines. Implementations must be
/// `Send + Sync` because the logger shares them behind a mutex.
pub trait LogOutput: Send + Sync {
    /// Writes one already-formatted line; the sink appends the newline.
    fn write(&mut self, formatted_log: &str) -> Result<(), std::io::Error>;
    /// Flushes any buffered data to the underlying device.
    fn flush(&mut self) -> Result<(), std::io::Error>;
}

/// Writes log lines to standard output.
pub struct ConsoleOutput;

impl LogOutput for ConsoleOutput {
    fn write(&mut self, formatted_log: &str) -> Result<(), std::io::Error> {
        // Fix: `println!` panics on write failure (e.g. a closed pipe), even
        // though this trait already returns `io::Error`. Write through a
        // locked handle and propagate the error instead.
        let stdout = std::io::stdout();
        let mut handle = stdout.lock();
        writeln!(handle, "{formatted_log}")
    }

    fn flush(&mut self) -> Result<(), std::io::Error> {
        std::io::stdout().flush()
    }
}
167
/// Appending file sink. Writes go through a `BufWriter`, so callers must
/// `flush()` to guarantee data reaches the file.
pub struct FileOutput {
    writer: BufWriter<File>,
}

impl FileOutput {
    /// Opens `path` for appending, creating the file if it does not exist.
    pub fn new<P: AsRef<Path>>(path: P) -> Result<Self, std::io::Error> {
        let file = OpenOptions::new()
            .create(true)
            .append(true)
            .open(path.as_ref())?;
        let writer = BufWriter::new(file);
        Ok(FileOutput { writer })
    }
}
181
182impl LogOutput for FileOutput {
183 fn write(&mut self, formatted_log: &str) -> Result<(), std::io::Error> {
184 writeln!(self.writer, "{formatted_log}")?;
185 Ok(())
186 }
187
188 fn flush(&mut self) -> Result<(), std::io::Error> {
189 self.writer.flush()
190 }
191}
192
/// Runtime-tunable logger settings.
#[derive(Debug, Clone)]
pub struct LoggerConfig {
    /// Global minimum level; less severe entries are dropped.
    pub level: LogLevel,
    /// Per-module overrides; these take precedence over `level`.
    pub module_filters: HashMap<String, LogLevel>,
    // NOTE(review): this flag is not consulted anywhere in this file —
    // `PerformanceLogger` logs timings unconditionally. Confirm intent.
    pub enable_performance_logging: bool,
    /// Max entries retained in memory; the oldest is dropped on overflow.
    pub buffer_size: usize,
    /// When true, each entry is written to all outputs as it is logged.
    pub auto_flush: bool,
    // NOTE(review): also unused in this file — `LogEntry::new` always
    // captures caller info regardless of this setting.
    pub include_caller_info: bool,
}

impl Default for LoggerConfig {
    /// INFO level, no module filters, 1000-entry buffer, auto-flush on.
    fn default() -> Self {
        Self {
            level: LogLevel::Info,
            module_filters: HashMap::new(),
            enable_performance_logging: false,
            buffer_size: 1000,
            auto_flush: true,
            include_caller_info: true,
        }
    }
}
216
/// Thread-safe logger: config behind a `RwLock`, output sinks, the bounded
/// entry buffer, and counters behind `Mutex`es, so a `Logger` can be shared
/// across threads via `Arc`.
pub struct Logger {
    config: Arc<RwLock<LoggerConfig>>,
    outputs: Arc<Mutex<Vec<Box<dyn LogOutput>>>>,
    formatter: Arc<dyn LogFormatter>,
    buffer: Arc<Mutex<Vec<LogEntry>>>,
    stats: Arc<Mutex<LogStats>>,
}

/// Counters maintained by [`Logger`]; snapshot via `Logger::stats`.
#[derive(Debug, Default)]
pub struct LogStats {
    pub total_logs: u64,
    pub logs_by_level: HashMap<LogLevel, u64>,
    pub logs_by_module: HashMap<String, u64>,
    /// Times the oldest buffered entry was dropped because the buffer was full.
    pub buffer_overflows: u64,
    /// Failed `LogOutput::write` calls.
    pub write_errors: u64,
}
234
235impl Logger {
236 pub fn new(config: LoggerConfig) -> Self {
237 Self {
238 config: Arc::new(RwLock::new(config)),
239 outputs: Arc::new(Mutex::new(Vec::new())),
240 formatter: Arc::new(TextFormatter),
241 buffer: Arc::new(Mutex::new(Vec::new())),
242 stats: Arc::new(Mutex::new(LogStats::default())),
243 }
244 }
245
246 pub fn with_formatter(mut self, formatter: Arc<dyn LogFormatter>) -> Self {
247 self.formatter = formatter;
248 self
249 }
250
251 pub fn add_output(&self, output: Box<dyn LogOutput>) {
252 let mut outputs = self.outputs.lock().unwrap();
253 outputs.push(output);
254 }
255
256 pub fn log(&self, entry: LogEntry) {
257 let config = self.config.read().unwrap();
258
259 if !self.should_log(&entry.level, &entry.module, &config) {
261 return;
262 }
263
264 {
266 let mut stats = self.stats.lock().unwrap();
267 stats.total_logs += 1;
268 *stats.logs_by_level.entry(entry.level).or_insert(0) += 1;
269 *stats
270 .logs_by_module
271 .entry(entry.module.clone())
272 .or_insert(0) += 1;
273 }
274
275 {
277 let mut buffer = self.buffer.lock().unwrap();
278 if buffer.len() >= config.buffer_size {
279 buffer.remove(0); let mut stats = self.stats.lock().unwrap();
281 stats.buffer_overflows += 1;
282 }
283 buffer.push(entry.clone());
284 }
285
286 if config.auto_flush {
288 self.flush_entry(&entry);
289 }
290 }
291
292 fn should_log(&self, level: &LogLevel, module: &str, config: &LoggerConfig) -> bool {
293 if let Some(module_level) = config.module_filters.get(module) {
295 return level <= module_level;
296 }
297
298 level <= &config.level
300 }
301
302 fn flush_entry(&self, entry: &LogEntry) {
303 let formatted = self.formatter.format(entry);
304 let mut outputs = self.outputs.lock().unwrap();
305
306 for output in outputs.iter_mut() {
307 if output.write(&formatted).is_err() {
308 let mut stats = self.stats.lock().unwrap();
309 stats.write_errors += 1;
310 }
311 }
312 }
313
314 pub fn flush(&self) {
315 let buffer = {
316 let mut buffer = self.buffer.lock().unwrap();
317 let entries = buffer.clone();
318 buffer.clear();
319 entries
320 };
321
322 for entry in buffer {
323 self.flush_entry(&entry);
324 }
325
326 let mut outputs = self.outputs.lock().unwrap();
328 for output in outputs.iter_mut() {
329 let _ = output.flush();
330 }
331 }
332
333 pub fn set_level(&self, level: LogLevel) {
334 let mut config = self.config.write().unwrap();
335 config.level = level;
336 }
337
338 pub fn set_module_level(&self, module: String, level: LogLevel) {
339 let mut config = self.config.write().unwrap();
340 config.module_filters.insert(module, level);
341 }
342
343 pub fn stats(&self) -> LogStats {
344 self.stats.lock().unwrap().clone()
345 }
346
347 pub fn clear_stats(&self) {
348 let mut stats = self.stats.lock().unwrap();
349 *stats = LogStats::default();
350 }
351}
352
353impl Clone for LogStats {
354 fn clone(&self) -> Self {
355 Self {
356 total_logs: self.total_logs,
357 logs_by_level: self.logs_by_level.clone(),
358 logs_by_module: self.logs_by_module.clone(),
359 buffer_overflows: self.buffer_overflows,
360 write_errors: self.write_errors,
361 }
362 }
363}
364
/// Times named operations, keeping per-name duration samples and logging
/// each timing as a DEBUG entry on the wrapped logger.
pub struct PerformanceLogger {
    logger: Arc<Logger>,
    /// Samples recorded so far, keyed by operation name.
    operations: Arc<Mutex<HashMap<String, Vec<Duration>>>>,
}
370
371impl PerformanceLogger {
372 pub fn new(logger: Arc<Logger>) -> Self {
373 Self {
374 logger,
375 operations: Arc::new(Mutex::new(HashMap::new())),
376 }
377 }
378
379 pub fn time_operation<F, R>(&self, name: &str, operation: F) -> R
380 where
381 F: FnOnce() -> R,
382 {
383 let start = Instant::now();
384 let result = operation();
385 let duration = start.elapsed();
386
387 {
389 let mut operations = self.operations.lock().unwrap();
390 operations
391 .entry(name.to_string())
392 .or_default()
393 .push(duration);
394 }
395
396 let entry = LogEntry::new(
398 LogLevel::Debug,
399 format!("Operation '{name}' completed"),
400 "performance".to_string(),
401 "performance_logger.rs".to_string(),
402 0,
403 )
404 .with_field("operation".to_string(), name.to_string())
405 .with_field("duration_ms".to_string(), duration.as_millis() as f64);
406
407 self.logger.log(entry);
408
409 result
410 }
411
412 pub fn get_operation_stats(&self, name: &str) -> Option<OperationStats> {
413 let operations = self.operations.lock().unwrap();
414 if let Some(durations) = operations.get(name) {
415 if durations.is_empty() {
416 return None;
417 }
418
419 let total_ms: f64 = durations.iter().map(|d| d.as_millis() as f64).sum();
420 let count = durations.len();
421 let avg_ms = total_ms / count as f64;
422
423 let mut sorted_durations = durations.clone();
424 sorted_durations.sort();
425
426 let min_ms = sorted_durations.first().unwrap().as_millis() as f64;
427 let max_ms = sorted_durations.last().unwrap().as_millis() as f64;
428
429 let median_ms = if count % 2 == 0 {
430 let mid = count / 2;
431 (sorted_durations[mid - 1].as_millis() + sorted_durations[mid].as_millis()) as f64
432 / 2.0
433 } else {
434 sorted_durations[count / 2].as_millis() as f64
435 };
436
437 Some(OperationStats {
438 name: name.to_string(),
439 count,
440 total_ms,
441 avg_ms,
442 min_ms,
443 max_ms,
444 median_ms,
445 })
446 } else {
447 None
448 }
449 }
450
451 pub fn clear_operation_stats(&self, name: &str) {
452 let mut operations = self.operations.lock().unwrap();
453 operations.remove(name);
454 }
455
456 pub fn get_all_operations(&self) -> Vec<String> {
457 let operations = self.operations.lock().unwrap();
458 operations.keys().cloned().collect()
459 }
460}
461
/// Summary statistics for one named operation. All `_ms` values are in
/// milliseconds, truncated to whole ms when converted from `Duration`.
#[derive(Debug, Clone)]
pub struct OperationStats {
    pub name: String,
    /// Number of recorded samples.
    pub count: usize,
    pub total_ms: f64,
    pub avg_ms: f64,
    pub min_ms: f64,
    pub max_ms: f64,
    pub median_ms: f64,
}
472
/// Logger that tags every entry with this node's id. Peer node ids are only
/// bookkeeping here — entries are not shipped to other nodes (see
/// `log_distributed`).
pub struct DistributedLogger {
    local_logger: Arc<Logger>,
    node_id: String,
    /// Known peer node ids, deduplicated on insert.
    cluster_nodes: Arc<RwLock<Vec<String>>>,
}
479
480impl DistributedLogger {
481 pub fn new(local_logger: Arc<Logger>, node_id: String) -> Self {
482 Self {
483 local_logger,
484 node_id,
485 cluster_nodes: Arc::new(RwLock::new(Vec::new())),
486 }
487 }
488
489 pub fn add_node(&self, node_id: String) {
490 let mut nodes = self.cluster_nodes.write().unwrap();
491 if !nodes.contains(&node_id) {
492 nodes.push(node_id);
493 }
494 }
495
496 pub fn remove_node(&self, node_id: &str) {
497 let mut nodes = self.cluster_nodes.write().unwrap();
498 nodes.retain(|id| id != node_id);
499 }
500
501 pub fn log_distributed(&self, mut entry: LogEntry) {
502 entry = entry.with_field("node_id".to_string(), self.node_id.clone());
504
505 self.local_logger.log(entry);
507
508 }
511
512 pub fn get_cluster_nodes(&self) -> Vec<String> {
513 self.cluster_nodes.read().unwrap().clone()
514 }
515}
516
517pub struct LogAnalyzer {
519 entries: Vec<LogEntry>,
520}
521
522impl LogAnalyzer {
523 pub fn new() -> Self {
524 Self {
525 entries: Vec::new(),
526 }
527 }
528
529 pub fn add_entries(&mut self, entries: Vec<LogEntry>) {
530 self.entries.extend(entries);
531 }
532
533 pub fn analyze_patterns(&self) -> LogAnalysis {
534 let mut analysis = LogAnalysis::default();
535
536 for entry in &self.entries {
537 analysis.total_entries += 1;
538 *analysis.entries_by_level.entry(entry.level).or_insert(0) += 1;
539 *analysis
540 .entries_by_module
541 .entry(entry.module.clone())
542 .or_insert(0) += 1;
543
544 if entry.level == LogLevel::Error {
546 *analysis
547 .error_patterns
548 .entry(entry.message.clone())
549 .or_insert(0) += 1;
550 }
551 }
552
553 analysis
554 }
555
556 pub fn find_errors_in_timeframe(&self, start: SystemTime, end: SystemTime) -> Vec<LogEntry> {
557 self.entries
558 .iter()
559 .filter(|entry| {
560 entry.level == LogLevel::Error && entry.timestamp >= start && entry.timestamp <= end
561 })
562 .cloned()
563 .collect()
564 }
565
566 pub fn get_module_activity(&self, module: &str) -> Vec<LogEntry> {
567 self.entries
568 .iter()
569 .filter(|entry| entry.module == module)
570 .cloned()
571 .collect()
572 }
573}
574
/// Aggregate counts produced by `LogAnalyzer::analyze_patterns`.
#[derive(Debug, Default)]
pub struct LogAnalysis {
    pub total_entries: u64,
    /// Entry count per severity level.
    pub entries_by_level: HashMap<LogLevel, u64>,
    /// Entry count per module name.
    pub entries_by_module: HashMap<String, u64>,
    /// Occurrence count per distinct ERROR message text.
    pub error_patterns: HashMap<String, u64>,
}

impl Default for LogAnalyzer {
    /// Equivalent to `LogAnalyzer::new`.
    fn default() -> Self {
        Self::new()
    }
}
588
// Process-wide logger used by the `log_*` macros and the free functions
// below. Initialized lazily on first use with the default config and a
// single console sink.
// NOTE(review): on Rust >= 1.80 this could be a `std::sync::LazyLock` static,
// dropping the `lazy_static` dependency — confirm the project's MSRV.
lazy_static::lazy_static! {
    static ref GLOBAL_LOGGER: Arc<Logger> = {
        let config = LoggerConfig::default();
        let logger = Arc::new(Logger::new(config));
        logger.add_output(Box::new(ConsoleOutput));
        logger
    };
}
598
/// Logs at ERROR level via the global logger; accepts `format!`-style args.
#[macro_export]
macro_rules! log_error {
    ($($arg:tt)*) => {
        $crate::logging::log_with_level($crate::logging::LogLevel::Error, format!($($arg)*))
    };
}

/// Logs at WARN level via the global logger; accepts `format!`-style args.
#[macro_export]
macro_rules! log_warn {
    ($($arg:tt)*) => {
        $crate::logging::log_with_level($crate::logging::LogLevel::Warn, format!($($arg)*))
    };
}

/// Logs at INFO level via the global logger; accepts `format!`-style args.
#[macro_export]
macro_rules! log_info {
    ($($arg:tt)*) => {
        $crate::logging::log_with_level($crate::logging::LogLevel::Info, format!($($arg)*))
    };
}

/// Logs at DEBUG level via the global logger; accepts `format!`-style args.
#[macro_export]
macro_rules! log_debug {
    ($($arg:tt)*) => {
        $crate::logging::log_with_level($crate::logging::LogLevel::Debug, format!($($arg)*))
    };
}

/// Logs at TRACE level via the global logger; accepts `format!`-style args.
#[macro_export]
macro_rules! log_trace {
    ($($arg:tt)*) => {
        $crate::logging::log_with_level($crate::logging::LogLevel::Trace, format!($($arg)*))
    };
}
634
635pub fn log_with_level(level: LogLevel, message: String) {
636 let entry = LogEntry::new(
637 level,
638 message,
639 "global".to_string(),
640 "unknown".to_string(),
641 0,
642 );
643 GLOBAL_LOGGER.log(entry);
644}
645
/// Returns a shared handle to the process-wide logger.
pub fn get_global_logger() -> Arc<Logger> {
    GLOBAL_LOGGER.clone()
}

/// Sets the global logger's minimum level.
pub fn set_global_level(level: LogLevel) {
    GLOBAL_LOGGER.set_level(level);
}

/// Writes out any buffered entries on the global logger and flushes its outputs.
pub fn flush_global_logger() {
    GLOBAL_LOGGER.flush();
}
657
// NOTE(review): `#[allow(non_snake_case)]` looks unnecessary — every test
// name below is already snake_case; consider removing it.
#[allow(non_snake_case)]
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;
    use tempfile::NamedTempFile;

    // Ordering check: lower discriminant = more severe.
    #[test]
    fn test_log_levels() {
        assert!(LogLevel::Error < LogLevel::Warn);
        assert!(LogLevel::Warn < LogLevel::Info);
        assert!(LogLevel::Info < LogLevel::Debug);
        assert!(LogLevel::Debug < LogLevel::Trace);
    }

    // Constructor copies each argument into the matching field.
    #[test]
    fn test_log_entry_creation() {
        let entry = LogEntry::new(
            LogLevel::Info,
            "Test message".to_string(),
            "test_module".to_string(),
            "test.rs".to_string(),
            42,
        );

        assert_eq!(entry.level, LogLevel::Info);
        assert_eq!(entry.message, "Test message");
        assert_eq!(entry.module, "test_module");
        assert_eq!(entry.file, "test.rs");
        assert_eq!(entry.line, 42);
    }

    // Builder-style fields convert into serde_json Values.
    #[test]
    fn test_log_entry_with_fields() {
        let entry = LogEntry::new(
            LogLevel::Debug,
            "Debug message".to_string(),
            "test".to_string(),
            "test.rs".to_string(),
            1,
        )
        .with_field("key1".to_string(), "value1".to_string())
        .with_field("key2".to_string(), 42);

        assert_eq!(entry.fields.len(), 2);
        assert_eq!(
            entry.fields.get("key1").unwrap(),
            &Value::String("value1".to_string())
        );
        assert_eq!(entry.fields.get("key2").unwrap(), &Value::Number(42.into()));
    }

    // A fresh logger starts with zeroed stats.
    #[test]
    fn test_logger_creation() {
        let config = LoggerConfig::default();
        let logger = Logger::new(config);

        let stats = logger.stats();
        assert_eq!(stats.total_logs, 0);
    }

    // End-to-end: log one entry through a file sink and check the counter.
    #[test]
    fn test_logger_with_file_output() {
        let temp_file = NamedTempFile::new().unwrap();
        let config = LoggerConfig::default();
        let logger = Logger::new(config);

        let file_output = FileOutput::new(temp_file.path()).unwrap();
        logger.add_output(Box::new(file_output));

        let entry = LogEntry::new(
            LogLevel::Info,
            "Test log".to_string(),
            "test".to_string(),
            "test.rs".to_string(),
            1,
        );

        logger.log(entry);
        logger.flush();

        let stats = logger.stats();
        assert_eq!(stats.total_logs, 1);
    }

    // A timed closure's result passes through and its duration is recorded.
    #[test]
    fn test_performance_logger() {
        let config = LoggerConfig::default();
        let logger = Arc::new(Logger::new(config));
        let perf_logger = PerformanceLogger::new(logger);

        let result = perf_logger.time_operation("test_op", || {
            std::thread::sleep(std::time::Duration::from_millis(10));
            42
        });

        assert_eq!(result, 42);

        let stats = perf_logger.get_operation_stats("test_op").unwrap();
        assert_eq!(stats.count, 1);
        assert!(stats.avg_ms >= 10.0);
    }

    // Pattern analysis tallies by level and module.
    #[test]
    fn test_log_analyzer() {
        let mut analyzer = LogAnalyzer::new();

        let entries = vec![
            LogEntry::new(
                LogLevel::Info,
                "Info message".to_string(),
                "module1".to_string(),
                "test.rs".to_string(),
                1,
            ),
            LogEntry::new(
                LogLevel::Error,
                "Error message".to_string(),
                "module1".to_string(),
                "test.rs".to_string(),
                2,
            ),
            LogEntry::new(
                LogLevel::Debug,
                "Debug message".to_string(),
                "module2".to_string(),
                "test.rs".to_string(),
                3,
            ),
        ];

        analyzer.add_entries(entries);
        let analysis = analyzer.analyze_patterns();

        assert_eq!(analysis.total_entries, 3);
        assert_eq!(*analysis.entries_by_level.get(&LogLevel::Info).unwrap(), 1);
        assert_eq!(*analysis.entries_by_level.get(&LogLevel::Error).unwrap(), 1);
        assert_eq!(*analysis.entries_by_module.get("module1").unwrap(), 2);
        assert_eq!(*analysis.entries_by_module.get("module2").unwrap(), 1);
    }

    // Peer bookkeeping: adds deduplicate, and the local node is not listed.
    #[test]
    fn test_distributed_logger() {
        let config = LoggerConfig::default();
        let local_logger = Arc::new(Logger::new(config));
        let dist_logger = DistributedLogger::new(local_logger, "node1".to_string());

        dist_logger.add_node("node2".to_string());
        dist_logger.add_node("node3".to_string());

        let nodes = dist_logger.get_cluster_nodes();
        assert_eq!(nodes.len(), 2);
        assert!(nodes.contains(&"node2".to_string()));
        assert!(nodes.contains(&"node3".to_string()));
    }
}
813}